From 438812cb6e2d1a34d87a8f2aacc224eb90a609f3 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Rapha=C3=ABl=20Vinot?= <raphael@vinot.info>
Date: Tue, 3 Dec 2024 11:14:41 +0100
Subject: [PATCH] Merge original typing from types-redis, adapt it for valkey.
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Signed-off-by: Raphaël Vinot <raphael@vinot.info>
---
 .github/workflows/mypy.yml              |    36 +
 .mypy.ini                               |    83 +-
 tests/test_asyncio/compat.py            |     2 +
 tests/test_asyncio/test_bloom.py        |    52 +-
 tests/test_asyncio/test_commands.py     |   673 +-
 tests/test_asyncio/test_pipeline.py     |     4 +-
 tests/test_asyncio/test_pubsub.py       |    88 +-
 tests/test_cache.py                     |     6 +-
 tests/test_commands.py                  |    61 +-
 tests/test_pipeline.py                  |     4 +-
 tests/test_pubsub.py                    |    22 +-
 types.patch                             | 10525 ++++++++++++++++++++++
 valkey/__init__.py                      |     4 +
 valkey/asyncio/__init__.pyi             |    64 +
 valkey/asyncio/client.pyi               |  1102 +++
 valkey/asyncio/cluster.pyi              |   229 +
 valkey/asyncio/connection.pyi           |   363 +
 valkey/asyncio/lock.pyi                 |    51 +
 valkey/asyncio/parser.pyi               |     9 +
 valkey/asyncio/retry.pyi                |    12 +
 valkey/asyncio/sentinel.pyi             |   162 +
 valkey/asyncio/utils.pyi                |    15 +
 valkey/backoff.pyi                      |    31 +
 valkey/client.pyi                       |   806 ++
 valkey/cluster.pyi                      |   265 +
 valkey/commands/__init__.pyi            |    17 +
 valkey/commands/bf/__init__.pyi         |    58 +
 valkey/commands/bf/commands.pyi         |   112 +
 valkey/commands/bf/info.pyi             |    43 +
 valkey/commands/cluster.pyi             |    60 +
 valkey/commands/core.pyi                |  1771 ++++
 valkey/commands/graph/__init__.pyi      |    45 +
 valkey/commands/graph/commands.pyi      |    25 +
 valkey/commands/graph/edge.pyi          |    14 +
 valkey/commands/graph/exceptions.pyi    |     5 +
 valkey/commands/graph/execution_plan.py |     6 +-
 valkey/commands/graph/node.pyi          |    18 +
 valkey/commands/graph/path.pyi          |    18 +
 valkey/commands/graph/query_result.pyi  |    74 +
 valkey/commands/helpers.pyi             |    10 +
 valkey/commands/json/__init__.pyi       |    15 +
 valkey/commands/json/commands.pyi       |    32 +
 valkey/commands/json/decoders.pyi       |     4 +
 valkey/commands/json/path.pyi           |     5 +
 valkey/commands/parser.pyi              |     8 +
 valkey/commands/redismodules.pyi        |    14 +
 valkey/commands/search/__init__.pyi     |    40 +
 valkey/commands/search/aggregation.py   |     2 +-
 valkey/commands/search/aggregation.pyi  |    53 +
 valkey/commands/search/commands.pyi     |   111 +
 valkey/commands/search/field.py         |    14 +-
 valkey/commands/search/query.pyi        |    52 +
 valkey/commands/search/querystring.py   |     2 +-
 valkey/commands/search/reducers.py      |     2 +-
 valkey/commands/search/result.pyi       |     7 +
 valkey/commands/sentinel.pyi            |    17 +
 valkey/commands/timeseries/__init__.pyi |    14 +
 valkey/commands/timeseries/commands.pyi |   160 +
 valkey/commands/timeseries/info.pyi     |    18 +
 valkey/commands/timeseries/utils.pyi    |     5 +
 valkey/connection.pyi                   |   289 +
 valkey/crc.pyi                          |     5 +
 valkey/credentials.pyi                  |    11 +
 valkey/exceptions.pyi                   |    43 +
 valkey/lock.pyi                         |    56 +
 valkey/ocsp.pyi                         |    21 +
 valkey/retry.py                         |     2 +-
 valkey/retry.pyi                        |    11 +
 valkey/sentinel.pyi                     |    62 +
 valkey/typing.pyi                       |    34 +
 valkey/utils.pyi                        |    22 +
 71 files changed, 17592 insertions(+), 484 deletions(-)
 create mode 100644 .github/workflows/mypy.yml
 create mode 100644 types.patch
 create mode 100644 valkey/asyncio/__init__.pyi
 create mode 100644 valkey/asyncio/client.pyi
 create mode 100644 valkey/asyncio/cluster.pyi
 create mode 100644 valkey/asyncio/connection.pyi
 create mode 100644 valkey/asyncio/lock.pyi
 create mode 100644 valkey/asyncio/parser.pyi
 create mode 100644 valkey/asyncio/retry.pyi
 create mode 100644 valkey/asyncio/sentinel.pyi
 create mode 100644 valkey/asyncio/utils.pyi
 create mode 100644 valkey/backoff.pyi
 create mode 100644 valkey/client.pyi
 create mode 100644 valkey/cluster.pyi
 create mode 100644 valkey/commands/__init__.pyi
 create mode 100644 valkey/commands/bf/__init__.pyi
 create mode 100644 valkey/commands/bf/commands.pyi
 create mode 100644 valkey/commands/bf/info.pyi
 create mode 100644 valkey/commands/cluster.pyi
 create mode 100644 valkey/commands/core.pyi
 create mode 100644 valkey/commands/graph/__init__.pyi
 create mode 100644 valkey/commands/graph/commands.pyi
 create mode 100644 valkey/commands/graph/edge.pyi
 create mode 100644 valkey/commands/graph/exceptions.pyi
 create mode 100644 valkey/commands/graph/node.pyi
 create mode 100644 valkey/commands/graph/path.pyi
 create mode 100644 valkey/commands/graph/query_result.pyi
 create mode 100644 valkey/commands/helpers.pyi
 create mode 100644 valkey/commands/json/__init__.pyi
 create mode 100644 valkey/commands/json/commands.pyi
 create mode 100644 valkey/commands/json/decoders.pyi
 create mode 100644 valkey/commands/json/path.pyi
 create mode 100644 valkey/commands/parser.pyi
 create mode 100644 valkey/commands/redismodules.pyi
 create mode 100644 valkey/commands/search/__init__.pyi
 create mode 100644 valkey/commands/search/aggregation.pyi
 create mode 100644 valkey/commands/search/commands.pyi
 create mode 100644 valkey/commands/search/query.pyi
 create mode 100644 valkey/commands/search/result.pyi
 create mode 100644 valkey/commands/sentinel.pyi
 create mode 100644 valkey/commands/timeseries/__init__.pyi
 create mode 100644 valkey/commands/timeseries/commands.pyi
 create mode 100644 valkey/commands/timeseries/info.pyi
 create mode 100644 valkey/commands/timeseries/utils.pyi
 create mode 100644 valkey/connection.pyi
 create mode 100644 valkey/crc.pyi
 create mode 100644 valkey/credentials.pyi
 create mode 100644 valkey/exceptions.pyi
 create mode 100644 valkey/lock.pyi
 create mode 100644 valkey/ocsp.pyi
 create mode 100644 valkey/retry.pyi
 create mode 100644 valkey/sentinel.pyi
 create mode 100644 valkey/typing.pyi
 create mode 100644 valkey/utils.pyi

diff --git a/.github/workflows/mypy.yml b/.github/workflows/mypy.yml
new file mode 100644
index 00000000..e04b8a1f
--- /dev/null
+++ b/.github/workflows/mypy.yml
@@ -0,0 +1,36 @@
+name: Python application
+
+on:
+  push:
+    branches: [ types ]
+  pull_request:
+    branches: [ types ]
+
+jobs:
+  build:
+
+    runs-on: ubuntu-latest
+    strategy:
+      fail-fast: false
+      matrix:
+        python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]
+
+    steps:
+    - uses: actions/checkout@v4
+
+    - name: Set up Python ${{matrix.python-version}}
+      uses: actions/setup-python@v5
+      with:
+        python-version: ${{matrix.python-version}}
+
+    - name: Install package
+      run: |
+        pip install mypy cryptography pyopenssl requests
+        pip install types-setuptools
+        pip install types-cachetools
+        pip install -r dev_requirements.txt
+        pip install .[libvalkey]
+
+    - name: Run MyPy
+      run: |
+        mypy --exclude build .
diff --git a/.mypy.ini b/.mypy.ini
index 0d3b08d4..358ee392 100644
--- a/.mypy.ini
+++ b/.mypy.ini
@@ -1,24 +1,63 @@
 [mypy]
-#, docs/examples, tests
-files = valkey
-check_untyped_defs = True
-follow_imports_for_stubs = True
-#disallow_any_decorated = True
-disallow_subclassing_any = True
-#disallow_untyped_calls = True
-disallow_untyped_decorators = True
-#disallow_untyped_defs = True
-implicit_reexport = False
-no_implicit_optional = True
-show_error_codes = True
-strict_equality = True
-warn_incomplete_stub = True
-warn_redundant_casts = True
-warn_unreachable = True
-warn_unused_ignores = True
-disallow_any_unimported = True
-#warn_return_any = True
-
-[mypy-valkey.asyncio.lock]
-# TODO: Remove once locks has been rewritten
+strict = True
+show_error_context = True
+pretty = True
+exclude = docs|build
+
+# These next few are various gradations of forcing use of type annotations
+disallow_untyped_calls = False
+disallow_incomplete_defs = False
+disallow_untyped_defs = False
+
+# This one can be tricky to get passing if you use a lot of untyped libraries
+warn_return_any = False
+
+[mypy-valkey._parsers.*]
+ignore_errors = True
+
+[mypy-valkey._cache]
+ignore_errors = True
+
+[mypy-tests.*]
+ignore_errors = True
+[mypy-tests.test_bloom]
+ignore_errors = False
+[mypy-tests.test_asyncio.test_bloom]
+ignore_errors = False
+[mypy-tests.test_cache]
+ignore_errors = False
+[mypy-tests.test_asyncio.test_cache]
+ignore_errors = False
+[mypy-tests.test_commands]
+ignore_errors = False
+[mypy-tests.test_asyncio.test_commands]
+ignore_errors = False
+#[mypy-tests.test_cluster]
+#ignore_errors = False
+#[mypy-tests.test_asyncio.test_cluster]
+#ignore_errors = False
+#[mypy-tests.test_connection_pool]
+#ignore_errors = False
+#[mypy-tests.test_asyncio.test_connection_pool]
+#ignore_errors = False
+#[mypy-tests.test_connection]
+#ignore_errors = False
+#[mypy-tests.test_asyncio.test_connection]
+#ignore_errors = False
+[mypy-tests.test_pipeline]
+ignore_errors = False
+[mypy-tests.test_asyncio.test_pipeline]
+ignore_errors = False
+[mypy-tests.test_pubsub]
+ignore_errors = False
+[mypy-tests.test_asyncio.test_pubsub]
+ignore_errors = False
+
+[mypy-benchmarks.*]
+ignore_errors = True
+
+[mypy-whitelist]
+ignore_errors = True
+
+[mypy-tasks]
 ignore_errors = True
diff --git a/tests/test_asyncio/compat.py b/tests/test_asyncio/compat.py
index aa1dc49a..05760029 100644
--- a/tests/test_asyncio/compat.py
+++ b/tests/test_asyncio/compat.py
@@ -1,6 +1,8 @@
 import asyncio
 from unittest import mock
 
+__all__ = ["mock", "aclosing", "create_task"]
+
 try:
     mock.AsyncMock
 except AttributeError:
diff --git a/tests/test_asyncio/test_bloom.py b/tests/test_asyncio/test_bloom.py
index 04528c1c..8dac9936 100644
--- a/tests/test_asyncio/test_bloom.py
+++ b/tests/test_asyncio/test_bloom.py
@@ -16,7 +16,7 @@ def intlist(obj):
     return [int(v) for v in obj]
 
 
-async def test_create(decoded_r: valkey.Valkey):
+async def test_create(decoded_r: valkey.Valkey[str]):
     """Test CREATE/RESERVE calls"""
     assert await decoded_r.bf().create("bloom", 0.01, 1000)
     assert await decoded_r.bf().create("bloom_e", 0.01, 1000, expansion=1)
@@ -31,11 +31,11 @@ async def test_create(decoded_r: valkey.Valkey):
 
 
 @pytest.mark.experimental
-async def test_tdigest_create(decoded_r: valkey.Valkey):
+async def test_tdigest_create(decoded_r: valkey.Valkey[str]):
     assert await decoded_r.tdigest().create("tDigest", 100)
 
 
-async def test_bf_add(decoded_r: valkey.Valkey):
+async def test_bf_add(decoded_r: valkey.Valkey[str]):
     assert await decoded_r.bf().create("bloom", 0.01, 1000)
     assert 1 == await decoded_r.bf().add("bloom", "foo")
     assert 0 == await decoded_r.bf().add("bloom", "foo")
@@ -47,7 +47,7 @@ async def test_bf_add(decoded_r: valkey.Valkey):
     assert [1, 0] == intlist(await decoded_r.bf().mexists("bloom", "foo", "noexist"))
 
 
-async def test_bf_insert(decoded_r: valkey.Valkey):
+async def test_bf_insert(decoded_r: valkey.Valkey[str]):
     assert await decoded_r.bf().create("bloom", 0.01, 1000)
     assert [1] == intlist(await decoded_r.bf().insert("bloom", ["foo"]))
     assert [0, 1] == intlist(await decoded_r.bf().insert("bloom", ["foo", "bar"]))
@@ -77,7 +77,7 @@ async def test_bf_insert(decoded_r: valkey.Valkey):
     )
 
 
-async def test_bf_scandump_and_loadchunk(decoded_r: valkey.Valkey):
+async def test_bf_scandump_and_loadchunk(decoded_r: valkey.Valkey[str]):
     # Store a filter
     await decoded_r.bf().create("myBloom", "0.0001", "1000")
 
@@ -124,7 +124,7 @@ async def do_verify():
     await decoded_r.bf().create("myBloom", "0.0001", "10000000")
 
 
-async def test_bf_info(decoded_r: valkey.Valkey):
+async def test_bf_info(decoded_r: valkey.Valkey[str]):
     expansion = 4
     # Store a filter
     await decoded_r.bf().create("nonscaling", "0.0001", "1000", noScale=True)
@@ -155,7 +155,7 @@ async def test_bf_info(decoded_r: valkey.Valkey):
         assert True
 
 
-async def test_bf_card(decoded_r: valkey.Valkey):
+async def test_bf_card(decoded_r: valkey.Valkey[str]):
     # return 0 if the key does not exist
     assert await decoded_r.bf().card("not_exist") == 0
 
@@ -169,7 +169,7 @@ async def test_bf_card(decoded_r: valkey.Valkey):
         await decoded_r.bf().card("setKey")
 
 
-async def test_cf_add_and_insert(decoded_r: valkey.Valkey):
+async def test_cf_add_and_insert(decoded_r: valkey.Valkey[str]):
     assert await decoded_r.cf().create("cuckoo", 1000)
     assert await decoded_r.cf().add("cuckoo", "filter")
     assert not await decoded_r.cf().addnx("cuckoo", "filter")
@@ -194,7 +194,7 @@ async def test_cf_add_and_insert(decoded_r: valkey.Valkey):
     )
 
 
-async def test_cf_exists_and_del(decoded_r: valkey.Valkey):
+async def test_cf_exists_and_del(decoded_r: valkey.Valkey[str]):
     assert await decoded_r.cf().create("cuckoo", 1000)
     assert await decoded_r.cf().add("cuckoo", "filter")
     assert await decoded_r.cf().exists("cuckoo", "filter")
@@ -205,7 +205,7 @@ async def test_cf_exists_and_del(decoded_r: valkey.Valkey):
     assert 0 == await decoded_r.cf().count("cuckoo", "filter")
 
 
-async def test_cms(decoded_r: valkey.Valkey):
+async def test_cms(decoded_r: valkey.Valkey[str]):
     assert await decoded_r.cms().initbydim("dim", 1000, 5)
     assert await decoded_r.cms().initbyprob("prob", 0.01, 0.01)
     assert await decoded_r.cms().incrby("dim", ["foo"], [5])
@@ -221,7 +221,7 @@ async def test_cms(decoded_r: valkey.Valkey):
 
 
 @pytest.mark.onlynoncluster
-async def test_cms_merge(decoded_r: valkey.Valkey):
+async def test_cms_merge(decoded_r: valkey.Valkey[str]):
     assert await decoded_r.cms().initbydim("A", 1000, 5)
     assert await decoded_r.cms().initbydim("B", 1000, 5)
     assert await decoded_r.cms().initbydim("C", 1000, 5)
@@ -237,7 +237,7 @@ async def test_cms_merge(decoded_r: valkey.Valkey):
     assert [16, 15, 21] == await decoded_r.cms().query("C", "foo", "bar", "baz")
 
 
-async def test_topk(decoded_r: valkey.Valkey):
+async def test_topk(decoded_r: valkey.Valkey[str]):
     # test list with empty buckets
     assert await decoded_r.topk().reserve("topk", 3, 50, 4, 0.9)
     assert [
@@ -317,7 +317,7 @@ async def test_topk(decoded_r: valkey.Valkey):
     assert 0.9 == round(float(info["decay"]), 1)
 
 
-async def test_topk_incrby(decoded_r: valkey.Valkey):
+async def test_topk_incrby(decoded_r: valkey.Valkey[str]):
     await decoded_r.flushdb()
     assert await decoded_r.topk().reserve("topk", 3, 10, 3, 1)
     assert [None, None, None] == await decoded_r.topk().incrby(
@@ -332,7 +332,7 @@ async def test_topk_incrby(decoded_r: valkey.Valkey):
 
 
 @pytest.mark.experimental
-async def test_tdigest_reset(decoded_r: valkey.Valkey):
+async def test_tdigest_reset(decoded_r: valkey.Valkey[str]):
     assert await decoded_r.tdigest().create("tDigest", 10)
     # reset on empty histogram
     assert await decoded_r.tdigest().reset("tDigest")
@@ -348,7 +348,7 @@ async def test_tdigest_reset(decoded_r: valkey.Valkey):
 
 
 @pytest.mark.onlynoncluster
-async def test_tdigest_merge(decoded_r: valkey.Valkey):
+async def test_tdigest_merge(decoded_r: valkey.Valkey[str]):
     assert await decoded_r.tdigest().create("to-tDigest", 10)
     assert await decoded_r.tdigest().create("from-tDigest", 10)
     # insert data-points into sketch
@@ -375,7 +375,7 @@ async def test_tdigest_merge(decoded_r: valkey.Valkey):
 
 
 @pytest.mark.experimental
-async def test_tdigest_min_and_max(decoded_r: valkey.Valkey):
+async def test_tdigest_min_and_max(decoded_r: valkey.Valkey[str]):
     assert await decoded_r.tdigest().create("tDigest", 100)
     # insert data-points into sketch
     assert await decoded_r.tdigest().add("tDigest", [1, 2, 3])
@@ -385,8 +385,8 @@ async def test_tdigest_min_and_max(decoded_r: valkey.Valkey):
 
 
 @pytest.mark.experimental
-@skip_ifmodversion_lt("2.4.0", "bf")
-async def test_tdigest_quantile(decoded_r: valkey.Valkey):
+@skip_ifmodversion_lt("2.4.0", "bf")  # type: ignore[misc]
+async def test_tdigest_quantile(decoded_r: valkey.Valkey[str]):
     assert await decoded_r.tdigest().create("tDigest", 500)
     # insert data-points into sketch
     assert await decoded_r.tdigest().add(
@@ -413,7 +413,7 @@ async def test_tdigest_quantile(decoded_r: valkey.Valkey):
 
 
 @pytest.mark.experimental
-async def test_tdigest_cdf(decoded_r: valkey.Valkey):
+async def test_tdigest_cdf(decoded_r: valkey.Valkey[str]):
     assert await decoded_r.tdigest().create("tDigest", 100)
     # insert data-points into sketch
     assert await decoded_r.tdigest().add("tDigest", list(range(1, 10)))
@@ -424,8 +424,8 @@ async def test_tdigest_cdf(decoded_r: valkey.Valkey):
 
 
 @pytest.mark.experimental
-@skip_ifmodversion_lt("2.4.0", "bf")
-async def test_tdigest_trimmed_mean(decoded_r: valkey.Valkey):
+@skip_ifmodversion_lt("2.4.0", "bf")  # type: ignore[misc]
+async def test_tdigest_trimmed_mean(decoded_r: valkey.Valkey[str]):
     assert await decoded_r.tdigest().create("tDigest", 100)
     # insert data-points into sketch
     assert await decoded_r.tdigest().add("tDigest", list(range(1, 10)))
@@ -434,7 +434,7 @@ async def test_tdigest_trimmed_mean(decoded_r: valkey.Valkey):
 
 
 @pytest.mark.experimental
-async def test_tdigest_rank(decoded_r: valkey.Valkey):
+async def test_tdigest_rank(decoded_r: valkey.Valkey[str]):
     assert await decoded_r.tdigest().create("t-digest", 500)
     assert await decoded_r.tdigest().add("t-digest", list(range(0, 20)))
     assert -1 == (await decoded_r.tdigest().rank("t-digest", -1))[0]
@@ -444,7 +444,7 @@ async def test_tdigest_rank(decoded_r: valkey.Valkey):
 
 
 @pytest.mark.experimental
-async def test_tdigest_revrank(decoded_r: valkey.Valkey):
+async def test_tdigest_revrank(decoded_r: valkey.Valkey[str]):
     assert await decoded_r.tdigest().create("t-digest", 500)
     assert await decoded_r.tdigest().add("t-digest", list(range(0, 20)))
     assert -1 == (await decoded_r.tdigest().revrank("t-digest", 20))[0]
@@ -453,7 +453,7 @@ async def test_tdigest_revrank(decoded_r: valkey.Valkey):
 
 
 @pytest.mark.experimental
-async def test_tdigest_byrank(decoded_r: valkey.Valkey):
+async def test_tdigest_byrank(decoded_r: valkey.Valkey[str]):
     assert await decoded_r.tdigest().create("t-digest", 500)
     assert await decoded_r.tdigest().add("t-digest", list(range(1, 11)))
     assert 1 == (await decoded_r.tdigest().byrank("t-digest", 0))[0]
@@ -464,7 +464,7 @@ async def test_tdigest_byrank(decoded_r: valkey.Valkey):
 
 
 @pytest.mark.experimental
-async def test_tdigest_byrevrank(decoded_r: valkey.Valkey):
+async def test_tdigest_byrevrank(decoded_r: valkey.Valkey[str]):
     assert await decoded_r.tdigest().create("t-digest", 500)
     assert await decoded_r.tdigest().add("t-digest", list(range(1, 11)))
     assert 10 == (await decoded_r.tdigest().byrevrank("t-digest", 0))[0]
@@ -474,7 +474,7 @@ async def test_tdigest_byrevrank(decoded_r: valkey.Valkey):
         (await decoded_r.tdigest().byrevrank("t-digest", -1))[0]
 
 
-# # async def test_pipeline(decoded_r: valkey.Valkey):
+# # async def test_pipeline(decoded_r: valkey.Valkey[str]):
 #     pipeline = await decoded_r.bf().pipeline()
 #     assert not await decoded_r.bf().execute_command("get pipeline")
 #
diff --git a/tests/test_asyncio/test_commands.py b/tests/test_asyncio/test_commands.py
index 27fc2ccf..28e8dbca 100644
--- a/tests/test_asyncio/test_commands.py
+++ b/tests/test_asyncio/test_commands.py
@@ -2,6 +2,8 @@
 Tests async overrides of commands from their mixins
 """
 
+from __future__ import annotations
+
 import asyncio
 import binascii
 import datetime
@@ -9,7 +11,7 @@
 import re
 import sys
 from string import ascii_letters
-from typing import Any, Dict, List
+from typing import Any
 
 import pytest
 import pytest_asyncio
@@ -35,15 +37,15 @@
 from valkey.client import EMPTY_RESPONSE, NEVER_DECODE
 
 if sys.version_info >= (3, 11, 3):
-    from asyncio import timeout as async_timeout
+    from asyncio import timeout as async_timeout  # type: ignore[unused-ignore,assignment,no-redef,import-not-found,attr-defined]
 else:
-    from async_timeout import timeout as async_timeout
+    from async_timeout import timeout as async_timeout  # type: ignore[unused-ignore,assignment,no-redef,import-not-found]
 
 VALKEY_6_VERSION = "5.9.0"
 
 
 @pytest_asyncio.fixture()
-async def r_teardown(r: valkey.Valkey):
+async def r_teardown(r: valkey.asyncio.Valkey[str]):
     """
     A special fixture which removes the provided names from the database after use
     """
@@ -59,7 +61,7 @@ def factory(username):
 
 
 @pytest_asyncio.fixture()
-async def slowlog(r: valkey.Valkey):
+async def slowlog(r: valkey.asyncio.Valkey[str]):
     current_config = await r.config_get()
     old_slower_than_value = current_config["slowlog-log-slower-than"]
     old_max_legnth_value = current_config["slowlog-max-len"]
@@ -73,13 +75,13 @@ async def slowlog(r: valkey.Valkey):
     await r.config_set("slowlog-max-len", old_max_legnth_value)
 
 
-async def valkey_server_time(client: valkey.Valkey):
+async def valkey_server_time(client: valkey.asyncio.Valkey[bytes]):
     seconds, milliseconds = await client.time()
     timestamp = float(f"{seconds}.{milliseconds}")
     return datetime.datetime.fromtimestamp(timestamp)
 
 
-async def get_stream_message(client: valkey.Valkey, stream: str, message_id: str):
+async def get_stream_message(client: valkey.asyncio.Valkey[str], stream: str, message_id: str):
     """Fetch a stream message and format it as a (message_id, fields) pair"""
     response = await client.xrange(stream, min=message_id, max=message_id)
     assert len(response) == 1
@@ -91,7 +93,7 @@ async def get_stream_message(client: valkey.Valkey, stream: str, message_id: str
 class TestResponseCallbacks:
     """Tests for the response callback system"""
 
-    async def test_response_callbacks(self, r: valkey.Valkey):
+    async def test_response_callbacks(self, r: valkey.asyncio.Valkey[str]):
         callbacks = _ValkeyCallbacks
         if is_resp2_connection(r):
             callbacks.update(_ValkeyCallbacksRESP2)
@@ -99,32 +101,32 @@ async def test_response_callbacks(self, r: valkey.Valkey):
             callbacks.update(_ValkeyCallbacksRESP3)
         assert r.response_callbacks == callbacks
         assert id(r.response_callbacks) != id(_ValkeyCallbacks)
-        r.set_response_callback("GET", lambda x: "static")
+        r.set_response_callback("GET", lambda x: "static")  # type: ignore[arg-type]
         await r.set("a", "foo")
         assert await r.get("a") == "static"
 
-    async def test_case_insensitive_command_names(self, r: valkey.Valkey):
+    async def test_case_insensitive_command_names(self, r: valkey.asyncio.Valkey[str]):
         assert r.response_callbacks["ping"] == r.response_callbacks["PING"]
 
 
 class TestValkeyCommands:
-    async def test_command_on_invalid_key_type(self, r: valkey.Valkey):
+    async def test_command_on_invalid_key_type(self, r: valkey.asyncio.Valkey[str]):
         await r.lpush("a", "1")
         with pytest.raises(valkey.ResponseError):
             await r.get("a")
 
     # SERVER INFORMATION
     @skip_if_server_version_lt(VALKEY_6_VERSION)
-    async def test_acl_cat_no_category(self, r: valkey.Valkey):
+    async def test_acl_cat_no_category(self, r: valkey.asyncio.Valkey[str]):
         categories = await r.acl_cat()
         assert isinstance(categories, list)
-        assert "read" in categories or b"read" in categories
+        assert "read" in categories or b"read" in categories  # type: ignore[comparison-overlap]
 
     @skip_if_server_version_lt(VALKEY_6_VERSION)
-    async def test_acl_cat_with_category(self, r: valkey.Valkey):
+    async def test_acl_cat_with_category(self, r: valkey.asyncio.Valkey[str]):
         commands = await r.acl_cat("read")
         assert isinstance(commands, list)
-        assert "get" in commands or b"get" in commands
+        assert "get" in commands or b"get" in commands  # type: ignore[comparison-overlap]
 
     @skip_if_server_version_lt(VALKEY_6_VERSION)
     async def test_acl_deluser(self, r_teardown):
@@ -136,7 +138,7 @@ async def test_acl_deluser(self, r_teardown):
         assert await r.acl_deluser(username) == 1
 
     @skip_if_server_version_lt(VALKEY_6_VERSION)
-    async def test_acl_genpass(self, r: valkey.Valkey):
+    async def test_acl_genpass(self, r: valkey.asyncio.Valkey[str]):
         password = await r.acl_genpass()
         assert isinstance(password, (str, bytes))
 
@@ -311,24 +313,24 @@ async def test_acl_setuser_add_passwords_and_nopass_fails(self, r_teardown):
             await r.acl_setuser(username, passwords="+mypass", nopass=True)
 
     @skip_if_server_version_lt(VALKEY_6_VERSION)
-    async def test_acl_users(self, r: valkey.Valkey):
+    async def test_acl_users(self, r: valkey.asyncio.Valkey[str]):
         users = await r.acl_users()
         assert isinstance(users, list)
         assert len(users) > 0
 
     @skip_if_server_version_lt(VALKEY_6_VERSION)
-    async def test_acl_whoami(self, r: valkey.Valkey):
+    async def test_acl_whoami(self, r: valkey.asyncio.Valkey[str]):
         username = await r.acl_whoami()
         assert isinstance(username, (str, bytes))
 
     @pytest.mark.onlynoncluster
-    async def test_client_list(self, r: valkey.Valkey):
+    async def test_client_list(self, r: valkey.asyncio.Valkey[str]):
         clients = await r.client_list()
         assert isinstance(clients[0], dict)
         assert "addr" in clients[0]
 
     @skip_if_server_version_lt("5.0.0")
-    async def test_client_list_type(self, r: valkey.Valkey):
+    async def test_client_list_type(self, r: valkey.asyncio.Valkey[str]):
         with pytest.raises(exceptions.ValkeyError):
             await r.client_list(_type="not a client type")
         for client_type in ["normal", "master", "replica", "pubsub"]:
@@ -337,12 +339,12 @@ async def test_client_list_type(self, r: valkey.Valkey):
 
     @skip_if_server_version_lt("5.0.0")
     @pytest.mark.onlynoncluster
-    async def test_client_id(self, r: valkey.Valkey):
+    async def test_client_id(self, r: valkey.asyncio.Valkey[str]):
         assert await r.client_id() > 0
 
     @skip_if_server_version_lt("5.0.0")
     @pytest.mark.onlynoncluster
-    async def test_client_unblock(self, r: valkey.Valkey):
+    async def test_client_unblock(self, r: valkey.asyncio.Valkey[str]):
         myid = await r.client_id()
         assert not await r.client_unblock(myid)
         assert not await r.client_unblock(myid, error=True)
@@ -350,19 +352,19 @@ async def test_client_unblock(self, r: valkey.Valkey):
 
     @skip_if_server_version_lt("2.6.9")
     @pytest.mark.onlynoncluster
-    async def test_client_getname(self, r: valkey.Valkey):
+    async def test_client_getname(self, r: valkey.asyncio.Valkey[str]):
         assert await r.client_getname() is None
 
     @skip_if_server_version_lt("2.6.9")
     @pytest.mark.onlynoncluster
-    async def test_client_setname(self, r: valkey.Valkey):
+    async def test_client_setname(self, r: valkey.asyncio.Valkey[str]):
         assert await r.client_setname("valkey_py_test")
         assert_resp_response(
             r, await r.client_getname(), "valkey_py_test", b"valkey_py_test"
         )
 
     @skip_if_server_version_lt("7.2.0")
-    async def test_client_setinfo(self, r: valkey.Valkey):
+    async def test_client_setinfo(self, r: valkey.asyncio.Valkey[str]):
         await r.ping()
         info = await r.client_info()
         assert info["lib-name"] == "valkey-py"
@@ -385,7 +387,7 @@ async def test_client_setinfo(self, r: valkey.Valkey):
 
     @skip_if_server_version_lt("2.6.9")
     @pytest.mark.onlynoncluster
-    async def test_client_kill(self, r: valkey.Valkey, r2):
+    async def test_client_kill(self, r: valkey.asyncio.Valkey[str], r2):
         await r.client_setname("valkey-py-c1")
         await r2.client_setname("valkey-py-c2")
         clients = [
@@ -398,7 +400,7 @@ async def test_client_kill(self, r: valkey.Valkey, r2):
         clients_by_name = {client.get("name"): client for client in clients}
 
         client_addr = clients_by_name["valkey-py-c2"].get("addr")
-        assert await r.client_kill(client_addr) is True
+        assert await r.client_kill(client_addr) is True  # type: ignore[arg-type]
 
         clients = [
             client
@@ -409,22 +411,22 @@ async def test_client_kill(self, r: valkey.Valkey, r2):
         assert clients[0].get("name") == "valkey-py-c1"
 
     @skip_if_server_version_lt("2.8.12")
-    async def test_client_kill_filter_invalid_params(self, r: valkey.Valkey):
+    async def test_client_kill_filter_invalid_params(self, r: valkey.asyncio.Valkey[str]):
         # empty
         with pytest.raises(exceptions.DataError):
             await r.client_kill_filter()
 
         # invalid skipme
         with pytest.raises(exceptions.DataError):
-            await r.client_kill_filter(skipme="yeah")  # type: ignore
+            await r.client_kill_filter(skipme="yeah")
 
         # invalid type
         with pytest.raises(exceptions.DataError):
-            await r.client_kill_filter(_type="caster")  # type: ignore
+            await r.client_kill_filter(_type="caster")
 
     @skip_if_server_version_lt("2.8.12")
     @pytest.mark.onlynoncluster
-    async def test_client_kill_filter_by_id(self, r: valkey.Valkey, r2):
+    async def test_client_kill_filter_by_id(self, r: valkey.asyncio.Valkey[str], r2):
         await r.client_setname("valkey-py-c1")
         await r2.client_setname("valkey-py-c2")
         clients = [
@@ -450,7 +452,7 @@ async def test_client_kill_filter_by_id(self, r: valkey.Valkey, r2):
 
     @skip_if_server_version_lt("2.8.12")
     @pytest.mark.onlynoncluster
-    async def test_client_kill_filter_by_addr(self, r: valkey.Valkey, r2):
+    async def test_client_kill_filter_by_addr(self, r: valkey.asyncio.Valkey[str], r2):
         await r.client_setname("valkey-py-c1")
         await r2.client_setname("valkey-py-c2")
         clients = [
@@ -475,7 +477,7 @@ async def test_client_kill_filter_by_addr(self, r: valkey.Valkey, r2):
         assert clients[0].get("name") == "valkey-py-c1"
 
     @skip_if_server_version_lt("2.6.9")
-    async def test_client_list_after_client_setname(self, r: valkey.Valkey):
+    async def test_client_list_after_client_setname(self, r: valkey.asyncio.Valkey[str]):
         await r.client_setname("valkey_py_test")
         clients = await r.client_list()
         # we don't know which client ours will be
@@ -483,7 +485,7 @@ async def test_client_list_after_client_setname(self, r: valkey.Valkey):
 
     @skip_if_server_version_lt("2.9.50")
     @pytest.mark.onlynoncluster
-    async def test_client_pause(self, r: valkey.Valkey):
+    async def test_client_pause(self, r: valkey.asyncio.Valkey[str]):
         assert await r.client_pause(1)
         assert await r.client_pause(timeout=1)
         with pytest.raises(exceptions.ValkeyError):
@@ -491,19 +493,19 @@ async def test_client_pause(self, r: valkey.Valkey):
 
     @skip_if_server_version_lt("7.2.0")
     @pytest.mark.onlynoncluster
-    async def test_client_no_touch(self, r: valkey.Valkey):
+    async def test_client_no_touch(self, r: valkey.asyncio.Valkey[bytes]):
         assert await r.client_no_touch("ON") == b"OK"
         assert await r.client_no_touch("OFF") == b"OK"
         with pytest.raises(TypeError):
-            await r.client_no_touch()
+            await r.client_no_touch()  # type: ignore[call-arg]
 
-    async def test_config_get(self, r: valkey.Valkey):
+    async def test_config_get(self, r: valkey.asyncio.Valkey[str]):
         data = await r.config_get()
         assert "maxmemory" in data
         assert data["maxmemory"].isdigit()
 
     @pytest.mark.onlynoncluster
-    async def test_config_resetstat(self, r: valkey.Valkey):
+    async def test_config_resetstat(self, r: valkey.asyncio.Valkey[str]):
         await r.ping()
         prior_commands_processed = int((await r.info())["total_commands_processed"])
         assert prior_commands_processed >= 1
@@ -511,24 +513,24 @@ async def test_config_resetstat(self, r: valkey.Valkey):
         reset_commands_processed = int((await r.info())["total_commands_processed"])
         assert reset_commands_processed < prior_commands_processed
 
-    async def test_config_set(self, r: valkey.Valkey):
+    async def test_config_set(self, r: valkey.asyncio.Valkey[str]):
         await r.config_set("timeout", 70)
         assert (await r.config_get())["timeout"] == "70"
         assert await r.config_set("timeout", 0)
         assert (await r.config_get())["timeout"] == "0"
 
     @pytest.mark.onlynoncluster
-    async def test_dbsize(self, r: valkey.Valkey):
+    async def test_dbsize(self, r: valkey.asyncio.Valkey[str]):
         await r.set("a", "foo")
         await r.set("b", "bar")
         assert await r.dbsize() == 2
 
     @pytest.mark.onlynoncluster
-    async def test_echo(self, r: valkey.Valkey):
+    async def test_echo(self, r: valkey.asyncio.Valkey[str]):
         assert await r.echo("foo bar") == b"foo bar"
 
     @pytest.mark.onlynoncluster
-    async def test_info(self, r: valkey.Valkey):
+    async def test_info(self, r: valkey.asyncio.Valkey[str]):
         await r.set("a", "foo")
         await r.set("b", "bar")
         info = await r.info()
@@ -537,21 +539,21 @@ async def test_info(self, r: valkey.Valkey):
         assert "valkey_version" in info.keys()
 
     @pytest.mark.onlynoncluster
-    async def test_lastsave(self, r: valkey.Valkey):
+    async def test_lastsave(self, r: valkey.asyncio.Valkey[str]):
         assert isinstance(await r.lastsave(), datetime.datetime)
 
-    async def test_object(self, r: valkey.Valkey):
+    async def test_object(self, r: valkey.asyncio.Valkey[str]):
         await r.set("a", "foo")
         assert isinstance(await r.object("refcount", "a"), int)
         assert isinstance(await r.object("idletime", "a"), int)
         assert await r.object("encoding", "a") in (b"raw", b"embstr")
         assert await r.object("idletime", "invalid-key") is None
 
-    async def test_ping(self, r: valkey.Valkey):
+    async def test_ping(self, r: valkey.asyncio.Valkey[str]):
         assert await r.ping()
 
     @pytest.mark.onlynoncluster
-    async def test_slowlog_get(self, r: valkey.Valkey, slowlog):
+    async def test_slowlog_get(self, r: valkey.asyncio.Valkey[str], slowlog):
         assert await r.slowlog_reset()
         unicode_string = chr(3456) + "abcd" + chr(3421)
         await r.get(unicode_string)
@@ -573,7 +575,7 @@ async def test_slowlog_get(self, r: valkey.Valkey, slowlog):
         assert isinstance(slowlog[0]["duration"], int)
 
     @pytest.mark.onlynoncluster
-    async def test_slowlog_get_limit(self, r: valkey.Valkey, slowlog):
+    async def test_slowlog_get_limit(self, r: valkey.asyncio.Valkey[str], slowlog):
         assert await r.slowlog_reset()
         await r.get("foo")
         slowlog = await r.slowlog_get(1)
@@ -582,36 +584,36 @@ async def test_slowlog_get_limit(self, r: valkey.Valkey, slowlog):
         assert len(slowlog) == 1
 
     @pytest.mark.onlynoncluster
-    async def test_slowlog_length(self, r: valkey.Valkey, slowlog):
+    async def test_slowlog_length(self, r: valkey.asyncio.Valkey[str], slowlog):
         await r.get("foo")
         assert isinstance(await r.slowlog_len(), int)
 
     @skip_if_server_version_lt("2.6.0")
-    async def test_time(self, r: valkey.Valkey):
+    async def test_time(self, r: valkey.asyncio.Valkey[str]):
         t = await r.time()
         assert len(t) == 2
         assert isinstance(t[0], int)
         assert isinstance(t[1], int)
 
-    async def test_never_decode_option(self, r: valkey.Valkey):
-        opts = {NEVER_DECODE: []}
+    async def test_never_decode_option(self, r: valkey.asyncio.Valkey[str]):
+        opts: dict[str, list[str]] = {NEVER_DECODE: []}
         await r.delete("a")
         assert await r.execute_command("EXISTS", "a", **opts) == 0
 
-    async def test_empty_response_option(self, r: valkey.Valkey):
-        opts = {EMPTY_RESPONSE: []}
+    async def test_empty_response_option(self, r: valkey.asyncio.Valkey[str]):
+        opts: dict[str, list[str]] = {EMPTY_RESPONSE: []}
         await r.delete("a")
         assert await r.execute_command("EXISTS", "a", **opts) == 0
 
     # BASIC KEY COMMANDS
-    async def test_append(self, r: valkey.Valkey):
+    async def test_append(self, r: valkey.asyncio.Valkey[bytes]):
         assert await r.append("a", "a1") == 2
         assert await r.get("a") == b"a1"
         assert await r.append("a", "a2") == 4
         assert await r.get("a") == b"a1a2"
 
     @skip_if_server_version_lt("2.6.0")
-    async def test_bitcount(self, r: valkey.Valkey):
+    async def test_bitcount(self, r: valkey.asyncio.Valkey[str]):
         await r.setbit("a", 5, True)
         assert await r.bitcount("a") == 1
         await r.setbit("a", 6, True)
@@ -631,32 +633,32 @@ async def test_bitcount(self, r: valkey.Valkey):
 
     @skip_if_server_version_lt("2.6.0")
     @pytest.mark.onlynoncluster
-    async def test_bitop_not_empty_string(self, r: valkey.Valkey):
+    async def test_bitop_not_empty_string(self, r: valkey.asyncio.Valkey[str]):
         await r.set("a", "")
         await r.bitop("not", "r", "a")
         assert await r.get("r") is None
 
     @skip_if_server_version_lt("2.6.0")
     @pytest.mark.onlynoncluster
-    async def test_bitop_not(self, r: valkey.Valkey):
+    async def test_bitop_not(self, r: valkey.asyncio.Valkey[str]):
         test_str = b"\xAA\x00\xFF\x55"
         correct = ~0xAA00FF55 & 0xFFFFFFFF
         await r.set("a", test_str)
         await r.bitop("not", "r", "a")
-        assert int(binascii.hexlify(await r.get("r")), 16) == correct
+        assert int(binascii.hexlify(await r.get("r")), 16) == correct  # type: ignore[arg-type]
 
     @skip_if_server_version_lt("2.6.0")
     @pytest.mark.onlynoncluster
-    async def test_bitop_not_in_place(self, r: valkey.Valkey):
+    async def test_bitop_not_in_place(self, r: valkey.asyncio.Valkey[str]):
         test_str = b"\xAA\x00\xFF\x55"
         correct = ~0xAA00FF55 & 0xFFFFFFFF
         await r.set("a", test_str)
         await r.bitop("not", "a", "a")
-        assert int(binascii.hexlify(await r.get("a")), 16) == correct
+        assert int(binascii.hexlify(await r.get("a")), 16) == correct  # type: ignore[arg-type]
 
     @skip_if_server_version_lt("2.6.0")
     @pytest.mark.onlynoncluster
-    async def test_bitop_single_string(self, r: valkey.Valkey):
+    async def test_bitop_single_string(self, r: valkey.asyncio.Valkey[bytes]):
         test_str = b"\x01\x02\xFF"
         await r.set("a", test_str)
         await r.bitop("and", "res1", "a")
@@ -668,19 +670,19 @@ async def test_bitop_single_string(self, r: valkey.Valkey):
 
     @skip_if_server_version_lt("2.6.0")
     @pytest.mark.onlynoncluster
-    async def test_bitop_string_operands(self, r: valkey.Valkey):
+    async def test_bitop_string_operands(self, r: valkey.asyncio.Valkey[str]):
         await r.set("a", b"\x01\x02\xFF\xFF")
         await r.set("b", b"\x01\x02\xFF")
         await r.bitop("and", "res1", "a", "b")
         await r.bitop("or", "res2", "a", "b")
         await r.bitop("xor", "res3", "a", "b")
-        assert int(binascii.hexlify(await r.get("res1")), 16) == 0x0102FF00
-        assert int(binascii.hexlify(await r.get("res2")), 16) == 0x0102FFFF
-        assert int(binascii.hexlify(await r.get("res3")), 16) == 0x000000FF
+        assert int(binascii.hexlify(await r.get("res1")), 16) == 0x0102FF00  # type: ignore[arg-type]
+        assert int(binascii.hexlify(await r.get("res2")), 16) == 0x0102FFFF  # type: ignore[arg-type]
+        assert int(binascii.hexlify(await r.get("res3")), 16) == 0x000000FF  # type: ignore[arg-type]
 
     @pytest.mark.onlynoncluster
     @skip_if_server_version_lt("2.8.7")
-    async def test_bitpos(self, r: valkey.Valkey):
+    async def test_bitpos(self, r: valkey.asyncio.Valkey[str]):
         key = "key:bitpos"
         await r.set(key, b"\xff\xf0\x00")
         assert await r.bitpos(key, 0) == 12
@@ -693,7 +695,7 @@ async def test_bitpos(self, r: valkey.Valkey):
         assert await r.bitpos(key, 1) == -1
 
     @skip_if_server_version_lt("2.8.7")
-    async def test_bitpos_wrong_arguments(self, r: valkey.Valkey):
+    async def test_bitpos_wrong_arguments(self, r: valkey.asyncio.Valkey[str]):
         key = "key:bitpos:wrong:args"
         await r.set(key, b"\xff\xf0\x00")
         with pytest.raises(exceptions.ValkeyError):
@@ -701,7 +703,7 @@ async def test_bitpos_wrong_arguments(self, r: valkey.Valkey):
         with pytest.raises(exceptions.ValkeyError):
             await r.bitpos(key, 7) == 12
 
-    async def test_decr(self, r: valkey.Valkey):
+    async def test_decr(self, r: valkey.asyncio.Valkey[bytes]):
         assert await r.decr("a") == -1
         assert await r.get("a") == b"-1"
         assert await r.decr("a") == -2
@@ -709,37 +711,37 @@ async def test_decr(self, r: valkey.Valkey):
         assert await r.decr("a", amount=5) == -7
         assert await r.get("a") == b"-7"
 
-    async def test_decrby(self, r: valkey.Valkey):
+    async def test_decrby(self, r: valkey.asyncio.Valkey[bytes]):
         assert await r.decrby("a", amount=2) == -2
         assert await r.decrby("a", amount=3) == -5
         assert await r.get("a") == b"-5"
 
-    async def test_delete(self, r: valkey.Valkey):
+    async def test_delete(self, r: valkey.asyncio.Valkey[str]):
         assert await r.delete("a") == 0
         await r.set("a", "foo")
         assert await r.delete("a") == 1
 
-    async def test_delete_with_multiple_keys(self, r: valkey.Valkey):
+    async def test_delete_with_multiple_keys(self, r: valkey.asyncio.Valkey[str]):
         await r.set("a", "foo")
         await r.set("b", "bar")
         assert await r.delete("a", "b") == 2
         assert await r.get("a") is None
         assert await r.get("b") is None
 
-    async def test_delitem(self, r: valkey.Valkey):
+    async def test_delitem(self, r: valkey.asyncio.Valkey[str]):
         await r.set("a", "foo")
         await r.delete("a")
         assert await r.get("a") is None
 
     @skip_if_server_version_lt("4.0.0")
-    async def test_unlink(self, r: valkey.Valkey):
+    async def test_unlink(self, r: valkey.asyncio.Valkey[str]):
         assert await r.unlink("a") == 0
         await r.set("a", "foo")
         assert await r.unlink("a") == 1
         assert await r.get("a") is None
 
     @skip_if_server_version_lt("4.0.0")
-    async def test_unlink_with_multiple_keys(self, r: valkey.Valkey):
+    async def test_unlink_with_multiple_keys(self, r: valkey.asyncio.Valkey[str]):
         await r.set("a", "foo")
         await r.set("b", "bar")
         assert await r.unlink("a", "b") == 2
@@ -747,7 +749,7 @@ async def test_unlink_with_multiple_keys(self, r: valkey.Valkey):
         assert await r.get("b") is None
 
     @skip_if_server_version_lt("2.6.0")
-    async def test_dump_and_restore(self, r: valkey.Valkey):
+    async def test_dump_and_restore(self, r: valkey.asyncio.Valkey[bytes]):
         await r.set("a", "foo")
         dumped = await r.dump("a")
         await r.delete("a")
@@ -755,7 +757,7 @@ async def test_dump_and_restore(self, r: valkey.Valkey):
         assert await r.get("a") == b"foo"
 
     @skip_if_server_version_lt("3.0.0")
-    async def test_dump_and_restore_and_replace(self, r: valkey.Valkey):
+    async def test_dump_and_restore_and_replace(self, r: valkey.asyncio.Valkey[bytes]):
         await r.set("a", "bar")
         dumped = await r.dump("a")
         with pytest.raises(valkey.ResponseError):
@@ -765,7 +767,7 @@ async def test_dump_and_restore_and_replace(self, r: valkey.Valkey):
         assert await r.get("a") == b"bar"
 
     @skip_if_server_version_lt("5.0.0")
-    async def test_dump_and_restore_absttl(self, r: valkey.Valkey):
+    async def test_dump_and_restore_absttl(self, r: valkey.asyncio.Valkey[bytes]):
         await r.set("a", "foo")
         dumped = await r.dump("a")
         await r.delete("a")
@@ -777,19 +779,19 @@ async def test_dump_and_restore_absttl(self, r: valkey.Valkey):
         assert await r.get("a") == b"foo"
         assert 0 < await r.ttl("a") <= 61
 
-    async def test_exists(self, r: valkey.Valkey):
+    async def test_exists(self, r: valkey.asyncio.Valkey[str]):
         assert await r.exists("a") == 0
         await r.set("a", "foo")
         await r.set("b", "bar")
         assert await r.exists("a") == 1
         assert await r.exists("a", "b") == 2
 
-    async def test_exists_contains(self, r: valkey.Valkey):
+    async def test_exists_contains(self, r: valkey.asyncio.Valkey[str]):
         assert not await r.exists("a")
         await r.set("a", "foo")
         assert await r.exists("a")
 
-    async def test_expire(self, r: valkey.Valkey):
+    async def test_expire(self, r: valkey.asyncio.Valkey[str]):
         assert not await r.expire("a", 10)
         await r.set("a", "foo")
         assert await r.expire("a", 10)
@@ -797,24 +799,24 @@ async def test_expire(self, r: valkey.Valkey):
         assert await r.persist("a")
         assert await r.ttl("a") == -1
 
-    async def test_expireat_datetime(self, r: valkey.Valkey):
+    async def test_expireat_datetime(self, r: valkey.asyncio.Valkey[bytes]):
         expire_at = await valkey_server_time(r) + datetime.timedelta(minutes=1)
         await r.set("a", "foo")
         assert await r.expireat("a", expire_at)
         assert 0 < await r.ttl("a") <= 61
 
-    async def test_expireat_no_key(self, r: valkey.Valkey):
+    async def test_expireat_no_key(self, r: valkey.asyncio.Valkey[bytes]):
         expire_at = await valkey_server_time(r) + datetime.timedelta(minutes=1)
         assert not await r.expireat("a", expire_at)
 
-    async def test_expireat_unixtime(self, r: valkey.Valkey):
+    async def test_expireat_unixtime(self, r: valkey.asyncio.Valkey[bytes]):
         expire_at = await valkey_server_time(r) + datetime.timedelta(minutes=1)
         await r.set("a", "foo")
         expire_at_seconds = int(expire_at.timestamp())
         assert await r.expireat("a", expire_at_seconds)
         assert 0 < await r.ttl("a") <= 61
 
-    async def test_get_and_set(self, r: valkey.Valkey):
+    async def test_get_and_set(self, r: valkey.asyncio.Valkey[bytes]):
         # get and set can't be tested independently of each other
         assert await r.get("a") is None
         byte_string = b"value"
@@ -825,9 +827,9 @@ async def test_get_and_set(self, r: valkey.Valkey):
         assert await r.set("unicode_string", unicode_string)
         assert await r.get("byte_string") == byte_string
         assert await r.get("integer") == str(integer).encode()
-        assert (await r.get("unicode_string")).decode("utf-8") == unicode_string
+        assert (await r.get("unicode_string")).decode("utf-8") == unicode_string  # type: ignore[union-attr]
 
-    async def test_get_set_bit(self, r: valkey.Valkey):
+    async def test_get_set_bit(self, r: valkey.asyncio.Valkey[str]):
         # no value
         assert not await r.getbit("a", 5)
         # set bit 5
@@ -843,18 +845,18 @@ async def test_get_set_bit(self, r: valkey.Valkey):
         assert await r.setbit("a", 5, True)
         assert await r.getbit("a", 5)
 
-    async def test_getrange(self, r: valkey.Valkey):
+    async def test_getrange(self, r: valkey.asyncio.Valkey[bytes]):
         await r.set("a", "foo")
         assert await r.getrange("a", 0, 0) == b"f"
         assert await r.getrange("a", 0, 2) == b"foo"
         assert await r.getrange("a", 3, 4) == b""
 
-    async def test_getset(self, r: valkey.Valkey):
+    async def test_getset(self, r: valkey.asyncio.Valkey[bytes]):
         assert await r.getset("a", "foo") is None
         assert await r.getset("a", "bar") == b"foo"
         assert await r.get("a") == b"bar"
 
-    async def test_incr(self, r: valkey.Valkey):
+    async def test_incr(self, r: valkey.asyncio.Valkey[bytes]):
         assert await r.incr("a") == 1
         assert await r.get("a") == b"1"
         assert await r.incr("a") == 2
@@ -862,20 +864,20 @@ async def test_incr(self, r: valkey.Valkey):
         assert await r.incr("a", amount=5) == 7
         assert await r.get("a") == b"7"
 
-    async def test_incrby(self, r: valkey.Valkey):
+    async def test_incrby(self, r: valkey.asyncio.Valkey[bytes]):
         assert await r.incrby("a") == 1
         assert await r.incrby("a", 4) == 5
         assert await r.get("a") == b"5"
 
     @skip_if_server_version_lt("2.6.0")
-    async def test_incrbyfloat(self, r: valkey.Valkey):
+    async def test_incrbyfloat(self, r: valkey.asyncio.Valkey[bytes]):
         assert await r.incrbyfloat("a") == 1.0
         assert await r.get("a") == b"1"
         assert await r.incrbyfloat("a", 1.1) == 2.1
-        assert float(await r.get("a")) == float(2.1)
+        assert float(await r.get("a")) == float(2.1)  # type: ignore[arg-type]
 
     @pytest.mark.onlynoncluster
-    async def test_keys(self, r: valkey.Valkey):
+    async def test_keys(self, r: valkey.asyncio.Valkey[bytes]):
         assert await r.keys() == []
         keys_with_underscores = {b"test_a", b"test_b"}
         keys = keys_with_underscores.union({b"testc"})
@@ -885,7 +887,7 @@ async def test_keys(self, r: valkey.Valkey):
         assert set(await r.keys(pattern="test*")) == keys
 
     @pytest.mark.onlynoncluster
-    async def test_mget(self, r: valkey.Valkey):
+    async def test_mget(self, r: valkey.asyncio.Valkey[bytes]):
         assert await r.mget([]) == []
         assert await r.mget(["a", "b"]) == [None, None]
         await r.set("a", "1")
@@ -894,24 +896,24 @@ async def test_mget(self, r: valkey.Valkey):
         assert await r.mget("a", "other", "b", "c") == [b"1", None, b"2", b"3"]
 
     @pytest.mark.onlynoncluster
-    async def test_mset(self, r: valkey.Valkey):
+    async def test_mset(self, r: valkey.asyncio.Valkey[bytes]):
         d = {"a": b"1", "b": b"2", "c": b"3"}
-        assert await r.mset(d)
+        assert await r.mset(d)  # type: ignore[arg-type]
         for k, v in d.items():
             assert await r.get(k) == v
 
     @pytest.mark.onlynoncluster
-    async def test_msetnx(self, r: valkey.Valkey):
+    async def test_msetnx(self, r: valkey.asyncio.Valkey[bytes]):
         d = {"a": b"1", "b": b"2", "c": b"3"}
-        assert await r.msetnx(d)
+        assert await r.msetnx(d)  # type: ignore[arg-type]
         d2 = {"a": b"x", "d": b"4"}
-        assert not await r.msetnx(d2)
+        assert not await r.msetnx(d2)  # type: ignore[arg-type]
         for k, v in d.items():
             assert await r.get(k) == v
         assert await r.get("d") is None
 
     @skip_if_server_version_lt("2.6.0")
-    async def test_pexpire(self, r: valkey.Valkey):
+    async def test_pexpire(self, r: valkey.asyncio.Valkey[str]):
         assert not await r.pexpire("a", 60000)
         await r.set("a", "foo")
         assert await r.pexpire("a", 60000)
@@ -920,19 +922,19 @@ async def test_pexpire(self, r: valkey.Valkey):
         assert await r.pttl("a") == -1
 
     @skip_if_server_version_lt("2.6.0")
-    async def test_pexpireat_datetime(self, r: valkey.Valkey):
+    async def test_pexpireat_datetime(self, r: valkey.asyncio.Valkey[bytes]):
         expire_at = await valkey_server_time(r) + datetime.timedelta(minutes=1)
         await r.set("a", "foo")
         assert await r.pexpireat("a", expire_at)
         assert 0 < await r.pttl("a") <= 61000
 
     @skip_if_server_version_lt("2.6.0")
-    async def test_pexpireat_no_key(self, r: valkey.Valkey):
+    async def test_pexpireat_no_key(self, r: valkey.asyncio.Valkey[bytes]):
         expire_at = await valkey_server_time(r) + datetime.timedelta(minutes=1)
         assert not await r.pexpireat("a", expire_at)
 
     @skip_if_server_version_lt("2.6.0")
-    async def test_pexpireat_unixtime(self, r: valkey.Valkey):
+    async def test_pexpireat_unixtime(self, r: valkey.asyncio.Valkey[bytes]):
         expire_at = await valkey_server_time(r) + datetime.timedelta(minutes=1)
         await r.set("a", "foo")
         expire_at_milliseconds = int(expire_at.timestamp() * 1000)
@@ -940,20 +942,20 @@ async def test_pexpireat_unixtime(self, r: valkey.Valkey):
         assert 0 < await r.pttl("a") <= 61000
 
     @skip_if_server_version_lt("2.6.0")
-    async def test_psetex(self, r: valkey.Valkey):
+    async def test_psetex(self, r: valkey.asyncio.Valkey[bytes]):
         assert await r.psetex("a", 1000, "value")
         assert await r.get("a") == b"value"
         assert 0 < await r.pttl("a") <= 1000
 
     @skip_if_server_version_lt("2.6.0")
-    async def test_psetex_timedelta(self, r: valkey.Valkey):
+    async def test_psetex_timedelta(self, r: valkey.asyncio.Valkey[bytes]):
         expire_at = datetime.timedelta(milliseconds=1000)
         assert await r.psetex("a", expire_at, "value")
         assert await r.get("a") == b"value"
         assert 0 < await r.pttl("a") <= 1000
 
     @skip_if_server_version_lt("2.6.0")
-    async def test_pttl(self, r: valkey.Valkey):
+    async def test_pttl(self, r: valkey.asyncio.Valkey[str]):
         assert not await r.pexpire("a", 10000)
         await r.set("a", "1")
         assert await r.pexpire("a", 10000)
@@ -962,7 +964,7 @@ async def test_pttl(self, r: valkey.Valkey):
         assert await r.pttl("a") == -1
 
     @skip_if_server_version_lt("2.8.0")
-    async def test_pttl_no_key(self, r: valkey.Valkey):
+    async def test_pttl_no_key(self, r: valkey.asyncio.Valkey[str]):
         """PTTL on servers 2.8 and after return -2 when the key doesn't exist"""
         assert await r.pttl("a") == -2
 
@@ -980,21 +982,21 @@ async def test_hrandfield(self, r):
         assert len(await r.hrandfield("key", -10)) == 10
 
     @pytest.mark.onlynoncluster
-    async def test_randomkey(self, r: valkey.Valkey):
+    async def test_randomkey(self, r: valkey.asyncio.Valkey[bytes]):
         assert await r.randomkey() is None
         for key in ("a", "b", "c"):
             await r.set(key, 1)
         assert await r.randomkey() in (b"a", b"b", b"c")
 
     @pytest.mark.onlynoncluster
-    async def test_rename(self, r: valkey.Valkey):
+    async def test_rename(self, r: valkey.asyncio.Valkey[bytes]):
         await r.set("a", "1")
         assert await r.rename("a", "b")
         assert await r.get("a") is None
         assert await r.get("b") == b"1"
 
     @pytest.mark.onlynoncluster
-    async def test_renamenx(self, r: valkey.Valkey):
+    async def test_renamenx(self, r: valkey.asyncio.Valkey[bytes]):
         await r.set("a", "1")
         await r.set("b", "2")
         assert not await r.renamenx("a", "b")
@@ -1002,13 +1004,13 @@ async def test_renamenx(self, r: valkey.Valkey):
         assert await r.get("b") == b"2"
 
     @skip_if_server_version_lt("2.6.0")
-    async def test_set_nx(self, r: valkey.Valkey):
+    async def test_set_nx(self, r: valkey.asyncio.Valkey[bytes]):
         assert await r.set("a", "1", nx=True)
         assert not await r.set("a", "2", nx=True)
         assert await r.get("a") == b"1"
 
     @skip_if_server_version_lt("2.6.0")
-    async def test_set_xx(self, r: valkey.Valkey):
+    async def test_set_xx(self, r: valkey.asyncio.Valkey[bytes]):
         assert not await r.set("a", "1", xx=True)
         assert await r.get("a") is None
         await r.set("a", "bar")
@@ -1016,38 +1018,38 @@ async def test_set_xx(self, r: valkey.Valkey):
         assert await r.get("a") == b"2"
 
     @skip_if_server_version_lt("2.6.0")
-    async def test_set_px(self, r: valkey.Valkey):
+    async def test_set_px(self, r: valkey.asyncio.Valkey[bytes]):
         assert await r.set("a", "1", px=10000)
         assert await r.get("a") == b"1"
         assert 0 < await r.pttl("a") <= 10000
         assert 0 < await r.ttl("a") <= 10
 
     @skip_if_server_version_lt("2.6.0")
-    async def test_set_px_timedelta(self, r: valkey.Valkey):
+    async def test_set_px_timedelta(self, r: valkey.asyncio.Valkey[str]):
         expire_at = datetime.timedelta(milliseconds=1000)
         assert await r.set("a", "1", px=expire_at)
         assert 0 < await r.pttl("a") <= 1000
         assert 0 < await r.ttl("a") <= 1
 
     @skip_if_server_version_lt("2.6.0")
-    async def test_set_ex(self, r: valkey.Valkey):
+    async def test_set_ex(self, r: valkey.asyncio.Valkey[str]):
         assert await r.set("a", "1", ex=10)
         assert 0 < await r.ttl("a") <= 10
 
     @skip_if_server_version_lt("2.6.0")
-    async def test_set_ex_timedelta(self, r: valkey.Valkey):
+    async def test_set_ex_timedelta(self, r: valkey.asyncio.Valkey[str]):
         expire_at = datetime.timedelta(seconds=60)
         assert await r.set("a", "1", ex=expire_at)
         assert 0 < await r.ttl("a") <= 60
 
     @skip_if_server_version_lt("2.6.0")
-    async def test_set_multipleoptions(self, r: valkey.Valkey):
+    async def test_set_multipleoptions(self, r: valkey.asyncio.Valkey[str]):
         await r.set("a", "val")
         assert await r.set("a", "1", xx=True, px=10000)
         assert 0 < await r.ttl("a") <= 10
 
     @skip_if_server_version_lt(VALKEY_6_VERSION)
-    async def test_set_keepttl(self, r: valkey.Valkey):
+    async def test_set_keepttl(self, r: valkey.asyncio.Valkey[bytes]):
         await r.set("a", "val")
         assert await r.set("a", "1", xx=True, px=10000)
         assert 0 < await r.ttl("a") <= 10
@@ -1055,36 +1057,36 @@ async def test_set_keepttl(self, r: valkey.Valkey):
         assert await r.get("a") == b"2"
         assert 0 < await r.ttl("a") <= 10
 
-    async def test_setex(self, r: valkey.Valkey):
+    async def test_setex(self, r: valkey.asyncio.Valkey[bytes]):
         assert await r.setex("a", 60, "1")
         assert await r.get("a") == b"1"
         assert 0 < await r.ttl("a") <= 60
 
-    async def test_setnx(self, r: valkey.Valkey):
+    async def test_setnx(self, r: valkey.asyncio.Valkey[bytes]):
         assert await r.setnx("a", "1")
         assert await r.get("a") == b"1"
         assert not await r.setnx("a", "2")
         assert await r.get("a") == b"1"
 
-    async def test_setrange(self, r: valkey.Valkey):
+    async def test_setrange(self, r: valkey.asyncio.Valkey[bytes]):
         assert await r.setrange("a", 5, "foo") == 8
         assert await r.get("a") == b"\0\0\0\0\0foo"
         await r.set("a", "abcdefghijh")
         assert await r.setrange("a", 6, "12345") == 11
         assert await r.get("a") == b"abcdef12345"
 
-    async def test_strlen(self, r: valkey.Valkey):
+    async def test_strlen(self, r: valkey.asyncio.Valkey[str]):
         await r.set("a", "foo")
         assert await r.strlen("a") == 3
 
-    async def test_substr(self, r: valkey.Valkey):
+    async def test_substr(self, r: valkey.asyncio.Valkey[bytes]):
         await r.set("a", "0123456789")
         assert await r.substr("a", 0) == b"0123456789"
         assert await r.substr("a", 2) == b"23456789"
         assert await r.substr("a", 3, 5) == b"345"
         assert await r.substr("a", 3, -2) == b"345678"
 
-    async def test_ttl(self, r: valkey.Valkey):
+    async def test_ttl(self, r: valkey.asyncio.Valkey[str]):
         await r.set("a", "1")
         assert await r.expire("a", 10)
         assert 0 < await r.ttl("a") <= 10
@@ -1092,11 +1094,11 @@ async def test_ttl(self, r: valkey.Valkey):
         assert await r.ttl("a") == -1
 
     @skip_if_server_version_lt("2.8.0")
-    async def test_ttl_nokey(self, r: valkey.Valkey):
+    async def test_ttl_nokey(self, r: valkey.asyncio.Valkey[str]):
         """TTL on servers 2.8 and after return -2 when the key doesn't exist"""
         assert await r.ttl("a") == -2
 
-    async def test_type(self, r: valkey.Valkey):
+    async def test_type(self, r: valkey.asyncio.Valkey[bytes]):
         assert await r.type("a") == b"none"
         await r.set("a", "1")
         assert await r.type("a") == b"string"
@@ -1112,7 +1114,7 @@ async def test_type(self, r: valkey.Valkey):
 
     # LIST COMMANDS
     @pytest.mark.onlynoncluster
-    async def test_blpop(self, r: valkey.Valkey):
+    async def test_blpop(self, r: valkey.asyncio.Valkey[bytes]):
         await r.rpush("a", "1", "2")
         await r.rpush("b", "3", "4")
         assert_resp_response(
@@ -1134,7 +1136,7 @@ async def test_blpop(self, r: valkey.Valkey):
         )
 
     @pytest.mark.onlynoncluster
-    async def test_brpop(self, r: valkey.Valkey):
+    async def test_brpop(self, r: valkey.asyncio.Valkey[bytes]):
         await r.rpush("a", "1", "2")
         await r.rpush("b", "3", "4")
         assert_resp_response(
@@ -1156,7 +1158,7 @@ async def test_brpop(self, r: valkey.Valkey):
         )
 
     @pytest.mark.onlynoncluster
-    async def test_brpoplpush(self, r: valkey.Valkey):
+    async def test_brpoplpush(self, r: valkey.asyncio.Valkey[bytes]):
         await r.rpush("a", "1", "2")
         await r.rpush("b", "3", "4")
         assert await r.brpoplpush("a", "b") == b"2"
@@ -1166,54 +1168,54 @@ async def test_brpoplpush(self, r: valkey.Valkey):
         assert await r.lrange("b", 0, -1) == [b"1", b"2", b"3", b"4"]
 
     @pytest.mark.onlynoncluster
-    async def test_brpoplpush_empty_string(self, r: valkey.Valkey):
+    async def test_brpoplpush_empty_string(self, r: valkey.asyncio.Valkey[bytes]):
         await r.rpush("a", "")
         assert await r.brpoplpush("a", "b") == b""
 
-    async def test_lindex(self, r: valkey.Valkey):
+    async def test_lindex(self, r: valkey.asyncio.Valkey[bytes]):
         await r.rpush("a", "1", "2", "3")
         assert await r.lindex("a", "0") == b"1"
         assert await r.lindex("a", "1") == b"2"
         assert await r.lindex("a", "2") == b"3"
 
-    async def test_linsert(self, r: valkey.Valkey):
+    async def test_linsert(self, r: valkey.asyncio.Valkey[bytes]):
         await r.rpush("a", "1", "2", "3")
         assert await r.linsert("a", "after", "2", "2.5") == 4
         assert await r.lrange("a", 0, -1) == [b"1", b"2", b"2.5", b"3"]
         assert await r.linsert("a", "before", "2", "1.5") == 5
         assert await r.lrange("a", 0, -1) == [b"1", b"1.5", b"2", b"2.5", b"3"]
 
-    async def test_llen(self, r: valkey.Valkey):
+    async def test_llen(self, r: valkey.asyncio.Valkey[str]):
         await r.rpush("a", "1", "2", "3")
         assert await r.llen("a") == 3
 
-    async def test_lpop(self, r: valkey.Valkey):
+    async def test_lpop(self, r: valkey.asyncio.Valkey[bytes]):
         await r.rpush("a", "1", "2", "3")
         assert await r.lpop("a") == b"1"
         assert await r.lpop("a") == b"2"
         assert await r.lpop("a") == b"3"
         assert await r.lpop("a") is None
 
-    async def test_lpush(self, r: valkey.Valkey):
+    async def test_lpush(self, r: valkey.asyncio.Valkey[bytes]):
         assert await r.lpush("a", "1") == 1
         assert await r.lpush("a", "2") == 2
         assert await r.lpush("a", "3", "4") == 4
         assert await r.lrange("a", 0, -1) == [b"4", b"3", b"2", b"1"]
 
-    async def test_lpushx(self, r: valkey.Valkey):
+    async def test_lpushx(self, r: valkey.asyncio.Valkey[bytes]):
         assert await r.lpushx("a", "1") == 0
         assert await r.lrange("a", 0, -1) == []
         await r.rpush("a", "1", "2", "3")
         assert await r.lpushx("a", "4") == 4
         assert await r.lrange("a", 0, -1) == [b"4", b"1", b"2", b"3"]
 
-    async def test_lrange(self, r: valkey.Valkey):
+    async def test_lrange(self, r: valkey.asyncio.Valkey[bytes]):
         await r.rpush("a", "1", "2", "3", "4", "5")
         assert await r.lrange("a", 0, 2) == [b"1", b"2", b"3"]
         assert await r.lrange("a", 2, 10) == [b"3", b"4", b"5"]
         assert await r.lrange("a", 0, -1) == [b"1", b"2", b"3", b"4", b"5"]
 
-    async def test_lrem(self, r: valkey.Valkey):
+    async def test_lrem(self, r: valkey.asyncio.Valkey[bytes]):
         await r.rpush("a", "Z", "b", "Z", "Z", "c", "Z", "Z")
         # remove the first 'Z'  item
         assert await r.lrem("a", 1, "Z") == 1
@@ -1225,18 +1227,18 @@ async def test_lrem(self, r: valkey.Valkey):
         assert await r.lrem("a", 0, "Z") == 2
         assert await r.lrange("a", 0, -1) == [b"b", b"c"]
 
-    async def test_lset(self, r: valkey.Valkey):
+    async def test_lset(self, r: valkey.asyncio.Valkey[bytes]):
         await r.rpush("a", "1", "2", "3")
         assert await r.lrange("a", 0, -1) == [b"1", b"2", b"3"]
         assert await r.lset("a", 1, "4")
         assert await r.lrange("a", 0, 2) == [b"1", b"4", b"3"]
 
-    async def test_ltrim(self, r: valkey.Valkey):
+    async def test_ltrim(self, r: valkey.asyncio.Valkey[bytes]):
         await r.rpush("a", "1", "2", "3")
         assert await r.ltrim("a", 0, 1)
         assert await r.lrange("a", 0, -1) == [b"1", b"2"]
 
-    async def test_rpop(self, r: valkey.Valkey):
+    async def test_rpop(self, r: valkey.asyncio.Valkey[bytes]):
         await r.rpush("a", "1", "2", "3")
         assert await r.rpop("a") == b"3"
         assert await r.rpop("a") == b"2"
@@ -1244,21 +1246,21 @@ async def test_rpop(self, r: valkey.Valkey):
         assert await r.rpop("a") is None
 
     @pytest.mark.onlynoncluster
-    async def test_rpoplpush(self, r: valkey.Valkey):
+    async def test_rpoplpush(self, r: valkey.asyncio.Valkey[bytes]):
         await r.rpush("a", "a1", "a2", "a3")
         await r.rpush("b", "b1", "b2", "b3")
         assert await r.rpoplpush("a", "b") == b"a3"
         assert await r.lrange("a", 0, -1) == [b"a1", b"a2"]
         assert await r.lrange("b", 0, -1) == [b"a3", b"b1", b"b2", b"b3"]
 
-    async def test_rpush(self, r: valkey.Valkey):
+    async def test_rpush(self, r: valkey.asyncio.Valkey[bytes]):
         assert await r.rpush("a", "1") == 1
         assert await r.rpush("a", "2") == 2
         assert await r.rpush("a", "3", "4") == 4
         assert await r.lrange("a", 0, -1) == [b"1", b"2", b"3", b"4"]
 
     @skip_if_server_version_lt("6.0.6")
-    async def test_lpos(self, r: valkey.Valkey):
+    async def test_lpos(self, r: valkey.asyncio.Valkey[bytes]):
         assert await r.rpush("a", "a", "b", "c", "1", "2", "3", "c", "c") == 8
         assert await r.lpos("a", "a") == 0
         assert await r.lpos("a", "c") == 2
@@ -1289,7 +1291,7 @@ async def test_lpos(self, r: valkey.Valkey):
         assert await r.lpos("a", "c", count=0, maxlen=3, rank=-1) == [7, 6]
         assert await r.lpos("a", "c", count=0, maxlen=7, rank=2) == [6]
 
-    async def test_rpushx(self, r: valkey.Valkey):
+    async def test_rpushx(self, r: valkey.asyncio.Valkey[bytes]):
         assert await r.rpushx("a", "b") == 0
         assert await r.lrange("a", 0, -1) == []
         await r.rpush("a", "1", "2", "3")
@@ -1299,7 +1301,7 @@ async def test_rpushx(self, r: valkey.Valkey):
     # SCAN COMMANDS
     @skip_if_server_version_lt("2.8.0")
     @pytest.mark.onlynoncluster
-    async def test_scan(self, r: valkey.Valkey):
+    async def test_scan(self, r: valkey.asyncio.Valkey[bytes]):
         await r.set("a", 1)
         await r.set("b", 2)
         await r.set("c", 3)
@@ -1311,7 +1313,7 @@ async def test_scan(self, r: valkey.Valkey):
 
     @skip_if_server_version_lt(VALKEY_6_VERSION)
     @pytest.mark.onlynoncluster
-    async def test_scan_type(self, r: valkey.Valkey):
+    async def test_scan_type(self, r: valkey.asyncio.Valkey[bytes]):
         await r.sadd("a-set", 1)
         await r.hset("a-hash", "foo", 2)
         await r.lpush("a-list", "aux", 3)
@@ -1320,7 +1322,7 @@ async def test_scan_type(self, r: valkey.Valkey):
 
     @skip_if_server_version_lt("2.8.0")
     @pytest.mark.onlynoncluster
-    async def test_scan_iter(self, r: valkey.Valkey):
+    async def test_scan_iter(self, r: valkey.asyncio.Valkey[bytes]):
         await r.set("a", 1)
         await r.set("b", 2)
         await r.set("c", 3)
@@ -1330,7 +1332,7 @@ async def test_scan_iter(self, r: valkey.Valkey):
         assert set(keys) == {b"a"}
 
     @skip_if_server_version_lt("2.8.0")
-    async def test_sscan(self, r: valkey.Valkey):
+    async def test_sscan(self, r: valkey.asyncio.Valkey[bytes]):
         await r.sadd("a", 1, 2, 3)
         cursor, members = await r.sscan("a")
         assert cursor == 0
@@ -1339,7 +1341,7 @@ async def test_sscan(self, r: valkey.Valkey):
         assert set(members) == {b"1"}
 
     @skip_if_server_version_lt("2.8.0")
-    async def test_sscan_iter(self, r: valkey.Valkey):
+    async def test_sscan_iter(self, r: valkey.asyncio.Valkey[bytes]):
         await r.sadd("a", 1, 2, 3)
         members = [k async for k in r.sscan_iter("a")]
         assert set(members) == {b"1", b"2", b"3"}
@@ -1347,7 +1349,7 @@ async def test_sscan_iter(self, r: valkey.Valkey):
         assert set(members) == {b"1"}
 
     @skip_if_server_version_lt("2.8.0")
-    async def test_hscan(self, r: valkey.Valkey):
+    async def test_hscan(self, r: valkey.asyncio.Valkey[bytes]):
         await r.hset("a", mapping={"a": 1, "b": 2, "c": 3})
         cursor, dic = await r.hscan("a")
         assert cursor == 0
@@ -1357,19 +1359,20 @@ async def test_hscan(self, r: valkey.Valkey):
         _, dic = await r.hscan("a_notset", match="a")
         assert dic == {}
 
+    # TODO: confirm whether the stub's return type for hscan(no_values=True) is a bug (it forces the type: ignore[comparison-overlap] below)
     @skip_if_server_version_lt("7.3.240")
-    async def test_hscan_novalues(self, r: valkey.Valkey):
+    async def test_hscan_novalues(self, r: valkey.asyncio.Valkey[bytes]):
         await r.hset("a", mapping={"a": 1, "b": 2, "c": 3})
         cursor, keys = await r.hscan("a", no_values=True)
         assert cursor == 0
         assert sorted(keys) == [b"a", b"b", b"c"]
         _, keys = await r.hscan("a", match="a", no_values=True)
-        assert keys == [b"a"]
+        assert keys == [b"a"]  # type: ignore[comparison-overlap]
         _, keys = await r.hscan("a_notset", match="a", no_values=True)
-        assert keys == []
+        assert keys == []  # type: ignore[comparison-overlap]
 
     @skip_if_server_version_lt("2.8.0")
-    async def test_hscan_iter(self, r: valkey.Valkey):
+    async def test_hscan_iter(self, r: valkey.asyncio.Valkey[bytes]):
         await r.hset("a", mapping={"a": 1, "b": 2, "c": 3})
         dic = {k: v async for k, v in r.hscan_iter("a")}
         assert dic == {b"a": b"1", b"b": b"2", b"c": b"3"}
@@ -1378,20 +1381,21 @@ async def test_hscan_iter(self, r: valkey.Valkey):
         dic = {k: v async for k, v in r.hscan_iter("a_notset", match="a")}
         assert dic == {}
 
+    # TODO: confirm whether the stub's return type for hscan_iter(no_values=True) is a bug (it forces the type: ignore[comparison-overlap] below)
     @skip_if_server_version_lt("7.3.240")
-    async def test_hscan_iter_novalues(self, r: valkey.Valkey):
+    async def test_hscan_iter_novalues(self, r: valkey.asyncio.Valkey[bytes]):
         await r.hset("a", mapping={"a": 1, "b": 2, "c": 3})
         keys = list([k async for k in r.hscan_iter("a", no_values=True)])
-        assert sorted(keys) == [b"a", b"b", b"c"]
+        assert sorted(keys) == [b"a", b"b", b"c"]  # type: ignore[comparison-overlap]
         keys = list([k async for k in r.hscan_iter("a", match="a", no_values=True)])
-        assert keys == [b"a"]
+        assert keys == [b"a"]  # type: ignore[comparison-overlap]
         keys = list(
             [k async for k in r.hscan_iter("a", match="a_notset", no_values=True)]
         )
         assert keys == []
 
     @skip_if_server_version_lt("2.8.0")
-    async def test_zscan(self, r: valkey.Valkey):
+    async def test_zscan(self, r: valkey.asyncio.Valkey[bytes]):
         await r.zadd("a", {"a": 1, "b": 2, "c": 3})
         cursor, pairs = await r.zscan("a")
         assert cursor == 0
@@ -1400,7 +1404,7 @@ async def test_zscan(self, r: valkey.Valkey):
         assert set(pairs) == {(b"a", 1)}
 
     @skip_if_server_version_lt("2.8.0")
-    async def test_zscan_iter(self, r: valkey.Valkey):
+    async def test_zscan_iter(self, r: valkey.asyncio.Valkey[bytes]):
         await r.zadd("a", {"a": 1, "b": 2, "c": 3})
         pairs = [k async for k in r.zscan_iter("a")]
         assert set(pairs) == {(b"a", 1), (b"b", 2), (b"c", 3)}
@@ -1408,78 +1412,78 @@ async def test_zscan_iter(self, r: valkey.Valkey):
         assert set(pairs) == {(b"a", 1)}
 
     # SET COMMANDS
-    async def test_sadd(self, r: valkey.Valkey):
+    async def test_sadd(self, r: valkey.asyncio.Valkey[bytes]):
         members = {b"1", b"2", b"3"}
         await r.sadd("a", *members)
         assert set(await r.smembers("a")) == members
 
-    async def test_scard(self, r: valkey.Valkey):
+    async def test_scard(self, r: valkey.asyncio.Valkey[str]):
         await r.sadd("a", "1", "2", "3")
         assert await r.scard("a") == 3
 
     @pytest.mark.onlynoncluster
-    async def test_sdiff(self, r: valkey.Valkey):
+    async def test_sdiff(self, r: valkey.asyncio.Valkey[bytes]):
         await r.sadd("a", "1", "2", "3")
         assert set(await r.sdiff("a", "b")) == {b"1", b"2", b"3"}
         await r.sadd("b", "2", "3")
-        assert await r.sdiff("a", "b") == [b"1"]
+        assert await r.sdiff("a", "b") == {b"1", }
 
     @pytest.mark.onlynoncluster
-    async def test_sdiffstore(self, r: valkey.Valkey):
+    async def test_sdiffstore(self, r: valkey.asyncio.Valkey[bytes]):
         await r.sadd("a", "1", "2", "3")
         assert await r.sdiffstore("c", "a", "b") == 3
         assert set(await r.smembers("c")) == {b"1", b"2", b"3"}
         await r.sadd("b", "2", "3")
         assert await r.sdiffstore("c", "a", "b") == 1
-        assert await r.smembers("c") == [b"1"]
+        assert await r.smembers("c") == [b"1", ]
 
     @pytest.mark.onlynoncluster
-    async def test_sinter(self, r: valkey.Valkey):
+    async def test_sinter(self, r: valkey.asyncio.Valkey[bytes]):
         await r.sadd("a", "1", "2", "3")
-        assert await r.sinter("a", "b") == []
+        assert await r.sinter("a", "b") == set()
         await r.sadd("b", "2", "3")
         assert set(await r.sinter("a", "b")) == {b"2", b"3"}
 
     @pytest.mark.onlynoncluster
-    async def test_sinterstore(self, r: valkey.Valkey):
+    async def test_sinterstore(self, r: valkey.asyncio.Valkey[bytes]):
         await r.sadd("a", "1", "2", "3")
         assert await r.sinterstore("c", "a", "b") == 0
-        assert await r.smembers("c") == []
+        assert await r.smembers("c") == list()
         await r.sadd("b", "2", "3")
         assert await r.sinterstore("c", "a", "b") == 2
         assert set(await r.smembers("c")) == {b"2", b"3"}
 
-    async def test_sismember(self, r: valkey.Valkey):
+    async def test_sismember(self, r: valkey.asyncio.Valkey[str]):
         await r.sadd("a", "1", "2", "3")
         assert await r.sismember("a", "1")
         assert await r.sismember("a", "2")
         assert await r.sismember("a", "3")
         assert not await r.sismember("a", "4")
 
-    async def test_smembers(self, r: valkey.Valkey):
+    async def test_smembers(self, r: valkey.asyncio.Valkey[bytes]):
         await r.sadd("a", "1", "2", "3")
         assert set(await r.smembers("a")) == {b"1", b"2", b"3"}
 
     @pytest.mark.onlynoncluster
-    async def test_smove(self, r: valkey.Valkey):
+    async def test_smove(self, r: valkey.asyncio.Valkey[bytes]):
         await r.sadd("a", "a1", "a2")
         await r.sadd("b", "b1", "b2")
         assert await r.smove("a", "b", "a1")
-        assert await r.smembers("a") == [b"a2"]
+        assert await r.smembers("a") == [b"a2", ]
         assert set(await r.smembers("b")) == {b"b1", b"b2", b"a1"}
 
-    async def test_spop(self, r: valkey.Valkey):
+    async def test_spop(self, r: valkey.asyncio.Valkey[bytes]):
         s = [b"1", b"2", b"3"]
         await r.sadd("a", *s)
-        value = await r.spop("a")
+        value: bytes = await r.spop("a")  # type: ignore[assignment]
         assert value in s
-        assert set(await r.smembers("a")) == set(s) - {value}
+        assert set(await r.smembers("a")) == set(s) - {value, }
 
     @skip_if_server_version_lt("3.2.0")
-    async def test_spop_multi_value(self, r: valkey.Valkey):
+    async def test_spop_multi_value(self, r: valkey.asyncio.Valkey[bytes]):
         s = [b"1", b"2", b"3"]
         await r.sadd("a", *s)
-        values = await r.spop("a", 2)
+        values: list[bytes] = await r.spop("a", 2)  # type: ignore[assignment]
         assert len(values) == 2
 
         for value in values:
@@ -1488,42 +1492,42 @@ async def test_spop_multi_value(self, r: valkey.Valkey):
         response = await r.spop("a", 1)
         assert set(response) == set(s) - set(values)
 
-    async def test_srandmember(self, r: valkey.Valkey):
+    async def test_srandmember(self, r: valkey.asyncio.Valkey[str]):
         s = [b"1", b"2", b"3"]
         await r.sadd("a", *s)
         assert await r.srandmember("a") in s
 
     @skip_if_server_version_lt("2.6.0")
-    async def test_srandmember_multi_value(self, r: valkey.Valkey):
+    async def test_srandmember_multi_value(self, r: valkey.asyncio.Valkey[str]):
         s = [b"1", b"2", b"3"]
         await r.sadd("a", *s)
         randoms = await r.srandmember("a", number=2)
         assert len(randoms) == 2
         assert set(randoms).intersection(s) == set(randoms)
 
-    async def test_srem(self, r: valkey.Valkey):
+    async def test_srem(self, r: valkey.asyncio.Valkey[bytes]):
         await r.sadd("a", "1", "2", "3", "4")
         assert await r.srem("a", "5") == 0
         assert await r.srem("a", "2", "4") == 2
         assert set(await r.smembers("a")) == {b"1", b"3"}
 
     @pytest.mark.onlynoncluster
-    async def test_sunion(self, r: valkey.Valkey):
+    async def test_sunion(self, r: valkey.asyncio.Valkey[bytes]):
         await r.sadd("a", "1", "2")
         await r.sadd("b", "2", "3")
         assert set(await r.sunion("a", "b")) == {b"1", b"2", b"3"}
 
     @pytest.mark.onlynoncluster
-    async def test_sunionstore(self, r: valkey.Valkey):
+    async def test_sunionstore(self, r: valkey.asyncio.Valkey[bytes]):
         await r.sadd("a", "1", "2")
         await r.sadd("b", "2", "3")
         assert await r.sunionstore("c", "a", "b") == 3
         assert set(await r.smembers("c")) == {b"1", b"2", b"3"}
 
     # SORTED SET COMMANDS
-    async def test_zadd(self, r: valkey.Valkey):
+    async def test_zadd(self, r: valkey.asyncio.Valkey[bytes]):
         mapping = {"a1": 1.0, "a2": 2.0, "a3": 3.0}
-        await r.zadd("a", mapping)
+        await r.zadd("a", mapping)  # type: ignore[arg-type]
         response = await r.zrange("a", 0, -1, withscores=True)
         assert_resp_response(
             r,
@@ -1538,13 +1542,13 @@ async def test_zadd(self, r: valkey.Valkey):
 
         # cannot use both nx and xx options
         with pytest.raises(exceptions.DataError):
-            await r.zadd("a", mapping, nx=True, xx=True)
+            await r.zadd("a", mapping, nx=True, xx=True)  # type: ignore[arg-type]
 
         # cannot use the incr options with more than one value
         with pytest.raises(exceptions.DataError):
-            await r.zadd("a", mapping, incr=True)
+            await r.zadd("a", mapping, incr=True)  # type: ignore[arg-type]
 
-    async def test_zadd_nx(self, r: valkey.Valkey):
+    async def test_zadd_nx(self, r: valkey.asyncio.Valkey[bytes]):
         assert await r.zadd("a", {"a1": 1}) == 1
         assert await r.zadd("a", {"a1": 99, "a2": 2}, nx=True) == 1
         response = await r.zrange("a", 0, -1, withscores=True)
@@ -1552,13 +1556,13 @@ async def test_zadd_nx(self, r: valkey.Valkey):
             r, response, [(b"a1", 1.0), (b"a2", 2.0)], [[b"a1", 1.0], [b"a2", 2.0]]
         )
 
-    async def test_zadd_xx(self, r: valkey.Valkey):
+    async def test_zadd_xx(self, r: valkey.asyncio.Valkey[bytes]):
         assert await r.zadd("a", {"a1": 1}) == 1
         assert await r.zadd("a", {"a1": 99, "a2": 2}, xx=True) == 0
         response = await r.zrange("a", 0, -1, withscores=True)
         assert_resp_response(r, response, [(b"a1", 99.0)], [[b"a1", 99.0]])
 
-    async def test_zadd_ch(self, r: valkey.Valkey):
+    async def test_zadd_ch(self, r: valkey.asyncio.Valkey[bytes]):
         assert await r.zadd("a", {"a1": 1}) == 1
         assert await r.zadd("a", {"a1": 99, "a2": 2}, ch=True) == 2
         response = await r.zrange("a", 0, -1, withscores=True)
@@ -1566,21 +1570,21 @@ async def test_zadd_ch(self, r: valkey.Valkey):
             r, response, [(b"a2", 2.0), (b"a1", 99.0)], [[b"a2", 2.0], [b"a1", 99.0]]
         )
 
-    async def test_zadd_incr(self, r: valkey.Valkey):
+    async def test_zadd_incr(self, r: valkey.asyncio.Valkey[str]):
         assert await r.zadd("a", {"a1": 1}) == 1
         assert await r.zadd("a", {"a1": 4.5}, incr=True) == 5.5
 
-    async def test_zadd_incr_with_xx(self, r: valkey.Valkey):
+    async def test_zadd_incr_with_xx(self, r: valkey.asyncio.Valkey[str]):
         # this asks zadd to incr 'a1' only if it exists, but it clearly
         # doesn't. Valkey returns a null value in this case and so should
         # valkey-py
         assert await r.zadd("a", {"a1": 1}, xx=True, incr=True) is None
 
-    async def test_zcard(self, r: valkey.Valkey):
+    async def test_zcard(self, r: valkey.asyncio.Valkey[str]):
         await r.zadd("a", {"a1": 1, "a2": 2, "a3": 3})
         assert await r.zcard("a") == 3
 
-    async def test_zcount(self, r: valkey.Valkey):
+    async def test_zcount(self, r: valkey.asyncio.Valkey[str]):
         await r.zadd("a", {"a1": 1, "a2": 2, "a3": 3})
         assert await r.zcount("a", "-inf", "+inf") == 3
         assert await r.zcount("a", 1, 2) == 2
@@ -1607,7 +1611,7 @@ async def test_zdiffstore(self, r):
         response = await r.zrange("out", 0, -1, withscores=True)
         assert_resp_response(r, response, [(b"a3", 3.0)], [[b"a3", 3.0]])
 
-    async def test_zincrby(self, r: valkey.Valkey):
+    async def test_zincrby(self, r: valkey.asyncio.Valkey[str]):
         await r.zadd("a", {"a1": 1, "a2": 2, "a3": 3})
         assert await r.zincrby("a", 1, "a2") == 3.0
         assert await r.zincrby("a", 5, "a3") == 8.0
@@ -1615,13 +1619,13 @@ async def test_zincrby(self, r: valkey.Valkey):
         assert await r.zscore("a", "a3") == 8.0
 
     @skip_if_server_version_lt("2.8.9")
-    async def test_zlexcount(self, r: valkey.Valkey):
+    async def test_zlexcount(self, r: valkey.asyncio.Valkey[str]):
         await r.zadd("a", {"a": 0, "b": 0, "c": 0, "d": 0, "e": 0, "f": 0, "g": 0})
         assert await r.zlexcount("a", "-", "+") == 7
         assert await r.zlexcount("a", "[b", "[f") == 5
 
     @pytest.mark.onlynoncluster
-    async def test_zinterstore_sum(self, r: valkey.Valkey):
+    async def test_zinterstore_sum(self, r: valkey.asyncio.Valkey[str]):
         await r.zadd("a", {"a1": 1, "a2": 1, "a3": 1})
         await r.zadd("b", {"a1": 2, "a2": 2, "a3": 2})
         await r.zadd("c", {"a1": 6, "a3": 5, "a4": 4})
@@ -1632,7 +1636,7 @@ async def test_zinterstore_sum(self, r: valkey.Valkey):
         )
 
     @pytest.mark.onlynoncluster
-    async def test_zinterstore_max(self, r: valkey.Valkey):
+    async def test_zinterstore_max(self, r: valkey.asyncio.Valkey[str]):
         await r.zadd("a", {"a1": 1, "a2": 1, "a3": 1})
         await r.zadd("b", {"a1": 2, "a2": 2, "a3": 2})
         await r.zadd("c", {"a1": 6, "a3": 5, "a4": 4})
@@ -1643,7 +1647,7 @@ async def test_zinterstore_max(self, r: valkey.Valkey):
         )
 
     @pytest.mark.onlynoncluster
-    async def test_zinterstore_min(self, r: valkey.Valkey):
+    async def test_zinterstore_min(self, r: valkey.asyncio.Valkey[str]):
         await r.zadd("a", {"a1": 1, "a2": 2, "a3": 3})
         await r.zadd("b", {"a1": 2, "a2": 3, "a3": 5})
         await r.zadd("c", {"a1": 6, "a3": 5, "a4": 4})
@@ -1654,7 +1658,7 @@ async def test_zinterstore_min(self, r: valkey.Valkey):
         )
 
     @pytest.mark.onlynoncluster
-    async def test_zinterstore_with_weight(self, r: valkey.Valkey):
+    async def test_zinterstore_with_weight(self, r: valkey.asyncio.Valkey[str]):
         await r.zadd("a", {"a1": 1, "a2": 1, "a3": 1})
         await r.zadd("b", {"a1": 2, "a2": 2, "a3": 2})
         await r.zadd("c", {"a1": 6, "a3": 5, "a4": 4})
@@ -1665,7 +1669,7 @@ async def test_zinterstore_with_weight(self, r: valkey.Valkey):
         )
 
     @skip_if_server_version_lt("4.9.0")
-    async def test_zpopmax(self, r: valkey.Valkey):
+    async def test_zpopmax(self, r: valkey.asyncio.Valkey[str]):
         await r.zadd("a", {"a1": 1, "a2": 2, "a3": 3})
         response = await r.zpopmax("a")
         assert_resp_response(r, response, [(b"a3", 3)], [b"a3", 3.0])
@@ -1677,7 +1681,7 @@ async def test_zpopmax(self, r: valkey.Valkey):
         )
 
     @skip_if_server_version_lt("4.9.0")
-    async def test_zpopmin(self, r: valkey.Valkey):
+    async def test_zpopmin(self, r: valkey.asyncio.Valkey[str]):
         await r.zadd("a", {"a1": 1, "a2": 2, "a3": 3})
         response = await r.zpopmin("a")
         assert_resp_response(r, response, [(b"a1", 1)], [b"a1", 1.0])
@@ -1690,7 +1694,7 @@ async def test_zpopmin(self, r: valkey.Valkey):
 
     @skip_if_server_version_lt("4.9.0")
     @pytest.mark.onlynoncluster
-    async def test_bzpopmax(self, r: valkey.Valkey):
+    async def test_bzpopmax(self, r: valkey.asyncio.Valkey[str]):
         await r.zadd("a", {"a1": 1, "a2": 2})
         await r.zadd("b", {"b1": 10, "b2": 20})
         assert_resp_response(
@@ -1725,7 +1729,7 @@ async def test_bzpopmax(self, r: valkey.Valkey):
 
     @skip_if_server_version_lt("4.9.0")
     @pytest.mark.onlynoncluster
-    async def test_bzpopmin(self, r: valkey.Valkey):
+    async def test_bzpopmin(self, r: valkey.asyncio.Valkey[str]):
         await r.zadd("a", {"a1": 1, "a2": 2})
         await r.zadd("b", {"b1": 10, "b2": 20})
         assert_resp_response(
@@ -1758,7 +1762,7 @@ async def test_bzpopmin(self, r: valkey.Valkey):
             r, await r.bzpopmin("c", timeout=1), (b"c", b"c1", 100), [b"c", b"c1", 100]
         )
 
-    async def test_zrange(self, r: valkey.Valkey):
+    async def test_zrange(self, r: valkey.asyncio.Valkey[bytes]):
         await r.zadd("a", {"a1": 1, "a2": 2, "a3": 3})
         assert await r.zrange("a", 0, 1) == [b"a1", b"a2"]
         assert await r.zrange("a", 1, 2) == [b"a2", b"a3"]
@@ -1780,7 +1784,7 @@ async def test_zrange(self, r: valkey.Valkey):
         # ]
 
     @skip_if_server_version_lt("2.8.9")
-    async def test_zrangebylex(self, r: valkey.Valkey):
+    async def test_zrangebylex(self, r: valkey.asyncio.Valkey[bytes]):
         await r.zadd("a", {"a": 0, "b": 0, "c": 0, "d": 0, "e": 0, "f": 0, "g": 0})
         assert await r.zrangebylex("a", "-", "[c") == [b"a", b"b", b"c"]
         assert await r.zrangebylex("a", "-", "(c") == [b"a", b"b"]
@@ -1789,7 +1793,7 @@ async def test_zrangebylex(self, r: valkey.Valkey):
         assert await r.zrangebylex("a", "-", "+", start=3, num=2) == [b"d", b"e"]
 
     @skip_if_server_version_lt("2.9.9")
-    async def test_zrevrangebylex(self, r: valkey.Valkey):
+    async def test_zrevrangebylex(self, r: valkey.asyncio.Valkey[bytes]):
         await r.zadd("a", {"a": 0, "b": 0, "c": 0, "d": 0, "e": 0, "f": 0, "g": 0})
         assert await r.zrevrangebylex("a", "[c", "-") == [b"c", b"b", b"a"]
         assert await r.zrevrangebylex("a", "(c", "-") == [b"b", b"a"]
@@ -1803,7 +1807,7 @@ async def test_zrevrangebylex(self, r: valkey.Valkey):
         assert await r.zrevrangebylex("a", "+", "[f") == [b"g", b"f"]
         assert await r.zrevrangebylex("a", "+", "-", start=3, num=2) == [b"d", b"c"]
 
-    async def test_zrangebyscore(self, r: valkey.Valkey):
+    async def test_zrangebyscore(self, r: valkey.asyncio.Valkey[bytes]):
         await r.zadd("a", {"a1": 1, "a2": 2, "a3": 3, "a4": 4, "a5": 5})
         assert await r.zrangebyscore("a", 2, 4) == [b"a2", b"a3", b"a4"]
 
@@ -1830,14 +1834,14 @@ async def test_zrangebyscore(self, r: valkey.Valkey):
             [[b"a2", 2], [b"a3", 3], [b"a4", 4]],
         )
 
-    async def test_zrank(self, r: valkey.Valkey):
+    async def test_zrank(self, r: valkey.asyncio.Valkey[str]):
         await r.zadd("a", {"a1": 1, "a2": 2, "a3": 3, "a4": 4, "a5": 5})
         assert await r.zrank("a", "a1") == 0
         assert await r.zrank("a", "a2") == 1
         assert await r.zrank("a", "a6") is None
 
     @skip_if_server_version_lt("7.2.0")
-    async def test_zrank_withscore(self, r: valkey.Valkey):
+    async def test_zrank_withscore(self, r: valkey.asyncio.Valkey[bytes]):
         await r.zadd("a", {"a1": 1, "a2": 2, "a3": 3, "a4": 4, "a5": 5})
         assert await r.zrank("a", "a1") == 0
         assert await r.zrank("a", "a2") == 1
@@ -1847,20 +1851,20 @@ async def test_zrank_withscore(self, r: valkey.Valkey):
         )
         assert await r.zrank("a", "a6", withscore=True) is None
 
-    async def test_zrem(self, r: valkey.Valkey):
+    async def test_zrem(self, r: valkey.asyncio.Valkey[bytes]):
         await r.zadd("a", {"a1": 1, "a2": 2, "a3": 3})
         assert await r.zrem("a", "a2") == 1
         assert await r.zrange("a", 0, -1) == [b"a1", b"a3"]
         assert await r.zrem("a", "b") == 0
         assert await r.zrange("a", 0, -1) == [b"a1", b"a3"]
 
-    async def test_zrem_multiple_keys(self, r: valkey.Valkey):
+    async def test_zrem_multiple_keys(self, r: valkey.asyncio.Valkey[bytes]):
         await r.zadd("a", {"a1": 1, "a2": 2, "a3": 3})
         assert await r.zrem("a", "a1", "a2") == 2
         assert await r.zrange("a", 0, 5) == [b"a3"]
 
     @skip_if_server_version_lt("2.8.9")
-    async def test_zremrangebylex(self, r: valkey.Valkey):
+    async def test_zremrangebylex(self, r: valkey.asyncio.Valkey[bytes]):
         await r.zadd("a", {"a": 0, "b": 0, "c": 0, "d": 0, "e": 0, "f": 0, "g": 0})
         assert await r.zremrangebylex("a", "-", "[c") == 3
         assert await r.zrange("a", 0, -1) == [b"d", b"e", b"f", b"g"]
@@ -1869,19 +1873,19 @@ async def test_zremrangebylex(self, r: valkey.Valkey):
         assert await r.zremrangebylex("a", "[h", "+") == 0
         assert await r.zrange("a", 0, -1) == [b"d", b"e"]
 
-    async def test_zremrangebyrank(self, r: valkey.Valkey):
+    async def test_zremrangebyrank(self, r: valkey.asyncio.Valkey[bytes]):
         await r.zadd("a", {"a1": 1, "a2": 2, "a3": 3, "a4": 4, "a5": 5})
         assert await r.zremrangebyrank("a", 1, 3) == 3
         assert await r.zrange("a", 0, 5) == [b"a1", b"a5"]
 
-    async def test_zremrangebyscore(self, r: valkey.Valkey):
+    async def test_zremrangebyscore(self, r: valkey.asyncio.Valkey[bytes]):
         await r.zadd("a", {"a1": 1, "a2": 2, "a3": 3, "a4": 4, "a5": 5})
         assert await r.zremrangebyscore("a", 2, 4) == 3
         assert await r.zrange("a", 0, -1) == [b"a1", b"a5"]
         assert await r.zremrangebyscore("a", 2, 4) == 0
         assert await r.zrange("a", 0, -1) == [b"a1", b"a5"]
 
-    async def test_zrevrange(self, r: valkey.Valkey):
+    async def test_zrevrange(self, r: valkey.asyncio.Valkey[bytes]):
         await r.zadd("a", {"a1": 1, "a2": 2, "a3": 3})
         assert await r.zrevrange("a", 0, 1) == [b"a3", b"a2"]
         assert await r.zrevrange("a", 1, 2) == [b"a2", b"a1"]
@@ -1902,7 +1906,7 @@ async def test_zrevrange(self, r: valkey.Valkey):
             r, response, [(b"a3", 3), (b"a2", 2)], [[b"a3", 3], [b"a2", 2]]
         )
 
-    async def test_zrevrangebyscore(self, r: valkey.Valkey):
+    async def test_zrevrangebyscore(self, r: valkey.asyncio.Valkey[bytes]):
         await r.zadd("a", {"a1": 1, "a2": 2, "a3": 3, "a4": 4, "a5": 5})
         assert await r.zrevrangebyscore("a", 4, 2) == [b"a4", b"a3", b"a2"]
 
@@ -1929,14 +1933,14 @@ async def test_zrevrangebyscore(self, r: valkey.Valkey):
             [[b"a4", 4], [b"a3", 3], [b"a2", 2]],
         )
 
-    async def test_zrevrank(self, r: valkey.Valkey):
+    async def test_zrevrank(self, r: valkey.asyncio.Valkey[str]):
         await r.zadd("a", {"a1": 1, "a2": 2, "a3": 3, "a4": 4, "a5": 5})
         assert await r.zrevrank("a", "a1") == 4
         assert await r.zrevrank("a", "a2") == 3
         assert await r.zrevrank("a", "a6") is None
 
     @skip_if_server_version_lt("7.2.0")
-    async def test_zrevrank_withscore(self, r: valkey.Valkey):
+    async def test_zrevrank_withscore(self, r: valkey.asyncio.Valkey[bytes]):
         await r.zadd("a", {"a1": 1, "a2": 2, "a3": 3, "a4": 4, "a5": 5})
         assert await r.zrevrank("a", "a1") == 4
         assert await r.zrevrank("a", "a2") == 3
@@ -1946,14 +1950,14 @@ async def test_zrevrank_withscore(self, r: valkey.Valkey):
         )
         assert await r.zrevrank("a", "a6", withscore=True) is None
 
-    async def test_zscore(self, r: valkey.Valkey):
+    async def test_zscore(self, r: valkey.asyncio.Valkey[str]):
         await r.zadd("a", {"a1": 1, "a2": 2, "a3": 3})
         assert await r.zscore("a", "a1") == 1.0
         assert await r.zscore("a", "a2") == 2.0
         assert await r.zscore("a", "a4") is None
 
     @pytest.mark.onlynoncluster
-    async def test_zunionstore_sum(self, r: valkey.Valkey):
+    async def test_zunionstore_sum(self, r: valkey.asyncio.Valkey[str]):
         await r.zadd("a", {"a1": 1, "a2": 1, "a3": 1})
         await r.zadd("b", {"a1": 2, "a2": 2, "a3": 2})
         await r.zadd("c", {"a1": 6, "a3": 5, "a4": 4})
@@ -1967,7 +1971,7 @@ async def test_zunionstore_sum(self, r: valkey.Valkey):
         )
 
     @pytest.mark.onlynoncluster
-    async def test_zunionstore_max(self, r: valkey.Valkey):
+    async def test_zunionstore_max(self, r: valkey.asyncio.Valkey[str]):
         await r.zadd("a", {"a1": 1, "a2": 1, "a3": 1})
         await r.zadd("b", {"a1": 2, "a2": 2, "a3": 2})
         await r.zadd("c", {"a1": 6, "a3": 5, "a4": 4})
@@ -1981,7 +1985,7 @@ async def test_zunionstore_max(self, r: valkey.Valkey):
         )
 
     @pytest.mark.onlynoncluster
-    async def test_zunionstore_min(self, r: valkey.Valkey):
+    async def test_zunionstore_min(self, r: valkey.asyncio.Valkey[str]):
         await r.zadd("a", {"a1": 1, "a2": 2, "a3": 3})
         await r.zadd("b", {"a1": 2, "a2": 2, "a3": 4})
         await r.zadd("c", {"a1": 6, "a3": 5, "a4": 4})
@@ -1995,7 +1999,7 @@ async def test_zunionstore_min(self, r: valkey.Valkey):
         )
 
     @pytest.mark.onlynoncluster
-    async def test_zunionstore_with_weight(self, r: valkey.Valkey):
+    async def test_zunionstore_with_weight(self, r: valkey.asyncio.Valkey[str]):
         await r.zadd("a", {"a1": 1, "a2": 1, "a3": 1})
         await r.zadd("b", {"a1": 2, "a2": 2, "a3": 2})
         await r.zadd("c", {"a1": 6, "a3": 5, "a4": 4})
@@ -2010,7 +2014,7 @@ async def test_zunionstore_with_weight(self, r: valkey.Valkey):
 
     # HYPERLOGLOG TESTS
     @skip_if_server_version_lt("2.8.9")
-    async def test_pfadd(self, r: valkey.Valkey):
+    async def test_pfadd(self, r: valkey.asyncio.Valkey[str]):
         members = {b"1", b"2", b"3"}
         assert await r.pfadd("a", *members) == 1
         assert await r.pfadd("a", *members) == 0
@@ -2018,18 +2022,18 @@ async def test_pfadd(self, r: valkey.Valkey):
 
     @skip_if_server_version_lt("2.8.9")
     @pytest.mark.onlynoncluster
-    async def test_pfcount(self, r: valkey.Valkey):
+    async def test_pfcount(self, r: valkey.asyncio.Valkey[str]):
         members = {b"1", b"2", b"3"}
         await r.pfadd("a", *members)
         assert await r.pfcount("a") == len(members)
         members_b = {b"2", b"3", b"4"}
         await r.pfadd("b", *members_b)
         assert await r.pfcount("b") == len(members_b)
-        assert await r.pfcount("a", "b") == len(members_b.union(members))
+        assert await r.pfcount("a", "b") == len(members_b.union(members))  # type: ignore[call-arg]
 
     @skip_if_server_version_lt("2.8.9")
     @pytest.mark.onlynoncluster
-    async def test_pfmerge(self, r: valkey.Valkey):
+    async def test_pfmerge(self, r: valkey.asyncio.Valkey[str]):
         mema = {b"1", b"2", b"3"}
         memb = {b"2", b"3", b"4"}
         memc = {b"5", b"6", b"7"}
@@ -2042,7 +2046,7 @@ async def test_pfmerge(self, r: valkey.Valkey):
         assert await r.pfcount("d") == 7
 
     # HASH COMMANDS
-    async def test_hget_and_hset(self, r: valkey.Valkey):
+    async def test_hget_and_hset(self, r: valkey.asyncio.Valkey[bytes]):
         await r.hset("a", mapping={"1": 1, "2": 2, "3": 3})
         assert await r.hget("a", "1") == b"1"
         assert await r.hget("a", "2") == b"2"
@@ -2060,10 +2064,10 @@ async def test_hget_and_hset(self, r: valkey.Valkey):
         assert await r.hget("a", "b") is None
 
         # keys with bool(key) == False
-        assert await r.hset("a", 0, 10) == 1
+        assert await r.hset("a", 0, 10) == 1  # type: ignore[call-overload]
         assert await r.hset("a", "", 10) == 1
 
-    async def test_hset_with_multi_key_values(self, r: valkey.Valkey):
+    async def test_hset_with_multi_key_values(self, r: valkey.asyncio.Valkey[bytes]):
         await r.hset("a", mapping={"1": 1, "2": 2, "3": 3})
         assert await r.hget("a", "1") == b"1"
         assert await r.hget("a", "2") == b"2"
@@ -2074,94 +2078,94 @@ async def test_hset_with_multi_key_values(self, r: valkey.Valkey):
         assert await r.hget("b", "2") == b"2"
         assert await r.hget("b", "foo") == b"bar"
 
-    async def test_hset_without_data(self, r: valkey.Valkey):
+    async def test_hset_without_data(self, r: valkey.asyncio.Valkey[str]):
         with pytest.raises(exceptions.DataError):
-            await r.hset("x")
+            await r.hset("x")  # type: ignore[call-overload]
 
-    async def test_hdel(self, r: valkey.Valkey):
+    async def test_hdel(self, r: valkey.asyncio.Valkey[str]):
         await r.hset("a", mapping={"1": 1, "2": 2, "3": 3})
         assert await r.hdel("a", "2") == 1
         assert await r.hget("a", "2") is None
         assert await r.hdel("a", "1", "3") == 2
         assert await r.hlen("a") == 0
 
-    async def test_hexists(self, r: valkey.Valkey):
+    async def test_hexists(self, r: valkey.asyncio.Valkey[str]):
         await r.hset("a", mapping={"1": 1, "2": 2, "3": 3})
         assert await r.hexists("a", "1")
         assert not await r.hexists("a", "4")
 
-    async def test_hgetall(self, r: valkey.Valkey):
+    async def test_hgetall(self, r: valkey.asyncio.Valkey[bytes]):
         h = {b"a1": b"1", b"a2": b"2", b"a3": b"3"}
-        await r.hset("a", mapping=h)
+        await r.hset("a", mapping=h)  # type: ignore[arg-type]
         assert await r.hgetall("a") == h
 
-    async def test_hincrby(self, r: valkey.Valkey):
+    async def test_hincrby(self, r: valkey.asyncio.Valkey[str]):
         assert await r.hincrby("a", "1") == 1
         assert await r.hincrby("a", "1", amount=2) == 3
         assert await r.hincrby("a", "1", amount=-2) == 1
 
     @skip_if_server_version_lt("2.6.0")
-    async def test_hincrbyfloat(self, r: valkey.Valkey):
+    async def test_hincrbyfloat(self, r: valkey.asyncio.Valkey[str]):
         assert await r.hincrbyfloat("a", "1") == 1.0
         assert await r.hincrbyfloat("a", "1") == 2.0
         assert await r.hincrbyfloat("a", "1", 1.2) == 3.2
 
-    async def test_hkeys(self, r: valkey.Valkey):
+    async def test_hkeys(self, r: valkey.asyncio.Valkey[bytes]):
         h = {b"a1": b"1", b"a2": b"2", b"a3": b"3"}
-        await r.hset("a", mapping=h)
+        await r.hset("a", mapping=h)  # type: ignore[arg-type]
         local_keys = list(h.keys())
         remote_keys = await r.hkeys("a")
         assert sorted(local_keys) == sorted(remote_keys)
 
-    async def test_hlen(self, r: valkey.Valkey):
+    async def test_hlen(self, r: valkey.asyncio.Valkey[str]):
         await r.hset("a", mapping={"1": 1, "2": 2, "3": 3})
         assert await r.hlen("a") == 3
 
-    async def test_hmget(self, r: valkey.Valkey):
+    async def test_hmget(self, r: valkey.asyncio.Valkey[bytes]):
         assert await r.hset("a", mapping={"a": 1, "b": 2, "c": 3})
         assert await r.hmget("a", "a", "b", "c") == [b"1", b"2", b"3"]
 
-    async def test_hmset(self, r: valkey.Valkey):
+    async def test_hmset(self, r: valkey.asyncio.Valkey[bytes]):
         warning_message = (
             r"^Valkey(?:Cluster)*\.hmset\(\) is deprecated\. "
             r"Use Valkey(?:Cluster)*\.hset\(\) instead\.$"
         )
         h = {b"a": b"1", b"b": b"2", b"c": b"3"}
         with pytest.warns(DeprecationWarning, match=warning_message):
-            assert await r.hmset("a", h)
+            assert await r.hmset("a", h) # type: ignore[arg-type]
         assert await r.hgetall("a") == h
 
-    async def test_hsetnx(self, r: valkey.Valkey):
+    async def test_hsetnx(self, r: valkey.asyncio.Valkey[bytes]):
         # Initially set the hash field
         assert await r.hsetnx("a", "1", 1)
         assert await r.hget("a", "1") == b"1"
         assert not await r.hsetnx("a", "1", 2)
         assert await r.hget("a", "1") == b"1"
 
-    async def test_hvals(self, r: valkey.Valkey):
+    async def test_hvals(self, r: valkey.asyncio.Valkey[bytes]):
         h = {b"a1": b"1", b"a2": b"2", b"a3": b"3"}
-        await r.hset("a", mapping=h)
+        await r.hset("a", mapping=h)  # type: ignore[arg-type]
         local_vals = list(h.values())
         remote_vals = await r.hvals("a")
         assert sorted(local_vals) == sorted(remote_vals)
 
     @skip_if_server_version_lt("3.2.0")
-    async def test_hstrlen(self, r: valkey.Valkey):
+    async def test_hstrlen(self, r: valkey.asyncio.Valkey[str]):
         await r.hset("a", mapping={"1": "22", "2": "333"})
         assert await r.hstrlen("a", "1") == 2
         assert await r.hstrlen("a", "2") == 3
 
     # SORT
-    async def test_sort_basic(self, r: valkey.Valkey):
+    async def test_sort_basic(self, r: valkey.asyncio.Valkey[bytes]):
         await r.rpush("a", "3", "2", "1", "4")
         assert await r.sort("a") == [b"1", b"2", b"3", b"4"]
 
-    async def test_sort_limited(self, r: valkey.Valkey):
+    async def test_sort_limited(self, r: valkey.asyncio.Valkey[bytes]):
         await r.rpush("a", "3", "2", "1", "4")
         assert await r.sort("a", start=1, num=2) == [b"2", b"3"]
 
     @pytest.mark.onlynoncluster
-    async def test_sort_by(self, r: valkey.Valkey):
+    async def test_sort_by(self, r: valkey.asyncio.Valkey[bytes]):
         await r.set("score:1", 8)
         await r.set("score:2", 3)
         await r.set("score:3", 5)
@@ -2169,7 +2173,7 @@ async def test_sort_by(self, r: valkey.Valkey):
         assert await r.sort("a", by="score:*") == [b"2", b"3", b"1"]
 
     @pytest.mark.onlynoncluster
-    async def test_sort_get(self, r: valkey.Valkey):
+    async def test_sort_get(self, r: valkey.asyncio.Valkey[bytes]):
         await r.set("user:1", "u1")
         await r.set("user:2", "u2")
         await r.set("user:3", "u3")
@@ -2177,7 +2181,7 @@ async def test_sort_get(self, r: valkey.Valkey):
         assert await r.sort("a", get="user:*") == [b"u1", b"u2", b"u3"]
 
     @pytest.mark.onlynoncluster
-    async def test_sort_get_multi(self, r: valkey.Valkey):
+    async def test_sort_get_multi(self, r: valkey.asyncio.Valkey[bytes]):
         await r.set("user:1", "u1")
         await r.set("user:2", "u2")
         await r.set("user:3", "u3")
@@ -2192,19 +2196,19 @@ async def test_sort_get_multi(self, r: valkey.Valkey):
         ]
 
     @pytest.mark.onlynoncluster
-    async def test_sort_get_groups_two(self, r: valkey.Valkey):
+    async def test_sort_get_groups_two(self, r: valkey.asyncio.Valkey[bytes]):
         await r.set("user:1", "u1")
         await r.set("user:2", "u2")
         await r.set("user:3", "u3")
         await r.rpush("a", "2", "3", "1")
-        assert await r.sort("a", get=("user:*", "#"), groups=True) == [
+        assert await r.sort("a", get=("user:*", "#"), groups=True) == [  # type: ignore[comparison-overlap]
             (b"u1", b"1"),
             (b"u2", b"2"),
             (b"u3", b"3"),
         ]
 
     @pytest.mark.onlynoncluster
-    async def test_sort_groups_string_get(self, r: valkey.Valkey):
+    async def test_sort_groups_string_get(self, r: valkey.asyncio.Valkey[str]):
         await r.set("user:1", "u1")
         await r.set("user:2", "u2")
         await r.set("user:3", "u3")
@@ -2213,7 +2217,7 @@ async def test_sort_groups_string_get(self, r: valkey.Valkey):
             await r.sort("a", get="user:*", groups=True)
 
     @pytest.mark.onlynoncluster
-    async def test_sort_groups_just_one_get(self, r: valkey.Valkey):
+    async def test_sort_groups_just_one_get(self, r: valkey.asyncio.Valkey[str]):
         await r.set("user:1", "u1")
         await r.set("user:2", "u2")
         await r.set("user:3", "u3")
@@ -2221,7 +2225,7 @@ async def test_sort_groups_just_one_get(self, r: valkey.Valkey):
         with pytest.raises(exceptions.DataError):
             await r.sort("a", get=["user:*"], groups=True)
 
-    async def test_sort_groups_no_get(self, r: valkey.Valkey):
+    async def test_sort_groups_no_get(self, r: valkey.asyncio.Valkey[str]):
         await r.set("user:1", "u1")
         await r.set("user:2", "u2")
         await r.set("user:3", "u3")
@@ -2230,7 +2234,7 @@ async def test_sort_groups_no_get(self, r: valkey.Valkey):
             await r.sort("a", groups=True)
 
     @pytest.mark.onlynoncluster
-    async def test_sort_groups_three_gets(self, r: valkey.Valkey):
+    async def test_sort_groups_three_gets(self, r: valkey.asyncio.Valkey[bytes]):
         await r.set("user:1", "u1")
         await r.set("user:2", "u2")
         await r.set("user:3", "u3")
@@ -2238,28 +2242,28 @@ async def test_sort_groups_three_gets(self, r: valkey.Valkey):
         await r.set("door:2", "d2")
         await r.set("door:3", "d3")
         await r.rpush("a", "2", "3", "1")
-        assert await r.sort("a", get=("user:*", "door:*", "#"), groups=True) == [
+        assert await r.sort("a", get=("user:*", "door:*", "#"), groups=True) == [  # type: ignore[comparison-overlap]
             (b"u1", b"d1", b"1"),
             (b"u2", b"d2", b"2"),
             (b"u3", b"d3", b"3"),
         ]
 
-    async def test_sort_desc(self, r: valkey.Valkey):
+    async def test_sort_desc(self, r: valkey.asyncio.Valkey[bytes]):
         await r.rpush("a", "2", "3", "1")
         assert await r.sort("a", desc=True) == [b"3", b"2", b"1"]
 
-    async def test_sort_alpha(self, r: valkey.Valkey):
+    async def test_sort_alpha(self, r: valkey.asyncio.Valkey[bytes]):
         await r.rpush("a", "e", "c", "b", "d", "a")
         assert await r.sort("a", alpha=True) == [b"a", b"b", b"c", b"d", b"e"]
 
     @pytest.mark.onlynoncluster
-    async def test_sort_store(self, r: valkey.Valkey):
+    async def test_sort_store(self, r: valkey.asyncio.Valkey[bytes]):
         await r.rpush("a", "2", "3", "1")
         assert await r.sort("a", store="sorted_values") == 3
         assert await r.lrange("sorted_values", 0, -1) == [b"1", b"2", b"3"]
 
     @pytest.mark.onlynoncluster
-    async def test_sort_all_options(self, r: valkey.Valkey):
+    async def test_sort_all_options(self, r: valkey.asyncio.Valkey[bytes]):
         await r.set("user:1:username", "zeus")
         await r.set("user:2:username", "titan")
         await r.set("user:3:username", "hermes")
@@ -2297,7 +2301,7 @@ async def test_sort_all_options(self, r: valkey.Valkey):
             b"apple juice",
         ]
 
-    async def test_sort_issue_924(self, r: valkey.Valkey):
+    async def test_sort_issue_924(self, r: valkey.asyncio.Valkey[str]):
         # Tests for issue https://github.com/andymccurdy/redis-py/issues/924
         await r.execute_command("SADD", "issue#924", 1)
         await r.execute_command("SORT", "issue#924")
@@ -2374,12 +2378,12 @@ async def test_cluster_slaves(self, mock_cluster_resp_slaves):
     @skip_if_server_version_lt("3.0.0")
     @skip_if_server_version_gte("7.0.0")
     @pytest.mark.onlynoncluster
-    async def test_readwrite(self, r: valkey.Valkey):
+    async def test_readwrite(self, r: valkey.asyncio.Valkey[str]):
         assert await r.readwrite()
 
     @skip_if_server_version_lt("3.0.0")
     @pytest.mark.onlynoncluster
-    async def test_readonly(self, r: valkey.Valkey, valkey_version: Version):
+    async def test_readonly(self, r: valkey.asyncio.Valkey[str], valkey_version: Version):
         # NOTE: Valkey 8.0.0 changes the behaviour of READONLY
         # See https://github.com/valkey-io/valkey/pull/325
         if valkey_version < Version("8.0.0"):
@@ -2395,7 +2399,7 @@ async def test_mock_readonly(self, mock_cluster_resp_ok):
 
     # GEO COMMANDS
     @skip_if_server_version_lt("3.2.0")
-    async def test_geoadd(self, r: valkey.Valkey):
+    async def test_geoadd(self, r: valkey.asyncio.Valkey[str]):
         values = (2.1909389952632, 41.433791470673, "place1") + (
             2.1873744593677,
             41.406342043777,
@@ -2406,12 +2410,12 @@ async def test_geoadd(self, r: valkey.Valkey):
         assert await r.zcard("barcelona") == 2
 
     @skip_if_server_version_lt("3.2.0")
-    async def test_geoadd_invalid_params(self, r: valkey.Valkey):
+    async def test_geoadd_invalid_params(self, r: valkey.asyncio.Valkey[str]):
         with pytest.raises(exceptions.ValkeyError):
             await r.geoadd("barcelona", (1, 2))
 
     @skip_if_server_version_lt("3.2.0")
-    async def test_geodist(self, r: valkey.Valkey):
+    async def test_geodist(self, r: valkey.asyncio.Valkey[str]):
         values = (2.1909389952632, 41.433791470673, "place1") + (
             2.1873744593677,
             41.406342043777,
@@ -2422,7 +2426,7 @@ async def test_geodist(self, r: valkey.Valkey):
         assert await r.geodist("barcelona", "place1", "place2") == 3067.4157
 
     @skip_if_server_version_lt("3.2.0")
-    async def test_geodist_units(self, r: valkey.Valkey):
+    async def test_geodist_units(self, r: valkey.asyncio.Valkey[str]):
         values = (2.1909389952632, 41.433791470673, "place1") + (
             2.1873744593677,
             41.406342043777,
@@ -2433,18 +2437,18 @@ async def test_geodist_units(self, r: valkey.Valkey):
         assert await r.geodist("barcelona", "place1", "place2", "km") == 3.0674
 
     @skip_if_server_version_lt("3.2.0")
-    async def test_geodist_missing_one_member(self, r: valkey.Valkey):
+    async def test_geodist_missing_one_member(self, r: valkey.asyncio.Valkey[str]):
         values = (2.1909389952632, 41.433791470673, "place1")
         await r.geoadd("barcelona", values)
         assert await r.geodist("barcelona", "place1", "missing_member", "km") is None
 
     @skip_if_server_version_lt("3.2.0")
-    async def test_geodist_invalid_units(self, r: valkey.Valkey):
+    async def test_geodist_invalid_units(self, r: valkey.asyncio.Valkey[str]):
         with pytest.raises(exceptions.ValkeyError):
             assert await r.geodist("x", "y", "z", "inches")
 
     @skip_if_server_version_lt("3.2.0")
-    async def test_geohash(self, r: valkey.Valkey):
+    async def test_geohash(self, r: valkey.asyncio.Valkey[str]):
         values = (2.1909389952632, 41.433791470673, "place1") + (
             2.1873744593677,
             41.406342043777,
@@ -2460,7 +2464,7 @@ async def test_geohash(self, r: valkey.Valkey):
         )
 
     @skip_if_server_version_lt("3.2.0")
-    async def test_geopos(self, r: valkey.Valkey):
+    async def test_geopos(self, r: valkey.asyncio.Valkey[str]):
         values = (2.1909389952632, 41.433791470673, "place1") + (
             2.1873744593677,
             41.406342043777,
@@ -2483,16 +2487,16 @@ async def test_geopos(self, r: valkey.Valkey):
         )
 
     @skip_if_server_version_lt("4.0.0")
-    async def test_geopos_no_value(self, r: valkey.Valkey):
+    async def test_geopos_no_value(self, r: valkey.asyncio.Valkey[str]):
         assert await r.geopos("barcelona", "place1", "place2") == [None, None]
 
     @skip_if_server_version_lt("3.2.0")
     @skip_if_server_version_gte("4.0.0")
-    async def test_old_geopos_no_value(self, r: valkey.Valkey):
+    async def test_old_geopos_no_value(self, r: valkey.asyncio.Valkey[str]):
         assert await r.geopos("barcelona", "place1", "place2") == []
 
     @skip_if_server_version_lt("6.2.0")
-    async def test_geosearch(self, r: valkey.Valkey):
+    async def test_geosearch(self, r: valkey.asyncio.Valkey[str]):
         values = (
             (2.1909389952632, 41.433791470673, "place1")
             + (2.1873744593677, 41.406342043777, b"\x80place2")
@@ -2520,13 +2524,13 @@ async def test_geosearch(self, r: valkey.Valkey):
             "barcelona", member="place3", radius=100, unit="km", count=2
         ) == [b"place3", b"\x80place2"]
         search_res = await r.geosearch(
-            "barcelona", member="place3", radius=100, unit="km", count=1, any=1
+            "barcelona", member="place3", radius=100, unit="km", count=1, any=True
         )
         assert search_res[0] in [b"place1", b"place3", b"\x80place2"]
 
     @skip_unless_arch_bits(64)
     @skip_if_server_version_lt("6.2.0")
-    async def test_geosearch_member(self, r: valkey.Valkey):
+    async def test_geosearch_member(self, r: valkey.asyncio.Valkey[str]):
         values = (2.1909389952632, 41.433791470673, "place1") + (
             2.1873744593677,
             41.406342043777,
@@ -2564,7 +2568,7 @@ async def test_geosearch_member(self, r: valkey.Valkey):
         )
 
     @skip_if_server_version_lt("6.2.0")
-    async def test_geosearch_sort(self, r: valkey.Valkey):
+    async def test_geosearch_sort(self, r: valkey.asyncio.Valkey[str]):
         values = (2.1909389952632, 41.433791470673, "place1") + (
             2.1873744593677,
             41.406342043777,
@@ -2603,9 +2607,9 @@ async def test_geosearch_sort(self, r: valkey.Valkey):
     )
     async def test_geosearch_with(
         self,
-        r: valkey.Valkey,
-        geosearch_kwargs: Dict[str, Any],
-        expected_geosearch_result: List[Any],
+        r: valkey.asyncio.Valkey[str],
+        geosearch_kwargs: dict[str, Any],
+        expected_geosearch_result: list[Any],
     ):
         values = (2.1909389952632, 41.433791470673, "place1") + (
             2.1873744593677,
@@ -2646,7 +2650,7 @@ async def test_geosearch_with(
         )
 
     @skip_if_server_version_lt("6.2.0")
-    async def test_geosearch_negative(self, r: valkey.Valkey):
+    async def test_geosearch_negative(self, r: valkey.asyncio.Valkey[str]):
         # not specifying member nor longitude and latitude
         with pytest.raises(exceptions.DataError):
             assert await r.geosearch("barcelona")
@@ -2689,11 +2693,11 @@ async def test_geosearch_negative(self, r: valkey.Valkey):
 
         # use any without count
         with pytest.raises(exceptions.DataError):
-            assert await r.geosearch("barcelona", member="place3", radius=100, any=1)
+            assert await r.geosearch("barcelona", member="place3", radius=100, any=True)
 
     @pytest.mark.onlynoncluster
     @skip_if_server_version_lt("6.2.0")
-    async def test_geosearchstore(self, r: valkey.Valkey):
+    async def test_geosearchstore(self, r: valkey.asyncio.Valkey[bytes]):
         values = (2.1909389952632, 41.433791470673, "place1") + (
             2.1873744593677,
             41.406342043777,
@@ -2713,7 +2717,7 @@ async def test_geosearchstore(self, r: valkey.Valkey):
     @pytest.mark.onlynoncluster
     @skip_unless_arch_bits(64)
     @skip_if_server_version_lt("6.2.0")
-    async def test_geosearchstore_dist(self, r: valkey.Valkey):
+    async def test_geosearchstore_dist(self, r: valkey.asyncio.Valkey[str]):
         values = (2.1909389952632, 41.433791470673, "place1") + (
             2.1873744593677,
             41.406342043777,
@@ -2731,10 +2735,11 @@ async def test_geosearchstore_dist(self, r: valkey.Valkey):
         )
         # instead of save the geo score, the distance is saved.
         score = await r.zscore("places_barcelona", "place1")
+        assert score is not None
         assert math.isclose(score, 88.05060698409301)
 
     @skip_if_server_version_lt("3.2.0")
-    async def test_georadius(self, r: valkey.Valkey):
+    async def test_georadius(self, r: valkey.asyncio.Valkey[str]):
         values = (2.1909389952632, 41.433791470673, "place1") + (
             2.1873744593677,
             41.406342043777,
@@ -2746,7 +2751,7 @@ async def test_georadius(self, r: valkey.Valkey):
         assert await r.georadius("barcelona", 2.187, 41.406, 1000) == [b"\x80place2"]
 
     @skip_if_server_version_lt("3.2.0")
-    async def test_georadius_no_values(self, r: valkey.Valkey):
+    async def test_georadius_no_values(self, r: valkey.asyncio.Valkey[str]):
         values = (2.1909389952632, 41.433791470673, "place1") + (
             2.1873744593677,
             41.406342043777,
@@ -2757,7 +2762,7 @@ async def test_georadius_no_values(self, r: valkey.Valkey):
         assert await r.georadius("barcelona", 1, 2, 1000) == []
 
     @skip_if_server_version_lt("3.2.0")
-    async def test_georadius_units(self, r: valkey.Valkey):
+    async def test_georadius_units(self, r: valkey.asyncio.Valkey[str]):
         values = (2.1909389952632, 41.433791470673, "place1") + (
             2.1873744593677,
             41.406342043777,
@@ -2793,7 +2798,7 @@ async def test_georadius_units(self, r: valkey.Valkey):
         ],
     )
     async def test_georadius_with(
-        self, r: valkey.Valkey, georadius_kwargs, expected_georadius_result
+        self, r: valkey.asyncio.Valkey[str], georadius_kwargs, expected_georadius_result
     ):
         values = (2.1909389952632, 41.433791470673, "place1") + (
             2.1873744593677,
@@ -2837,7 +2842,7 @@ async def test_georadius_with(
         )
 
     @skip_if_server_version_lt("3.2.0")
-    async def test_georadius_count(self, r: valkey.Valkey):
+    async def test_georadius_count(self, r: valkey.asyncio.Valkey[str]):
         values = (2.1909389952632, 41.433791470673, "place1") + (
             2.1873744593677,
             41.406342043777,
@@ -2850,7 +2855,7 @@ async def test_georadius_count(self, r: valkey.Valkey):
         ]
 
     @skip_if_server_version_lt("3.2.0")
-    async def test_georadius_sort(self, r: valkey.Valkey):
+    async def test_georadius_sort(self, r: valkey.asyncio.Valkey[str]):
         values = (2.1909389952632, 41.433791470673, "place1") + (
             2.1873744593677,
             41.406342043777,
@@ -2869,7 +2874,7 @@ async def test_georadius_sort(self, r: valkey.Valkey):
 
     @skip_if_server_version_lt("3.2.0")
     @pytest.mark.onlynoncluster
-    async def test_georadius_store(self, r: valkey.Valkey):
+    async def test_georadius_store(self, r: valkey.asyncio.Valkey[bytes]):
         values = (2.1909389952632, 41.433791470673, "place1") + (
             2.1873744593677,
             41.406342043777,
@@ -2883,7 +2888,7 @@ async def test_georadius_store(self, r: valkey.Valkey):
     @skip_unless_arch_bits(64)
     @skip_if_server_version_lt("3.2.0")
     @pytest.mark.onlynoncluster
-    async def test_georadius_store_dist(self, r: valkey.Valkey):
+    async def test_georadius_store_dist(self, r: valkey.asyncio.Valkey[str]):
         values = (2.1909389952632, 41.433791470673, "place1") + (
             2.1873744593677,
             41.406342043777,
@@ -2896,11 +2901,11 @@ async def test_georadius_store_dist(self, r: valkey.Valkey):
         )
         # instead of save the geo score, the distance is saved.
         z_score = await r.zscore("places_barcelona", "place1")
-        assert math.isclose(z_score, 88.05060698409301)
+        assert math.isclose(z_score, 88.05060698409301)  # type: ignore[arg-type]
 
     @skip_unless_arch_bits(64)
     @skip_if_server_version_lt("3.2.0")
-    async def test_georadiusmember(self, r: valkey.Valkey):
+    async def test_georadiusmember(self, r: valkey.asyncio.Valkey[str]):
         values = (2.1909389952632, 41.433791470673, "place1") + (
             2.1873744593677,
             41.406342043777,
@@ -2933,7 +2938,7 @@ async def test_georadiusmember(self, r: valkey.Valkey):
         ]
 
     @skip_if_server_version_lt("5.0.0")
-    async def test_xack(self, r: valkey.Valkey):
+    async def test_xack(self, r: valkey.asyncio.Valkey[str]):
         stream = "stream"
         group = "group"
         consumer = "consumer"
@@ -2954,7 +2959,7 @@ async def test_xack(self, r: valkey.Valkey):
         assert await r.xack(stream, group, m2, m3) == 2
 
     @skip_if_server_version_lt("5.0.0")
-    async def test_xadd(self, r: valkey.Valkey):
+    async def test_xadd(self, r: valkey.asyncio.Valkey[str]):
         stream = "stream"
         message_id = await r.xadd(stream, {"foo": "bar"})
         assert re.match(rb"[0-9]+\-[0-9]+", message_id)
@@ -2968,7 +2973,7 @@ async def test_xadd(self, r: valkey.Valkey):
         assert await r.xlen(stream) == 2
 
     @skip_if_server_version_lt("5.0.0")
-    async def test_xclaim(self, r: valkey.Valkey):
+    async def test_xclaim(self, r: valkey.asyncio.Valkey[str]):
         stream = "stream"
         group = "group"
         consumer1 = "consumer1"
@@ -3006,7 +3011,7 @@ async def test_xclaim(self, r: valkey.Valkey):
         ) == [message_id]
 
     @skip_if_server_version_lt("7.0.0")
-    async def test_xclaim_trimmed(self, r: valkey.Valkey):
+    async def test_xclaim_trimmed(self, r: valkey.asyncio.Valkey[str]):
         # xclaim should not raise an exception if the item is not there
         stream = "stream"
         group = "group"
@@ -3030,7 +3035,7 @@ async def test_xclaim_trimmed(self, r: valkey.Valkey):
         assert item[0][0] == sid2
 
     @skip_if_server_version_lt("5.0.0")
-    async def test_xdel(self, r: valkey.Valkey):
+    async def test_xdel(self, r: valkey.asyncio.Valkey[str]):
         stream = "stream"
 
         # deleting from an empty stream doesn't do anything
@@ -3045,7 +3050,7 @@ async def test_xdel(self, r: valkey.Valkey):
         assert await r.xdel(stream, m2, m3) == 2
 
     @skip_if_server_version_lt("7.0.0")
-    async def test_xgroup_create(self, r: valkey.Valkey):
+    async def test_xgroup_create(self, r: valkey.asyncio.Valkey[str]):
         # tests xgroup_create and xinfo_groups
         stream = "stream"
         group = "group"
@@ -3068,7 +3073,7 @@ async def test_xgroup_create(self, r: valkey.Valkey):
         assert await r.xinfo_groups(stream) == expected
 
     @skip_if_server_version_lt("7.0.0")
-    async def test_xgroup_create_mkstream(self, r: valkey.Valkey):
+    async def test_xgroup_create_mkstream(self, r: valkey.asyncio.Valkey[str]):
         # tests xgroup_create and xinfo_groups
         stream = "stream"
         group = "group"
@@ -3094,7 +3099,7 @@ async def test_xgroup_create_mkstream(self, r: valkey.Valkey):
         assert await r.xinfo_groups(stream) == expected
 
     @skip_if_server_version_lt("5.0.0")
-    async def test_xgroup_delconsumer(self, r: valkey.Valkey):
+    async def test_xgroup_delconsumer(self, r: valkey.asyncio.Valkey[str]):
         stream = "stream"
         group = "group"
         consumer = "consumer"
@@ -3112,7 +3117,7 @@ async def test_xgroup_delconsumer(self, r: valkey.Valkey):
         assert await r.xgroup_delconsumer(stream, group, consumer) == 2
 
     @skip_if_server_version_lt("5.0.0")
-    async def test_xgroup_destroy(self, r: valkey.Valkey):
+    async def test_xgroup_destroy(self, r: valkey.asyncio.Valkey[str]):
         stream = "stream"
         group = "group"
         await r.xadd(stream, {"foo": "bar"})
@@ -3124,7 +3129,7 @@ async def test_xgroup_destroy(self, r: valkey.Valkey):
         assert await r.xgroup_destroy(stream, group)
 
     @skip_if_server_version_lt("7.0.0")
-    async def test_xgroup_setid(self, r: valkey.Valkey):
+    async def test_xgroup_setid(self, r: valkey.asyncio.Valkey[str]):
         stream = "stream"
         group = "group"
         message_id = await r.xadd(stream, {"foo": "bar"})
@@ -3145,7 +3150,7 @@ async def test_xgroup_setid(self, r: valkey.Valkey):
         assert await r.xinfo_groups(stream) == expected
 
     @skip_if_server_version_lt("7.2.0")
-    async def test_xinfo_consumers(self, r: valkey.Valkey):
+    async def test_xinfo_consumers(self, r: valkey.asyncio.Valkey[str]):
         stream = "stream"
         group = "group"
         consumer1 = "consumer1"
@@ -3172,7 +3177,7 @@ async def test_xinfo_consumers(self, r: valkey.Valkey):
         assert info == expected
 
     @skip_if_server_version_lt("5.0.0")
-    async def test_xinfo_stream(self, r: valkey.Valkey):
+    async def test_xinfo_stream(self, r: valkey.asyncio.Valkey[str]):
         stream = "stream"
         m1 = await r.xadd(stream, {"foo": "bar"})
         m2 = await r.xadd(stream, {"foo": "bar"})
@@ -3189,7 +3194,7 @@ async def test_xinfo_stream(self, r: valkey.Valkey):
         assert info["last-entry"] is None
 
     @skip_if_server_version_lt("6.0.0")
-    async def test_xinfo_stream_full(self, r: valkey.Valkey):
+    async def test_xinfo_stream_full(self, r: valkey.asyncio.Valkey[str]):
         stream = "stream"
         group = "group"
 
@@ -3208,7 +3213,7 @@ async def test_xinfo_stream_full(self, r: valkey.Valkey):
         assert isinstance(consumer, dict)
 
     @skip_if_server_version_lt("5.0.0")
-    async def test_xlen(self, r: valkey.Valkey):
+    async def test_xlen(self, r: valkey.asyncio.Valkey[str]):
         stream = "stream"
         assert await r.xlen(stream) == 0
         await r.xadd(stream, {"foo": "bar"})
@@ -3216,7 +3221,7 @@ async def test_xlen(self, r: valkey.Valkey):
         assert await r.xlen(stream) == 2
 
     @skip_if_server_version_lt("5.0.0")
-    async def test_xpending(self, r: valkey.Valkey):
+    async def test_xpending(self, r: valkey.asyncio.Valkey[str]):
         stream = "stream"
         group = "group"
         consumer1 = "consumer1"
@@ -3226,7 +3231,7 @@ async def test_xpending(self, r: valkey.Valkey):
         await r.xgroup_create(stream, group, 0)
 
         # xpending on a group that has no consumers yet
-        expected = {"pending": 0, "min": None, "max": None, "consumers": []}
+        expected: dict[str, int | None | list[Any]] = {"pending": 0, "min": None, "max": None, "consumers": []}
         assert await r.xpending(stream, group) == expected
 
         # read 1 message from the group with each consumer
@@ -3245,7 +3250,7 @@ async def test_xpending(self, r: valkey.Valkey):
         assert await r.xpending(stream, group) == expected
 
     @skip_if_server_version_lt("5.0.0")
-    async def test_xpending_range(self, r: valkey.Valkey):
+    async def test_xpending_range(self, r: valkey.asyncio.Valkey[str]):
         stream = "stream"
         group = "group"
         consumer1 = "consumer1"
@@ -3269,7 +3274,7 @@ async def test_xpending_range(self, r: valkey.Valkey):
         assert response[1]["consumer"] == consumer2.encode()
 
     @skip_if_server_version_lt("5.0.0")
-    async def test_xrange(self, r: valkey.Valkey):
+    async def test_xrange(self, r: valkey.asyncio.Valkey[str]):
         stream = "stream"
         m1 = await r.xadd(stream, {"foo": "bar"})
         m2 = await r.xadd(stream, {"foo": "bar"})
@@ -3292,7 +3297,7 @@ def get_ids(results):
         assert get_ids(results) == [m1]
 
     @skip_if_server_version_lt("5.0.0")
-    async def test_xread(self, r: valkey.Valkey):
+    async def test_xread(self, r: valkey.asyncio.Valkey[str]):
         stream = "stream"
         m1 = await r.xadd(stream, {"foo": "bar"})
         m2 = await r.xadd(stream, {"bing": "baz"})
@@ -3323,7 +3328,7 @@ async def test_xread(self, r: valkey.Valkey):
         )
 
     @skip_if_server_version_lt("5.0.0")
-    async def test_xreadgroup(self, r: valkey.Valkey):
+    async def test_xreadgroup(self, r: valkey.asyncio.Valkey[str]):
         stream = "stream"
         group = "group"
         consumer = "consumer"
@@ -3390,7 +3395,7 @@ async def test_xreadgroup(self, r: valkey.Valkey):
         )
 
     @skip_if_server_version_lt("5.0.0")
-    async def test_xrevrange(self, r: valkey.Valkey):
+    async def test_xrevrange(self, r: valkey.asyncio.Valkey[str]):
         stream = "stream"
         m1 = await r.xadd(stream, {"foo": "bar"})
         m2 = await r.xadd(stream, {"foo": "bar"})
@@ -3413,7 +3418,7 @@ def get_ids(results):
         assert get_ids(results) == [m4]
 
     @skip_if_server_version_lt("5.0.0")
-    async def test_xtrim(self, r: valkey.Valkey):
+    async def test_xtrim(self, r: valkey.asyncio.Valkey[str]):
         stream = "stream"
 
         # trimming an empty key doesn't do anything
@@ -3432,7 +3437,7 @@ async def test_xtrim(self, r: valkey.Valkey):
         assert await r.xtrim(stream, 3, approximate=False) == 1
 
     @pytest.mark.onlynoncluster
-    async def test_bitfield_operations(self, r: valkey.Valkey):
+    async def test_bitfield_operations(self, r: valkey.asyncio.Valkey[str]):
         # comments show affected bits
         await r.execute_command("SELECT", 10)
         bf = r.bitfield("a")
@@ -3502,7 +3507,7 @@ async def test_bitfield_operations(self, r: valkey.Valkey):
         assert resp == [0, None, 255]
 
     @skip_if_server_version_lt("6.0.0")
-    async def test_bitfield_ro(self, r: valkey.Valkey):
+    async def test_bitfield_ro(self, r: valkey.asyncio.Valkey[str]):
         bf = r.bitfield("a")
         resp = await bf.set("u8", 8, 255).execute()
         assert resp == [0]
@@ -3515,7 +3520,7 @@ async def test_bitfield_ro(self, r: valkey.Valkey):
         assert resp == [0, 15, 15, 14]
 
     @skip_if_server_version_lt("4.0.0")
-    async def test_memory_stats(self, r: valkey.Valkey):
+    async def test_memory_stats(self, r: valkey.asyncio.Valkey[str]):
         # put a key into the current db to make sure that "db.<current-db>"
         # has data
         await r.set("foo", "bar")
@@ -3526,18 +3531,18 @@ async def test_memory_stats(self, r: valkey.Valkey):
                 assert not isinstance(value, list)
 
     @skip_if_server_version_lt("4.0.0")
-    async def test_memory_usage(self, r: valkey.Valkey):
+    async def test_memory_usage(self, r: valkey.asyncio.Valkey[str]):
         await r.set("foo", "bar")
         assert isinstance(await r.memory_usage("foo"), int)
 
     @skip_if_server_version_lt("4.0.0")
-    async def test_module_list(self, r: valkey.Valkey):
+    async def test_module_list(self, r: valkey.asyncio.Valkey[str]):
         assert isinstance(await r.module_list(), list)
         for x in await r.module_list():
             assert isinstance(x, dict)
 
     @pytest.mark.onlynoncluster
-    async def test_interrupted_command(self, r: valkey.Valkey):
+    async def test_interrupted_command(self, r: valkey.asyncio.Valkey[str]):
         """
         Regression test for issue #1128:  An Un-handled BaseException
         will leave the socket with un-read response to a previous
@@ -3554,7 +3559,7 @@ async def helper():
             # because the timeout won't catch its Cancelled Error if the task
             # has a pending cancel.  Python documentation probably should reflect this.
             if sys.version_info >= (3, 11):
-                asyncio.current_task().uncancel()
+                asyncio.current_task().uncancel()  # type: ignore[union-attr]
             # if all is well, we can continue.  The following should not hang.
             await r.set("status", "down")
 
@@ -3570,7 +3575,7 @@ async def helper():
 
 @pytest.mark.onlynoncluster
 class TestBinarySave:
-    async def test_binary_get_set(self, r: valkey.Valkey):
+    async def test_binary_get_set(self, r: valkey.asyncio.Valkey[bytes]):
         assert await r.set(" foo bar ", "123")
         assert await r.get(" foo bar ") == b"123"
 
@@ -3590,7 +3595,7 @@ async def test_binary_get_set(self, r: valkey.Valkey):
         assert await r.delete(" foo\r\nbar\r\n ")
         assert await r.delete(" \r\n\t\x07\x13 ")
 
-    async def test_binary_lists(self, r: valkey.Valkey):
+    async def test_binary_lists(self, r: valkey.asyncio.Valkey[bytes]):
         mapping = {
             b"foo bar": [b"1", b"2", b"3"],
             b"foo\r\nbar\r\n": [b"4", b"5", b"6"],
@@ -3607,7 +3612,7 @@ async def test_binary_lists(self, r: valkey.Valkey):
         for key, value in mapping.items():
             assert await r.lrange(key, 0, -1) == value
 
-    async def test_22_info(self, r: valkey.Valkey):
+    async def test_22_info(self, r: valkey.asyncio.Valkey[str]):
         info = (
             "allocation_stats:6=1,7=1,8=7141,9=180,10=92,11=116,12=5330,"
             "13=123,14=3091,15=11048,16=225842,17=1784,18=814,19=12020,"
@@ -3639,14 +3644,14 @@ async def test_22_info(self, r: valkey.Valkey):
         assert "6" in parsed["allocation_stats"]
         assert ">=256" in parsed["allocation_stats"]
 
-    async def test_large_responses(self, r: valkey.Valkey):
+    async def test_large_responses(self, r: valkey.asyncio.Valkey[bytes]):
         """The PythonParser has some special cases for return values > 1MB"""
         # load up 5MB of data into a key
         data = "".join([ascii_letters] * (5000000 // len(ascii_letters)))
         await r.set("a", data)
         assert await r.get("a") == data.encode()
 
-    async def test_floating_point_encoding(self, r: valkey.Valkey):
+    async def test_floating_point_encoding(self, r: valkey.asyncio.Valkey[str]):
         """
         High precision floating point values sent to the server should keep
         precision.
diff --git a/tests/test_asyncio/test_pipeline.py b/tests/test_asyncio/test_pipeline.py
index 5021f91c..cb28b0ff 100644
--- a/tests/test_asyncio/test_pipeline.py
+++ b/tests/test_asyncio/test_pipeline.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import pytest
 import valkey
 from tests.conftest import skip_if_server_version_lt
@@ -308,7 +310,7 @@ async def test_aclosing(self, r):
     async def test_transaction_callable(self, r):
         await r.set("a", 1)
         await r.set("b", 2)
-        has_run = []
+        has_run: list[str] = []
 
         async def my_transaction(pipe):
             a_value = await pipe.get("a")
diff --git a/tests/test_asyncio/test_pubsub.py b/tests/test_asyncio/test_pubsub.py
index 8afb2256..517177e4 100644
--- a/tests/test_asyncio/test_pubsub.py
+++ b/tests/test_asyncio/test_pubsub.py
@@ -8,9 +8,9 @@
 # the functionality is available in 3.11.x but has a major issue before
 # 3.11.3. See https://github.com/redis/redis-py/issues/2633
 if sys.version_info >= (3, 11, 3):
-    from asyncio import timeout as async_timeout
+    from asyncio import timeout as async_timeout  # type: ignore[unused-ignore,assignment,no-redef,import-not-found,attr-defined]
 else:
-    from async_timeout import timeout as async_timeout
+    from async_timeout import timeout as async_timeout  # type: ignore[unused-ignore,assignment,no-redef,import-not-found]
 
 import pytest
 import pytest_asyncio
@@ -23,7 +23,7 @@
 from .compat import aclosing, create_task, mock
 
 
-def with_timeout(t):
+def with_timeout(t: int):
     def wrapper(corofunc):
         @functools.wraps(corofunc)
         async def run(*args, **kwargs):
@@ -83,7 +83,7 @@ def make_subscribe_test_data(pubsub, type):
 
 
 @pytest_asyncio.fixture()
-async def pubsub(r: valkey.Valkey):
+async def pubsub(r: valkey.Valkey[bytes]):
     async with r.pubsub() as p:
         yield p
 
@@ -214,7 +214,7 @@ async def test_subscribe_property_with_patterns(self, pubsub):
         kwargs = make_subscribe_test_data(pubsub, "pattern")
         await self._test_subscribed_property(**kwargs)
 
-    async def test_aclosing(self, r: valkey.Valkey):
+    async def test_aclosing(self, r: valkey.Valkey[str]):
         p = r.pubsub()
         async with aclosing(p):
             assert p.subscribed is False
@@ -222,7 +222,7 @@ async def test_aclosing(self, r: valkey.Valkey):
             assert p.subscribed is True
         assert p.subscribed is False
 
-    async def test_context_manager(self, r: valkey.Valkey):
+    async def test_context_manager(self, r: valkey.Valkey[str]):
         p = r.pubsub()
         async with p:
             assert p.subscribed is False
@@ -230,7 +230,7 @@ async def test_context_manager(self, r: valkey.Valkey):
             assert p.subscribed is True
         assert p.subscribed is False
 
-    async def test_close_is_aclose(self, r: valkey.Valkey):
+    async def test_close_is_aclose(self, r: valkey.Valkey[str]):
         """
         Test backwards compatible close method
         """
@@ -242,7 +242,7 @@ async def test_close_is_aclose(self, r: valkey.Valkey):
             await p.close()
         assert p.subscribed is False
 
-    async def test_reset_is_aclose(self, r: valkey.Valkey):
+    async def test_reset_is_aclose(self, r: valkey.Valkey[str]):
         """
         Test backwards compatible reset method
         """
@@ -254,7 +254,7 @@ async def test_reset_is_aclose(self, r: valkey.Valkey):
             await p.reset()
         assert p.subscribed is False
 
-    async def test_ignore_all_subscribe_messages(self, r: valkey.Valkey):
+    async def test_ignore_all_subscribe_messages(self, r: valkey.Valkey[str]):
         p = r.pubsub(ignore_subscribe_messages=True)
 
         checks = (
@@ -347,7 +347,7 @@ def message_handler(self, message):
     async def async_message_handler(self, message):
         self.async_message = message
 
-    async def test_published_message_to_channel(self, r: valkey.Valkey, pubsub):
+    async def test_published_message_to_channel(self, r: valkey.Valkey[str], pubsub):
         p = pubsub
         await p.subscribe("foo")
         assert await wait_for_message(p) == make_message("subscribe", "foo", 1)
@@ -357,7 +357,7 @@ async def test_published_message_to_channel(self, r: valkey.Valkey, pubsub):
         assert isinstance(message, dict)
         assert message == make_message("message", "foo", "test message")
 
-    async def test_published_message_to_pattern(self, r: valkey.Valkey, pubsub):
+    async def test_published_message_to_pattern(self, r: valkey.Valkey[str], pubsub):
         p = pubsub
         await p.subscribe("foo")
         await p.psubscribe("f*")
@@ -380,7 +380,7 @@ async def test_published_message_to_pattern(self, r: valkey.Valkey, pubsub):
         assert message2 in expected
         assert message1 != message2
 
-    async def test_channel_message_handler(self, r: valkey.Valkey):
+    async def test_channel_message_handler(self, r: valkey.Valkey[str]):
         p = r.pubsub(ignore_subscribe_messages=True)
         await p.subscribe(foo=self.message_handler)
         assert await wait_for_message(p) is None
@@ -411,7 +411,7 @@ async def test_channel_sync_async_message_handler(self, r):
         await p.aclose()
 
     @pytest.mark.onlynoncluster
-    async def test_pattern_message_handler(self, r: valkey.Valkey):
+    async def test_pattern_message_handler(self, r: valkey.Valkey[str]):
         p = r.pubsub(ignore_subscribe_messages=True)
         await p.psubscribe(**{"f*": self.message_handler})
         assert await wait_for_message(p) is None
@@ -422,7 +422,7 @@ async def test_pattern_message_handler(self, r: valkey.Valkey):
         )
         await p.aclose()
 
-    async def test_unicode_channel_message_handler(self, r: valkey.Valkey):
+    async def test_unicode_channel_message_handler(self, r: valkey.Valkey[str]):
         p = r.pubsub(ignore_subscribe_messages=True)
         channel = "uni" + chr(4456) + "code"
         channels = {channel: self.message_handler}
@@ -436,7 +436,7 @@ async def test_unicode_channel_message_handler(self, r: valkey.Valkey):
     @pytest.mark.onlynoncluster
     # see: https://valkey-py-cluster.readthedocs.io/en/stable/pubsub.html
     # #known-limitations-with-pubsub
-    async def test_unicode_pattern_message_handler(self, r: valkey.Valkey):
+    async def test_unicode_pattern_message_handler(self, r: valkey.Valkey[str]):
         p = r.pubsub(ignore_subscribe_messages=True)
         pattern = "uni" + chr(4456) + "*"
         channel = "uni" + chr(4456) + "code"
@@ -449,7 +449,7 @@ async def test_unicode_pattern_message_handler(self, r: valkey.Valkey):
         )
         await p.aclose()
 
-    async def test_get_message_without_subscribe(self, r: valkey.Valkey, pubsub):
+    async def test_get_message_without_subscribe(self, r: valkey.Valkey[str], pubsub):
         p = pubsub
         with pytest.raises(RuntimeError) as info:
             await p.get_message()
@@ -522,7 +522,7 @@ async def test_pattern_subscribe_unsubscribe(self, pubsub):
             "punsubscribe", self.pattern, 0
         )
 
-    async def test_channel_publish(self, r: valkey.Valkey, pubsub):
+    async def test_channel_publish(self, r: valkey.Valkey[str], pubsub):
         p = pubsub
         await p.subscribe(self.channel)
         assert await wait_for_message(p) == self.make_message(
@@ -534,7 +534,7 @@ async def test_channel_publish(self, r: valkey.Valkey, pubsub):
         )
 
     @pytest.mark.onlynoncluster
-    async def test_pattern_publish(self, r: valkey.Valkey, pubsub):
+    async def test_pattern_publish(self, r: valkey.Valkey[str], pubsub):
         p = pubsub
         await p.psubscribe(self.pattern)
         assert await wait_for_message(p) == self.make_message(
@@ -545,7 +545,7 @@ async def test_pattern_publish(self, r: valkey.Valkey, pubsub):
             "pmessage", self.channel, self.data, pattern=self.pattern
         )
 
-    async def test_channel_message_handler(self, r: valkey.Valkey):
+    async def test_channel_message_handler(self, r: valkey.Valkey[str]):
         p = r.pubsub(ignore_subscribe_messages=True)
         await p.subscribe(**{self.channel: self.message_handler})
         assert await wait_for_message(p) is None
@@ -563,7 +563,7 @@ async def test_channel_message_handler(self, r: valkey.Valkey):
         assert self.message == self.make_message("message", self.channel, new_data)
         await p.aclose()
 
-    async def test_pattern_message_handler(self, r: valkey.Valkey):
+    async def test_pattern_message_handler(self, r: valkey.Valkey[str]):
         p = r.pubsub(ignore_subscribe_messages=True)
         await p.psubscribe(**{self.pattern: self.message_handler})
         assert await wait_for_message(p) is None
@@ -585,7 +585,7 @@ async def test_pattern_message_handler(self, r: valkey.Valkey):
         )
         await p.aclose()
 
-    async def test_context_manager(self, r: valkey.Valkey):
+    async def test_context_manager(self, r: valkey.Valkey[str]):
         async with r.pubsub() as pubsub:
             await pubsub.subscribe("foo")
             assert pubsub.connection is not None
@@ -598,7 +598,7 @@ async def test_context_manager(self, r: valkey.Valkey):
 
 @pytest.mark.onlynoncluster
 class TestPubSubValkeyDown:
-    async def test_channel_subscribe(self, r: valkey.Valkey):
+    async def test_channel_subscribe(self):
         r = valkey.Valkey(host="localhost", port=6390)
         p = r.pubsub()
         with pytest.raises(ConnectionError):
@@ -609,17 +609,17 @@ async def test_channel_subscribe(self, r: valkey.Valkey):
 class TestPubSubSubcommands:
     @pytest.mark.onlynoncluster
     @skip_if_server_version_lt("2.8.0")
-    async def test_pubsub_channels(self, r: valkey.Valkey, pubsub):
+    async def test_pubsub_channels(self, r: valkey.Valkey[bytes], pubsub):
         p = pubsub
         await p.subscribe("foo", "bar", "baz", "quux")
         for i in range(4):
             assert (await wait_for_message(p))["type"] == "subscribe"
         expected = [b"bar", b"baz", b"foo", b"quux"]
-        assert all([channel in await r.pubsub_channels() for channel in expected])
+        assert all([channel in await r.pubsub_channels() for channel in expected])  # type: ignore[comparison-overlap]
 
     @pytest.mark.onlynoncluster
     @skip_if_server_version_lt("2.8.0")
-    async def test_pubsub_numsub(self, r: valkey.Valkey):
+    async def test_pubsub_numsub(self, r: valkey.Valkey[bytes]):
         p1 = r.pubsub()
         await p1.subscribe("foo", "bar", "baz")
         for i in range(3):
@@ -633,13 +633,13 @@ async def test_pubsub_numsub(self, r: valkey.Valkey):
         assert (await wait_for_message(p3))["type"] == "subscribe"
 
         channels = [(b"foo", 1), (b"bar", 2), (b"baz", 3)]
-        assert await r.pubsub_numsub("foo", "bar", "baz") == channels
+        assert await r.pubsub_numsub("foo", "bar", "baz") == channels  # type: ignore[comparison-overlap]
         await p1.aclose()
         await p2.aclose()
         await p3.aclose()
 
     @skip_if_server_version_lt("2.8.0")
-    async def test_pubsub_numpat(self, r: valkey.Valkey):
+    async def test_pubsub_numpat(self, r: valkey.Valkey[str]):
         p = r.pubsub()
         await p.psubscribe("*oo", "*ar", "b*z")
         for i in range(3):
@@ -651,7 +651,7 @@ async def test_pubsub_numpat(self, r: valkey.Valkey):
 @pytest.mark.onlynoncluster
 class TestPubSubPings:
     @skip_if_server_version_lt("3.0.0")
-    async def test_send_pubsub_ping(self, r: valkey.Valkey):
+    async def test_send_pubsub_ping(self, r: valkey.Valkey[str]):
         p = r.pubsub(ignore_subscribe_messages=True)
         await p.subscribe("foo")
         await p.ping()
@@ -661,7 +661,7 @@ async def test_send_pubsub_ping(self, r: valkey.Valkey):
         await p.aclose()
 
     @skip_if_server_version_lt("3.0.0")
-    async def test_send_pubsub_ping_message(self, r: valkey.Valkey):
+    async def test_send_pubsub_ping_message(self, r: valkey.Valkey[str]):
         p = r.pubsub(ignore_subscribe_messages=True)
         await p.subscribe("foo")
         await p.ping(message="hello world")
@@ -675,7 +675,7 @@ async def test_send_pubsub_ping_message(self, r: valkey.Valkey):
 class TestPubSubConnectionKilled:
     @skip_if_server_version_lt("3.0.0")
     async def test_connection_error_raised_when_connection_dies(
-        self, r: valkey.Valkey, pubsub
+        self, r: valkey.Valkey[str], pubsub
     ):
         p = pubsub
         await p.subscribe("foo")
@@ -698,13 +698,13 @@ async def test_get_message_with_timeout_returns_none(self, pubsub):
 
 @pytest.mark.onlynoncluster
 class TestPubSubReconnect:
-    @with_timeout(2)
-    async def test_reconnect_listen(self, r: valkey.Valkey, pubsub):
+    @with_timeout(2)  # type: ignore[misc]
+    async def test_reconnect_listen(self, r: valkey.Valkey[str], pubsub):
         """
         Test that a loop processing PubSub messages can survive
         a disconnect, by issuing a connect() call.
         """
-        messages = asyncio.Queue()
+        messages = asyncio.Queue()  # type: ignore[var-annotated]
         interrupt = False
 
         async def loop():
@@ -775,11 +775,11 @@ async def _subscribe(self, p, *args, **kwargs):
             ):
                 return
 
-    async def test_callbacks(self, r: valkey.Valkey, pubsub):
+    async def test_callbacks(self, r: valkey.Valkey[str], pubsub):
         def callback(message):
             messages.put_nowait(message)
 
-        messages = asyncio.Queue()
+        messages = asyncio.Queue()  # type: ignore[var-annotated]
         p = pubsub
         await self._subscribe(p, foo=callback)
         task = asyncio.get_running_loop().create_task(p.run())
@@ -797,12 +797,12 @@ def callback(message):
             "type": "message",
         }
 
-    async def test_exception_handler(self, r: valkey.Valkey, pubsub):
+    async def test_exception_handler(self, r: valkey.Valkey[str], pubsub):
         def exception_handler_callback(e, pubsub) -> None:
             assert pubsub == p
             exceptions.put_nowait(e)
 
-        exceptions = asyncio.Queue()
+        exceptions = asyncio.Queue()  # type: ignore[var-annotated]
         p = pubsub
         await self._subscribe(p, foo=lambda x: None)
         with mock.patch.object(p, "get_message", side_effect=Exception("error")):
@@ -817,11 +817,11 @@ def exception_handler_callback(e, pubsub) -> None:
                 pass
         assert str(e) == "error"
 
-    async def test_late_subscribe(self, r: valkey.Valkey, pubsub):
+    async def test_late_subscribe(self, r: valkey.Valkey[str], pubsub):
         def callback(message):
             messages.put_nowait(message)
 
-        messages = asyncio.Queue()
+        messages = asyncio.Queue()  # type: ignore[var-annotated]
         p = pubsub
         task = asyncio.get_running_loop().create_task(p.run())
         # wait until loop gets settled.  Add a subscription
@@ -856,7 +856,7 @@ class TestPubSubAutoReconnect:
     timeout = 2
 
     async def mysetup(self, r, method):
-        self.messages = asyncio.Queue()
+        self.messages = asyncio.Queue()  # type: ignore[var-annotated]
         self.pubsub = r.pubsub()
         # State: 0 = initial state , 1 = after disconnect, 2 = ConnectionError is seen,
         # 3=successfully reconnected 4 = exit
@@ -892,7 +892,7 @@ async def mykill(self):
             self.state = 4  # quit
         await self.task
 
-    async def test_reconnect_socket_error(self, r: valkey.Valkey, method):
+    async def test_reconnect_socket_error(self, r: valkey.Valkey[str], method):
         """
         Test that a socket error will cause reconnect
         """
@@ -921,7 +921,7 @@ async def test_reconnect_socket_error(self, r: valkey.Valkey, method):
         finally:
             await self.mykill()
 
-    async def test_reconnect_disconnect(self, r: valkey.Valkey, method):
+    async def test_reconnect_disconnect(self, r: valkey.Valkey[str], method):
         """
         Test that a manual disconnect() will cause reconnect
         """
@@ -992,7 +992,7 @@ class TestBaseException:
     @pytest.mark.skipif(
         sys.version_info < (3, 8), reason="requires python 3.8 or higher"
     )
-    async def test_outer_timeout(self, r: valkey.Valkey):
+    async def test_outer_timeout(self, r: valkey.Valkey[str]):
         """
         Using asyncio_timeout manually outside the inner method timeouts works.
         This works on Python versions 3.8 and greater, at which time asyncio.
@@ -1026,7 +1026,7 @@ async def get_msg_or_timeout(timeout=0.1):
     @pytest.mark.skipif(
         sys.version_info < (3, 8), reason="requires python 3.8 or higher"
     )
-    async def test_base_exception(self, r: valkey.Valkey):
+    async def test_base_exception(self, r: valkey.Valkey[str]):
         """
         Manually trigger a BaseException inside the parser's .read_response method
         and verify that it isn't caught
diff --git a/tests/test_cache.py b/tests/test_cache.py
index 63784101..25792fa5 100644
--- a/tests/test_cache.py
+++ b/tests/test_cache.py
@@ -8,7 +8,9 @@
 from tests.conftest import _get_client
 from valkey import ValkeyError
 from valkey._cache import AbstractCache, EvictionPolicy, _LocalCache
-from valkey.typing import KeyT, ResponseT
+
+# It is defined, just not in __all__
+from valkey.typing import KeyT, ResponseT  # type: ignore[attr-defined]
 from valkey.utils import LIBVALKEY_AVAILABLE
 
 
@@ -529,7 +531,7 @@ def test_cache_decode_response(self, local_cache, sentinel_setup, master):
 class TestCustomCache:
     class _CustomCache(AbstractCache):
         def __init__(self):
-            self.responses = cachetools.LRUCache(maxsize=1000)
+            self.responses = cachetools.LRUCache(maxsize=1000)  # type: ignore[var-annotated]
             self.keys_to_commands = defaultdict(list)
             self.commands_to_keys = defaultdict(list)
 
diff --git a/tests/test_commands.py b/tests/test_commands.py
index ec8074fc..593f2bb9 100644
--- a/tests/test_commands.py
+++ b/tests/test_commands.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import binascii
 import datetime
 import math
@@ -6,6 +8,7 @@
 import time
 from asyncio import CancelledError
 from string import ascii_letters
+from typing import Any
 from unittest import mock
 from unittest.mock import patch
 
@@ -545,7 +548,7 @@ def test_client_setname(self, r):
         assert_resp_response(r, r.client_getname(), "valkey_py_test", b"valkey_py_test")
 
     @skip_if_server_version_lt("7.2.0")
-    def test_client_setinfo(self, r: valkey.Valkey):
+    def test_client_setinfo(self, r: valkey.Valkey[str]):
         r.ping()
         info = r.client_info()
         assert info["lib-name"] == "valkey-py"
@@ -776,7 +779,7 @@ def test_config_get(self, r):
         # assert data['maxmemory'].isdigit()
 
     @skip_if_server_version_lt("7.0.0")
-    def test_config_get_multi_params(self, r: valkey.Valkey):
+    def test_config_get_multi_params(self, r: valkey.Valkey[str]):
         res = r.config_get("*max-*-entries*", "maxmemory")
         assert "maxmemory" in res
         assert "hash-max-listpack-entries" in res
@@ -797,7 +800,7 @@ def test_config_set(self, r):
         assert r.config_get()["timeout"] == "0"
 
     @skip_if_server_version_lt("7.0.0")
-    def test_config_set_multi_params(self, r: valkey.Valkey):
+    def test_config_set_multi_params(self, r: valkey.Valkey[str]):
         r.config_set("timeout", 70, "maxmemory", 100)
         assert r.config_get()["timeout"] == "70"
         assert r.config_get()["maxmemory"] == "100"
@@ -960,13 +963,13 @@ def test_bgsave(self, r):
         time.sleep(0.3)
         assert r.bgsave(True)
 
-    def test_never_decode_option(self, r: valkey.Valkey):
-        opts = {NEVER_DECODE: []}
+    def test_never_decode_option(self, r: valkey.Valkey[str]):
+        opts: dict[str, list[Any]] = {NEVER_DECODE: []}
         r.delete("a")
         assert r.execute_command("EXISTS", "a", **opts) == 0
 
-    def test_empty_response_option(self, r: valkey.Valkey):
-        opts = {EMPTY_RESPONSE: []}
+    def test_empty_response_option(self, r: valkey.Valkey[str]):
+        opts: dict[str, list[Any]] = {EMPTY_RESPONSE: []}
         r.delete("a")
         assert r.execute_command("EXISTS", "a", **opts) == 0
 
@@ -2839,7 +2842,7 @@ def test_zrank(self, r):
         assert r.zrank("a", "a6") is None
 
     @skip_if_server_version_lt("7.2.0")
-    def test_zrank_withscore(self, r: valkey.Valkey):
+    def test_zrank_withscore(self, r: valkey.Valkey[str]):
         r.zadd("a", {"a1": 1, "a2": 2, "a3": 3, "a4": 4, "a5": 5})
         assert r.zrank("a", "a1") == 0
         assert r.zrank("a", "a2") == 1
@@ -3457,7 +3460,7 @@ def test_geoadd(self, r):
 
     @skip_if_server_version_lt("6.2.0")
     def test_geoadd_nx(self, r):
-        values = (2.1909389952632, 41.433791470673, "place1") + (
+        values: Any = (2.1909389952632, 41.433791470673, "place1") + (
             2.1873744593677,
             41.406342043777,
             "place2",
@@ -3473,7 +3476,7 @@ def test_geoadd_nx(self, r):
 
     @skip_if_server_version_lt("6.2.0")
     def test_geoadd_xx(self, r):
-        values = (2.1909389952632, 41.433791470673, "place1")
+        values: Any = (2.1909389952632, 41.433791470673, "place1")
         assert r.geoadd("a", values) == 1
         values = (2.1909389952632, 41.433791470673, "place1") + (
             2.1873744593677,
@@ -3485,7 +3488,7 @@ def test_geoadd_xx(self, r):
 
     @skip_if_server_version_lt("6.2.0")
     def test_geoadd_ch(self, r):
-        values = (2.1909389952632, 41.433791470673, "place1")
+        values: Any = (2.1909389952632, 41.433791470673, "place1")
         assert r.geoadd("a", values) == 1
         values = (2.1909389952632, 31.433791470673, "place1") + (
             2.1873744593677,
@@ -4106,7 +4109,7 @@ def test_xadd_minlen_and_limit(self, r):
         assert r.xadd(stream, {"foo": "bar"}, approximate=True, minid=m3)
 
     @skip_if_server_version_lt("7.0.0")
-    def test_xadd_explicit_ms(self, r: valkey.Valkey):
+    def test_xadd_explicit_ms(self, r: valkey.Valkey[str]):
         stream = "stream"
         message_id = r.xadd(stream, {"foo": "bar"}, "9999999999999999999-*")
         ms = message_id[: message_id.index(b"-")]
@@ -4283,7 +4286,7 @@ def test_xgroup_create_mkstream(self, r):
         assert r.xinfo_groups(stream) == expected
 
     @skip_if_server_version_lt("7.0.0")
-    def test_xgroup_create_entriesread(self, r: valkey.Valkey):
+    def test_xgroup_create_entriesread(self, r: valkey.Valkey[str]):
         stream = "stream"
         group = "group"
         r.xadd(stream, {"foo": "bar"})
@@ -4462,7 +4465,7 @@ def test_xpending(self, r):
         r.xgroup_create(stream, group, 0)
 
         # xpending on a group that has no consumers yet
-        expected = {"pending": 0, "min": None, "max": None, "consumers": []}
+        expected: dict[str, Any] = {"pending": 0, "min": None, "max": None, "consumers": []}
         assert r.xpending(stream, group) == expected
 
         # read 1 message from the group with each consumer
@@ -4841,7 +4844,7 @@ def test_bitfield_operations(self, r):
         assert resp == [0, None, 255]
 
     @skip_if_server_version_lt("6.0.0")
-    def test_bitfield_ro(self, r: valkey.Valkey):
+    def test_bitfield_ro(self, r: valkey.Valkey[str]):
         bf = r.bitfield("a")
         resp = bf.set("u8", 8, 255).execute()
         assert resp == [0]
@@ -4885,25 +4888,25 @@ def test_memory_usage(self, r):
         assert isinstance(r.memory_usage("foo"), int)
 
     @skip_if_server_version_lt("7.0.0")
-    def test_latency_histogram_not_implemented(self, r: valkey.Valkey):
+    def test_latency_histogram_not_implemented(self, r: valkey.Valkey[str]):
         with pytest.raises(NotImplementedError):
             r.latency_histogram()
 
-    def test_latency_graph_not_implemented(self, r: valkey.Valkey):
+    def test_latency_graph_not_implemented(self, r: valkey.Valkey[str]):
         with pytest.raises(NotImplementedError):
             r.latency_graph()
 
-    def test_latency_doctor_not_implemented(self, r: valkey.Valkey):
+    def test_latency_doctor_not_implemented(self, r: valkey.Valkey[str]):
         with pytest.raises(NotImplementedError):
             r.latency_doctor()
 
-    def test_latency_history(self, r: valkey.Valkey):
+    def test_latency_history(self, r: valkey.Valkey[str]):
         assert r.latency_history("command") == []
 
-    def test_latency_latest(self, r: valkey.Valkey):
+    def test_latency_latest(self, r: valkey.Valkey[str]):
         assert r.latency_latest() == []
 
-    def test_latency_reset(self, r: valkey.Valkey):
+    def test_latency_reset(self, r: valkey.Valkey[str]):
         assert r.latency_reset() == 0
 
     @skip_if_server_version_lt("4.0.0")
@@ -4924,7 +4927,7 @@ def test_command_docs(self, r):
             r.command_docs("set")
 
     @skip_if_server_version_lt("7.0.0")
-    def test_command_list(self, r: valkey.Valkey):
+    def test_command_list(self, r: valkey.Valkey[str]):
         assert len(r.command_list()) > 300
         assert len(r.command_list(module="fakemod")) == 0
         assert len(r.command_list(category="list")) > 15
@@ -4963,7 +4966,7 @@ def test_command(self, r):
 
     @pytest.mark.onlynoncluster
     @skip_if_server_version_lt("7.0.0")
-    def test_command_getkeysandflags(self, r: valkey.Valkey):
+    def test_command_getkeysandflags(self, r: valkey.Valkey[str]):
         res = r.command_getkeysandflags("LMOVE", "mylist1", "mylist2", "left", "left")
         assert res == [
             [b"mylist1", [b"RW", b"access", b"delete"]],
@@ -4983,7 +4986,7 @@ def test_module(self, r):
 
     @pytest.mark.onlynoncluster
     @skip_if_server_version_lt("7.0.0")
-    def test_module_loadex(self, r: valkey.Valkey):
+    def test_module_loadex(self, r: valkey.Valkey[str]):
         with pytest.raises(valkey.exceptions.ModuleError) as excinfo:
             r.module_loadex("/some/fake/path")
             assert "Error loading the extension." in str(excinfo.value)
@@ -5042,14 +5045,14 @@ def test_replicaof(self, r):
             assert r.replicaof("NO ONE")
         assert r.replicaof("NO", "ONE")
 
-    def test_shutdown(self, r: valkey.Valkey):
-        r.execute_command = mock.MagicMock()
+    def test_shutdown(self, r: valkey.Valkey[str]):
+        r.execute_command = mock.MagicMock()  # type: ignore[method-assign]
         r.execute_command("SHUTDOWN", "NOSAVE")
         r.execute_command.assert_called_once_with("SHUTDOWN", "NOSAVE")
 
     @skip_if_server_version_lt("7.0.0")
-    def test_shutdown_with_params(self, r: valkey.Valkey):
-        r.execute_command = mock.MagicMock()
+    def test_shutdown_with_params(self, r: valkey.Valkey[str]):
+        r.execute_command = mock.MagicMock()  # type: ignore[method-assign]
         r.execute_command("SHUTDOWN", "SAVE", "NOW", "FORCE")
         r.execute_command.assert_called_once_with("SHUTDOWN", "SAVE", "NOW", "FORCE")
         r.execute_command("SHUTDOWN", "ABORT")
@@ -5073,7 +5076,7 @@ def test_psync(self, r):
         assert b"FULLRESYNC" in res
 
     @pytest.mark.onlynoncluster
-    def test_interrupted_command(self, r: valkey.Valkey):
+    def test_interrupted_command(self, r: valkey.Valkey[str]):
         """
         Regression test for issue #1128:  An Un-handled BaseException
         will leave the socket with un-read response to a previous
diff --git a/tests/test_pipeline.py b/tests/test_pipeline.py
index 065f898c..9fd76656 100644
--- a/tests/test_pipeline.py
+++ b/tests/test_pipeline.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 from contextlib import closing
 from unittest import mock
 
@@ -309,7 +311,7 @@ def test_closing(self, r):
     def test_transaction_callable(self, r):
         r["a"] = 1
         r["b"] = 2
-        has_run = []
+        has_run: list[str] = []
 
         def my_transaction(pipe):
             a_value = pipe.get("a")
diff --git a/tests/test_pubsub.py b/tests/test_pubsub.py
index 01b5dee8..76809fee 100644
--- a/tests/test_pubsub.py
+++ b/tests/test_pubsub.py
@@ -115,7 +115,7 @@ def test_shard_channel_subscribe_unsubscribe(self, r):
     @pytest.mark.onlycluster
     @skip_if_server_version_lt("7.0.0")
     def test_shard_channel_subscribe_unsubscribe_cluster(self, r):
-        node_channels = defaultdict(int)
+        node_channels = defaultdict(int)  # type: ignore[var-annotated]
         p = r.pubsub()
         keys = {
             "foo": r.get_node_from_key("foo"),
@@ -632,7 +632,7 @@ def message_handler(self, message):
 
     @pytest.fixture()
     def r(self, request):
-        return _get_client(valkey.Valkey, request=request, decode_responses=True)
+        return _get_client(valkey.Valkey[str], request=request, decode_responses=True)
 
     def test_channel_subscribe_unsubscribe(self, r):
         p = r.pubsub()
@@ -768,7 +768,7 @@ def test_context_manager(self, r):
 
 class TestPubSubValkeyDown:
     def test_channel_subscribe(self, r):
-        r = valkey.Valkey(host="localhost", port=6390)
+        r = valkey.Valkey[str](host="localhost", port=6390)
         p = r.pubsub()
         with pytest.raises(ConnectionError):
             p.subscribe("foo")
@@ -845,7 +845,7 @@ def test_pubsub_numpat(self, r):
 
     @pytest.mark.onlycluster
     @skip_if_server_version_lt("7.0.0")
-    def test_pubsub_shardnumsub(self, r):
+    def test_pubsub_shardnumsub(self, r: valkey.ValkeyCluster[bytes]):
         channels = {
             b"foo": r.get_node_from_key("foo"),
             b"bar": r.get_node_from_key("bar"),
@@ -866,8 +866,8 @@ def test_pubsub_shardnumsub(self, r):
         p3.ssubscribe("baz")
         assert wait_for_message(p3, node=channels[b"baz"])["type"] == "ssubscribe"
 
-        channels = [(b"foo", 1), (b"bar", 2), (b"baz", 3)]
-        assert r.pubsub_shardnumsub("foo", "bar", "baz", target_nodes="all") == channels
+        channels_names = [(b"foo", 1), (b"bar", 2), (b"baz", 3)]
+        assert r.pubsub_shardnumsub("foo", "bar", "baz", target_nodes="all") == channels_names  # type: ignore[attr-defined]
 
 
 class TestPubSubPings:
@@ -972,7 +972,7 @@ class TestPubSubDeadlock:
     @pytest.mark.timeout(30, method="thread")
     def test_pubsub_deadlock(self, master_host):
         pool = valkey.ConnectionPool(host=master_host[0], port=master_host[1])
-        r = valkey.Valkey(connection_pool=pool)
+        r = valkey.Valkey[str](connection_pool=pool)
 
         for i in range(60):
             p = r.pubsub()
@@ -985,7 +985,7 @@ def test_pubsub_deadlock(self, master_host):
 @pytest.mark.onlynoncluster
 class TestPubSubAutoReconnect:
     def mysetup(self, r, method):
-        self.messages = queue.Queue()
+        self.messages = queue.Queue()  # type: ignore[var-annotated]
         self.pubsub = r.pubsub()
         self.state = 0
         self.cond = threading.Condition()
@@ -1026,7 +1026,7 @@ def mycleanup(self):
             self.cond.notify()
         self.thread.join()
 
-    def test_reconnect_socket_error(self, r: valkey.Valkey, method):
+    def test_reconnect_socket_error(self, r: valkey.Valkey[str], method):
         """
         Test that a socket error will cause reconnect
         """
@@ -1048,7 +1048,7 @@ def test_reconnect_socket_error(self, r: valkey.Valkey, method):
         finally:
             self.mycleanup()
 
-    def test_reconnect_disconnect(self, r: valkey.Valkey, method):
+    def test_reconnect_disconnect(self, r: valkey.Valkey[str], method):
         """
         Test that a manual disconnect() will cause reconnect
         """
@@ -1107,7 +1107,7 @@ def loop_step_listen(self):
 
 @pytest.mark.onlynoncluster
 class TestBaseException:
-    def test_base_exception(self, r: valkey.Valkey):
+    def test_base_exception(self, r: valkey.Valkey[str]):
         """
         Manually trigger a BaseException inside the parser's .read_response method
         and verify that it isn't caught
diff --git a/types.patch b/types.patch
new file mode 100644
index 00000000..8deb849a
--- /dev/null
+++ b/types.patch
@@ -0,0 +1,10525 @@
+diff --git a/.github/workflows/mypy.yml b/.github/workflows/mypy.yml
+new file mode 100644
+index 0000000..e04b8a1
+--- /dev/null
++++ b/.github/workflows/mypy.yml
+@@ -0,0 +1,36 @@
++name: Python application
++
++on:
++  push:
++    branches: [ types ]
++  pull_request:
++    branches: [ types ]
++
++jobs:
++  build:
++
++    runs-on: ubuntu-latest
++    strategy:
++      fail-fast: false
++      matrix:
++        python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]
++
++    steps:
++    - uses: actions/checkout@v4
++
++    - name: Set up Python ${{matrix.python-version}}
++      uses: actions/setup-python@v5
++      with:
++        python-version: ${{matrix.python-version}}
++
++    - name: Install package
++      run: |
++        pip install mypy cryptography pyopenssl requests
++        pip install types-setuptools
++        pip install types-cachetools
++        pip install -r dev_requirements.txt
++        pip install .[libvalkey]
++
++    - name: Run MyPy
++      run: |
++        mypy --exclude build .
+diff --git a/.mypy.ini b/.mypy.ini
+index 0d3b08d..358ee39 100644
+--- a/.mypy.ini
++++ b/.mypy.ini
+@@ -1,24 +1,63 @@
+ [mypy]
+-#, docs/examples, tests
+-files = valkey
+-check_untyped_defs = True
+-follow_imports_for_stubs = True
+-#disallow_any_decorated = True
+-disallow_subclassing_any = True
+-#disallow_untyped_calls = True
+-disallow_untyped_decorators = True
+-#disallow_untyped_defs = True
+-implicit_reexport = False
+-no_implicit_optional = True
+-show_error_codes = True
+-strict_equality = True
+-warn_incomplete_stub = True
+-warn_redundant_casts = True
+-warn_unreachable = True
+-warn_unused_ignores = True
+-disallow_any_unimported = True
+-#warn_return_any = True
+-
+-[mypy-valkey.asyncio.lock]
+-# TODO: Remove once locks has been rewritten
++strict = True
++show_error_context = True
++pretty = True
++exclude = docs|build
++
++# These next few are various gradations of forcing use of type annotations
++disallow_untyped_calls = False
++disallow_incomplete_defs = False
++disallow_untyped_defs = False
++
++# This one can be tricky to get passing if you use a lot of untyped libraries
++warn_return_any = False
++
++[mypy-valkey._parsers.*]
++ignore_errors = True
++
++[mypy-valkey._cache]
++ignore_errors = True
++
++[mypy-tests.*]
++ignore_errors = True
++[mypy-tests.test_bloom]
++ignore_errors = False
++[mypy-tests.test_asyncio.test_bloom]
++ignore_errors = False
++[mypy-tests.test_cache]
++ignore_errors = False
++[mypy-tests.test_asyncio.test_cache]
++ignore_errors = False
++[mypy-tests.test_commands]
++ignore_errors = False
++[mypy-tests.test_asyncio.test_commands]
++ignore_errors = False
++#[mypy-tests.test_cluster]
++#ignore_errors = False
++#[mypy-tests.test_asyncio.test_cluster]
++#ignore_errors = False
++#[mypy-tests.test_connection_pool]
++#ignore_errors = False
++#[mypy-tests.test_asyncio.test_connection_pool]
++#ignore_errors = False
++#[mypy-tests.test_connection]
++#ignore_errors = False
++#[mypy-tests.test_asyncio.test_connection]
++#ignore_errors = False
++[mypy-tests.test_pipeline]
++ignore_errors = False
++[mypy-tests.test_asyncio.test_pipeline]
++ignore_errors = False
++[mypy-tests.test_pubsub]
++ignore_errors = False
++[mypy-tests.test_asyncio.test_pubsub]
++ignore_errors = False
++
++[mypy-benchmarks.*]
++ignore_errors = True
++
++[mypy-whitelist]
++ignore_errors = True
++
++[mypy-tasks]
+ ignore_errors = True
+diff --git a/tests/test_asyncio/compat.py b/tests/test_asyncio/compat.py
+index aa1dc49..0576002 100644
+--- a/tests/test_asyncio/compat.py
++++ b/tests/test_asyncio/compat.py
+@@ -1,6 +1,8 @@
+ import asyncio
+ from unittest import mock
+ 
++__all__ = ["mock", "aclosing", "create_task"]
++
+ try:
+     mock.AsyncMock
+ except AttributeError:
+diff --git a/tests/test_asyncio/test_bloom.py b/tests/test_asyncio/test_bloom.py
+index 04528c1..8dac993 100644
+--- a/tests/test_asyncio/test_bloom.py
++++ b/tests/test_asyncio/test_bloom.py
+@@ -16,7 +16,7 @@ def intlist(obj):
+     return [int(v) for v in obj]
+ 
+ 
+-async def test_create(decoded_r: valkey.Valkey):
++async def test_create(decoded_r: valkey.Valkey[str]):
+     """Test CREATE/RESERVE calls"""
+     assert await decoded_r.bf().create("bloom", 0.01, 1000)
+     assert await decoded_r.bf().create("bloom_e", 0.01, 1000, expansion=1)
+@@ -31,11 +31,11 @@ async def test_create(decoded_r: valkey.Valkey):
+ 
+ 
+ @pytest.mark.experimental
+-async def test_tdigest_create(decoded_r: valkey.Valkey):
++async def test_tdigest_create(decoded_r: valkey.Valkey[str]):
+     assert await decoded_r.tdigest().create("tDigest", 100)
+ 
+ 
+-async def test_bf_add(decoded_r: valkey.Valkey):
++async def test_bf_add(decoded_r: valkey.Valkey[str]):
+     assert await decoded_r.bf().create("bloom", 0.01, 1000)
+     assert 1 == await decoded_r.bf().add("bloom", "foo")
+     assert 0 == await decoded_r.bf().add("bloom", "foo")
+@@ -47,7 +47,7 @@ async def test_bf_add(decoded_r: valkey.Valkey):
+     assert [1, 0] == intlist(await decoded_r.bf().mexists("bloom", "foo", "noexist"))
+ 
+ 
+-async def test_bf_insert(decoded_r: valkey.Valkey):
++async def test_bf_insert(decoded_r: valkey.Valkey[str]):
+     assert await decoded_r.bf().create("bloom", 0.01, 1000)
+     assert [1] == intlist(await decoded_r.bf().insert("bloom", ["foo"]))
+     assert [0, 1] == intlist(await decoded_r.bf().insert("bloom", ["foo", "bar"]))
+@@ -77,7 +77,7 @@ async def test_bf_insert(decoded_r: valkey.Valkey):
+     )
+ 
+ 
+-async def test_bf_scandump_and_loadchunk(decoded_r: valkey.Valkey):
++async def test_bf_scandump_and_loadchunk(decoded_r: valkey.Valkey[str]):
+     # Store a filter
+     await decoded_r.bf().create("myBloom", "0.0001", "1000")
+ 
+@@ -124,7 +124,7 @@ async def test_bf_scandump_and_loadchunk(decoded_r: valkey.Valkey):
+     await decoded_r.bf().create("myBloom", "0.0001", "10000000")
+ 
+ 
+-async def test_bf_info(decoded_r: valkey.Valkey):
++async def test_bf_info(decoded_r: valkey.Valkey[str]):
+     expansion = 4
+     # Store a filter
+     await decoded_r.bf().create("nonscaling", "0.0001", "1000", noScale=True)
+@@ -155,7 +155,7 @@ async def test_bf_info(decoded_r: valkey.Valkey):
+         assert True
+ 
+ 
+-async def test_bf_card(decoded_r: valkey.Valkey):
++async def test_bf_card(decoded_r: valkey.Valkey[str]):
+     # return 0 if the key does not exist
+     assert await decoded_r.bf().card("not_exist") == 0
+ 
+@@ -169,7 +169,7 @@ async def test_bf_card(decoded_r: valkey.Valkey):
+         await decoded_r.bf().card("setKey")
+ 
+ 
+-async def test_cf_add_and_insert(decoded_r: valkey.Valkey):
++async def test_cf_add_and_insert(decoded_r: valkey.Valkey[str]):
+     assert await decoded_r.cf().create("cuckoo", 1000)
+     assert await decoded_r.cf().add("cuckoo", "filter")
+     assert not await decoded_r.cf().addnx("cuckoo", "filter")
+@@ -194,7 +194,7 @@ async def test_cf_add_and_insert(decoded_r: valkey.Valkey):
+     )
+ 
+ 
+-async def test_cf_exists_and_del(decoded_r: valkey.Valkey):
++async def test_cf_exists_and_del(decoded_r: valkey.Valkey[str]):
+     assert await decoded_r.cf().create("cuckoo", 1000)
+     assert await decoded_r.cf().add("cuckoo", "filter")
+     assert await decoded_r.cf().exists("cuckoo", "filter")
+@@ -205,7 +205,7 @@ async def test_cf_exists_and_del(decoded_r: valkey.Valkey):
+     assert 0 == await decoded_r.cf().count("cuckoo", "filter")
+ 
+ 
+-async def test_cms(decoded_r: valkey.Valkey):
++async def test_cms(decoded_r: valkey.Valkey[str]):
+     assert await decoded_r.cms().initbydim("dim", 1000, 5)
+     assert await decoded_r.cms().initbyprob("prob", 0.01, 0.01)
+     assert await decoded_r.cms().incrby("dim", ["foo"], [5])
+@@ -221,7 +221,7 @@ async def test_cms(decoded_r: valkey.Valkey):
+ 
+ 
+ @pytest.mark.onlynoncluster
+-async def test_cms_merge(decoded_r: valkey.Valkey):
++async def test_cms_merge(decoded_r: valkey.Valkey[str]):
+     assert await decoded_r.cms().initbydim("A", 1000, 5)
+     assert await decoded_r.cms().initbydim("B", 1000, 5)
+     assert await decoded_r.cms().initbydim("C", 1000, 5)
+@@ -237,7 +237,7 @@ async def test_cms_merge(decoded_r: valkey.Valkey):
+     assert [16, 15, 21] == await decoded_r.cms().query("C", "foo", "bar", "baz")
+ 
+ 
+-async def test_topk(decoded_r: valkey.Valkey):
++async def test_topk(decoded_r: valkey.Valkey[str]):
+     # test list with empty buckets
+     assert await decoded_r.topk().reserve("topk", 3, 50, 4, 0.9)
+     assert [
+@@ -317,7 +317,7 @@ async def test_topk(decoded_r: valkey.Valkey):
+     assert 0.9 == round(float(info["decay"]), 1)
+ 
+ 
+-async def test_topk_incrby(decoded_r: valkey.Valkey):
++async def test_topk_incrby(decoded_r: valkey.Valkey[str]):
+     await decoded_r.flushdb()
+     assert await decoded_r.topk().reserve("topk", 3, 10, 3, 1)
+     assert [None, None, None] == await decoded_r.topk().incrby(
+@@ -332,7 +332,7 @@ async def test_topk_incrby(decoded_r: valkey.Valkey):
+ 
+ 
+ @pytest.mark.experimental
+-async def test_tdigest_reset(decoded_r: valkey.Valkey):
++async def test_tdigest_reset(decoded_r: valkey.Valkey[str]):
+     assert await decoded_r.tdigest().create("tDigest", 10)
+     # reset on empty histogram
+     assert await decoded_r.tdigest().reset("tDigest")
+@@ -348,7 +348,7 @@ async def test_tdigest_reset(decoded_r: valkey.Valkey):
+ 
+ 
+ @pytest.mark.onlynoncluster
+-async def test_tdigest_merge(decoded_r: valkey.Valkey):
++async def test_tdigest_merge(decoded_r: valkey.Valkey[str]):
+     assert await decoded_r.tdigest().create("to-tDigest", 10)
+     assert await decoded_r.tdigest().create("from-tDigest", 10)
+     # insert data-points into sketch
+@@ -375,7 +375,7 @@ async def test_tdigest_merge(decoded_r: valkey.Valkey):
+ 
+ 
+ @pytest.mark.experimental
+-async def test_tdigest_min_and_max(decoded_r: valkey.Valkey):
++async def test_tdigest_min_and_max(decoded_r: valkey.Valkey[str]):
+     assert await decoded_r.tdigest().create("tDigest", 100)
+     # insert data-points into sketch
+     assert await decoded_r.tdigest().add("tDigest", [1, 2, 3])
+@@ -385,8 +385,8 @@ async def test_tdigest_min_and_max(decoded_r: valkey.Valkey):
+ 
+ 
+ @pytest.mark.experimental
+-@skip_ifmodversion_lt("2.4.0", "bf")
+-async def test_tdigest_quantile(decoded_r: valkey.Valkey):
++@skip_ifmodversion_lt("2.4.0", "bf")  # type: ignore[misc]
++async def test_tdigest_quantile(decoded_r: valkey.Valkey[str]):
+     assert await decoded_r.tdigest().create("tDigest", 500)
+     # insert data-points into sketch
+     assert await decoded_r.tdigest().add(
+@@ -413,7 +413,7 @@ async def test_tdigest_quantile(decoded_r: valkey.Valkey):
+ 
+ 
+ @pytest.mark.experimental
+-async def test_tdigest_cdf(decoded_r: valkey.Valkey):
++async def test_tdigest_cdf(decoded_r: valkey.Valkey[str]):
+     assert await decoded_r.tdigest().create("tDigest", 100)
+     # insert data-points into sketch
+     assert await decoded_r.tdigest().add("tDigest", list(range(1, 10)))
+@@ -424,8 +424,8 @@ async def test_tdigest_cdf(decoded_r: valkey.Valkey):
+ 
+ 
+ @pytest.mark.experimental
+-@skip_ifmodversion_lt("2.4.0", "bf")
+-async def test_tdigest_trimmed_mean(decoded_r: valkey.Valkey):
++@skip_ifmodversion_lt("2.4.0", "bf")  # type: ignore[misc]
++async def test_tdigest_trimmed_mean(decoded_r: valkey.Valkey[str]):
+     assert await decoded_r.tdigest().create("tDigest", 100)
+     # insert data-points into sketch
+     assert await decoded_r.tdigest().add("tDigest", list(range(1, 10)))
+@@ -434,7 +434,7 @@ async def test_tdigest_trimmed_mean(decoded_r: valkey.Valkey):
+ 
+ 
+ @pytest.mark.experimental
+-async def test_tdigest_rank(decoded_r: valkey.Valkey):
++async def test_tdigest_rank(decoded_r: valkey.Valkey[str]):
+     assert await decoded_r.tdigest().create("t-digest", 500)
+     assert await decoded_r.tdigest().add("t-digest", list(range(0, 20)))
+     assert -1 == (await decoded_r.tdigest().rank("t-digest", -1))[0]
+@@ -444,7 +444,7 @@ async def test_tdigest_rank(decoded_r: valkey.Valkey):
+ 
+ 
+ @pytest.mark.experimental
+-async def test_tdigest_revrank(decoded_r: valkey.Valkey):
++async def test_tdigest_revrank(decoded_r: valkey.Valkey[str]):
+     assert await decoded_r.tdigest().create("t-digest", 500)
+     assert await decoded_r.tdigest().add("t-digest", list(range(0, 20)))
+     assert -1 == (await decoded_r.tdigest().revrank("t-digest", 20))[0]
+@@ -453,7 +453,7 @@ async def test_tdigest_revrank(decoded_r: valkey.Valkey):
+ 
+ 
+ @pytest.mark.experimental
+-async def test_tdigest_byrank(decoded_r: valkey.Valkey):
++async def test_tdigest_byrank(decoded_r: valkey.Valkey[str]):
+     assert await decoded_r.tdigest().create("t-digest", 500)
+     assert await decoded_r.tdigest().add("t-digest", list(range(1, 11)))
+     assert 1 == (await decoded_r.tdigest().byrank("t-digest", 0))[0]
+@@ -464,7 +464,7 @@ async def test_tdigest_byrank(decoded_r: valkey.Valkey):
+ 
+ 
+ @pytest.mark.experimental
+-async def test_tdigest_byrevrank(decoded_r: valkey.Valkey):
++async def test_tdigest_byrevrank(decoded_r: valkey.Valkey[str]):
+     assert await decoded_r.tdigest().create("t-digest", 500)
+     assert await decoded_r.tdigest().add("t-digest", list(range(1, 11)))
+     assert 10 == (await decoded_r.tdigest().byrevrank("t-digest", 0))[0]
+@@ -474,7 +474,7 @@ async def test_tdigest_byrevrank(decoded_r: valkey.Valkey):
+         (await decoded_r.tdigest().byrevrank("t-digest", -1))[0]
+ 
+ 
+-# # async def test_pipeline(decoded_r: valkey.Valkey):
++# # async def test_pipeline(decoded_r: valkey.Valkey[str]):
+ #     pipeline = await decoded_r.bf().pipeline()
+ #     assert not await decoded_r.bf().execute_command("get pipeline")
+ #
+diff --git a/tests/test_asyncio/test_commands.py b/tests/test_asyncio/test_commands.py
+index 27fc2cc..28e8dbc 100644
+--- a/tests/test_asyncio/test_commands.py
++++ b/tests/test_asyncio/test_commands.py
+@@ -2,6 +2,8 @@
+ Tests async overrides of commands from their mixins
+ """
+ 
++from __future__ import annotations
++
+ import asyncio
+ import binascii
+ import datetime
+@@ -9,7 +11,7 @@ import math
+ import re
+ import sys
+ from string import ascii_letters
+-from typing import Any, Dict, List
++from typing import Any
+ 
+ import pytest
+ import pytest_asyncio
+@@ -35,15 +37,15 @@ from valkey._parsers.helpers import (
+ from valkey.client import EMPTY_RESPONSE, NEVER_DECODE
+ 
+ if sys.version_info >= (3, 11, 3):
+-    from asyncio import timeout as async_timeout
++    from asyncio import timeout as async_timeout  # type: ignore[unused-ignore,assignment,no-redef,import-not-found,attr-defined]
+ else:
+-    from async_timeout import timeout as async_timeout
++    from async_timeout import timeout as async_timeout  # type: ignore[unused-ignore,assignment,no-redef,import-not-found]
+ 
+ VALKEY_6_VERSION = "5.9.0"
+ 
+ 
+ @pytest_asyncio.fixture()
+-async def r_teardown(r: valkey.Valkey):
++async def r_teardown(r: valkey.asyncio.Valkey[str]):
+     """
+     A special fixture which removes the provided names from the database after use
+     """
+@@ -59,7 +61,7 @@ async def r_teardown(r: valkey.Valkey):
+ 
+ 
+ @pytest_asyncio.fixture()
+-async def slowlog(r: valkey.Valkey):
++async def slowlog(r: valkey.asyncio.Valkey[str]):
+     current_config = await r.config_get()
+     old_slower_than_value = current_config["slowlog-log-slower-than"]
+     old_max_legnth_value = current_config["slowlog-max-len"]
+@@ -73,13 +75,13 @@ async def slowlog(r: valkey.Valkey):
+     await r.config_set("slowlog-max-len", old_max_legnth_value)
+ 
+ 
+-async def valkey_server_time(client: valkey.Valkey):
++async def valkey_server_time(client: valkey.asyncio.Valkey[bytes]):
+     seconds, milliseconds = await client.time()
+     timestamp = float(f"{seconds}.{milliseconds}")
+     return datetime.datetime.fromtimestamp(timestamp)
+ 
+ 
+-async def get_stream_message(client: valkey.Valkey, stream: str, message_id: str):
++async def get_stream_message(client: valkey.asyncio.Valkey[str], stream: str, message_id: str):
+     """Fetch a stream message and format it as a (message_id, fields) pair"""
+     response = await client.xrange(stream, min=message_id, max=message_id)
+     assert len(response) == 1
+@@ -91,7 +93,7 @@ async def get_stream_message(client: valkey.Valkey, stream: str, message_id: str
+ class TestResponseCallbacks:
+     """Tests for the response callback system"""
+ 
+-    async def test_response_callbacks(self, r: valkey.Valkey):
++    async def test_response_callbacks(self, r: valkey.asyncio.Valkey[str]):
+         callbacks = _ValkeyCallbacks
+         if is_resp2_connection(r):
+             callbacks.update(_ValkeyCallbacksRESP2)
+@@ -99,32 +101,32 @@ class TestResponseCallbacks:
+             callbacks.update(_ValkeyCallbacksRESP3)
+         assert r.response_callbacks == callbacks
+         assert id(r.response_callbacks) != id(_ValkeyCallbacks)
+-        r.set_response_callback("GET", lambda x: "static")
++        r.set_response_callback("GET", lambda x: "static")  # type: ignore[arg-type]
+         await r.set("a", "foo")
+         assert await r.get("a") == "static"
+ 
+-    async def test_case_insensitive_command_names(self, r: valkey.Valkey):
++    async def test_case_insensitive_command_names(self, r: valkey.asyncio.Valkey[str]):
+         assert r.response_callbacks["ping"] == r.response_callbacks["PING"]
+ 
+ 
+ class TestValkeyCommands:
+-    async def test_command_on_invalid_key_type(self, r: valkey.Valkey):
++    async def test_command_on_invalid_key_type(self, r: valkey.asyncio.Valkey[str]):
+         await r.lpush("a", "1")
+         with pytest.raises(valkey.ResponseError):
+             await r.get("a")
+ 
+     # SERVER INFORMATION
+     @skip_if_server_version_lt(VALKEY_6_VERSION)
+-    async def test_acl_cat_no_category(self, r: valkey.Valkey):
++    async def test_acl_cat_no_category(self, r: valkey.asyncio.Valkey[str]):
+         categories = await r.acl_cat()
+         assert isinstance(categories, list)
+-        assert "read" in categories or b"read" in categories
++        assert "read" in categories or b"read" in categories  # type: ignore[comparison-overlap]
+ 
+     @skip_if_server_version_lt(VALKEY_6_VERSION)
+-    async def test_acl_cat_with_category(self, r: valkey.Valkey):
++    async def test_acl_cat_with_category(self, r: valkey.asyncio.Valkey[str]):
+         commands = await r.acl_cat("read")
+         assert isinstance(commands, list)
+-        assert "get" in commands or b"get" in commands
++        assert "get" in commands or b"get" in commands  # type: ignore[comparison-overlap]
+ 
+     @skip_if_server_version_lt(VALKEY_6_VERSION)
+     async def test_acl_deluser(self, r_teardown):
+@@ -136,7 +138,7 @@ class TestValkeyCommands:
+         assert await r.acl_deluser(username) == 1
+ 
+     @skip_if_server_version_lt(VALKEY_6_VERSION)
+-    async def test_acl_genpass(self, r: valkey.Valkey):
++    async def test_acl_genpass(self, r: valkey.asyncio.Valkey[str]):
+         password = await r.acl_genpass()
+         assert isinstance(password, (str, bytes))
+ 
+@@ -311,24 +313,24 @@ class TestValkeyCommands:
+             await r.acl_setuser(username, passwords="+mypass", nopass=True)
+ 
+     @skip_if_server_version_lt(VALKEY_6_VERSION)
+-    async def test_acl_users(self, r: valkey.Valkey):
++    async def test_acl_users(self, r: valkey.asyncio.Valkey[str]):
+         users = await r.acl_users()
+         assert isinstance(users, list)
+         assert len(users) > 0
+ 
+     @skip_if_server_version_lt(VALKEY_6_VERSION)
+-    async def test_acl_whoami(self, r: valkey.Valkey):
++    async def test_acl_whoami(self, r: valkey.asyncio.Valkey[str]):
+         username = await r.acl_whoami()
+         assert isinstance(username, (str, bytes))
+ 
+     @pytest.mark.onlynoncluster
+-    async def test_client_list(self, r: valkey.Valkey):
++    async def test_client_list(self, r: valkey.asyncio.Valkey[str]):
+         clients = await r.client_list()
+         assert isinstance(clients[0], dict)
+         assert "addr" in clients[0]
+ 
+     @skip_if_server_version_lt("5.0.0")
+-    async def test_client_list_type(self, r: valkey.Valkey):
++    async def test_client_list_type(self, r: valkey.asyncio.Valkey[str]):
+         with pytest.raises(exceptions.ValkeyError):
+             await r.client_list(_type="not a client type")
+         for client_type in ["normal", "master", "replica", "pubsub"]:
+@@ -337,12 +339,12 @@ class TestValkeyCommands:
+ 
+     @skip_if_server_version_lt("5.0.0")
+     @pytest.mark.onlynoncluster
+-    async def test_client_id(self, r: valkey.Valkey):
++    async def test_client_id(self, r: valkey.asyncio.Valkey[str]):
+         assert await r.client_id() > 0
+ 
+     @skip_if_server_version_lt("5.0.0")
+     @pytest.mark.onlynoncluster
+-    async def test_client_unblock(self, r: valkey.Valkey):
++    async def test_client_unblock(self, r: valkey.asyncio.Valkey[str]):
+         myid = await r.client_id()
+         assert not await r.client_unblock(myid)
+         assert not await r.client_unblock(myid, error=True)
+@@ -350,19 +352,19 @@ class TestValkeyCommands:
+ 
+     @skip_if_server_version_lt("2.6.9")
+     @pytest.mark.onlynoncluster
+-    async def test_client_getname(self, r: valkey.Valkey):
++    async def test_client_getname(self, r: valkey.asyncio.Valkey[str]):
+         assert await r.client_getname() is None
+ 
+     @skip_if_server_version_lt("2.6.9")
+     @pytest.mark.onlynoncluster
+-    async def test_client_setname(self, r: valkey.Valkey):
++    async def test_client_setname(self, r: valkey.asyncio.Valkey[str]):
+         assert await r.client_setname("valkey_py_test")
+         assert_resp_response(
+             r, await r.client_getname(), "valkey_py_test", b"valkey_py_test"
+         )
+ 
+     @skip_if_server_version_lt("7.2.0")
+-    async def test_client_setinfo(self, r: valkey.Valkey):
++    async def test_client_setinfo(self, r: valkey.asyncio.Valkey[str]):
+         await r.ping()
+         info = await r.client_info()
+         assert info["lib-name"] == "valkey-py"
+@@ -385,7 +387,7 @@ class TestValkeyCommands:
+ 
+     @skip_if_server_version_lt("2.6.9")
+     @pytest.mark.onlynoncluster
+-    async def test_client_kill(self, r: valkey.Valkey, r2):
++    async def test_client_kill(self, r: valkey.asyncio.Valkey[str], r2):
+         await r.client_setname("valkey-py-c1")
+         await r2.client_setname("valkey-py-c2")
+         clients = [
+@@ -398,7 +400,7 @@ class TestValkeyCommands:
+         clients_by_name = {client.get("name"): client for client in clients}
+ 
+         client_addr = clients_by_name["valkey-py-c2"].get("addr")
+-        assert await r.client_kill(client_addr) is True
++        assert await r.client_kill(client_addr) is True  # type: ignore[arg-type]
+ 
+         clients = [
+             client
+@@ -409,22 +411,22 @@ class TestValkeyCommands:
+         assert clients[0].get("name") == "valkey-py-c1"
+ 
+     @skip_if_server_version_lt("2.8.12")
+-    async def test_client_kill_filter_invalid_params(self, r: valkey.Valkey):
++    async def test_client_kill_filter_invalid_params(self, r: valkey.asyncio.Valkey[str]):
+         # empty
+         with pytest.raises(exceptions.DataError):
+             await r.client_kill_filter()
+ 
+         # invalid skipme
+         with pytest.raises(exceptions.DataError):
+-            await r.client_kill_filter(skipme="yeah")  # type: ignore
++            await r.client_kill_filter(skipme="yeah")
+ 
+         # invalid type
+         with pytest.raises(exceptions.DataError):
+-            await r.client_kill_filter(_type="caster")  # type: ignore
++            await r.client_kill_filter(_type="caster")
+ 
+     @skip_if_server_version_lt("2.8.12")
+     @pytest.mark.onlynoncluster
+-    async def test_client_kill_filter_by_id(self, r: valkey.Valkey, r2):
++    async def test_client_kill_filter_by_id(self, r: valkey.asyncio.Valkey[str], r2):
+         await r.client_setname("valkey-py-c1")
+         await r2.client_setname("valkey-py-c2")
+         clients = [
+@@ -450,7 +452,7 @@ class TestValkeyCommands:
+ 
+     @skip_if_server_version_lt("2.8.12")
+     @pytest.mark.onlynoncluster
+-    async def test_client_kill_filter_by_addr(self, r: valkey.Valkey, r2):
++    async def test_client_kill_filter_by_addr(self, r: valkey.asyncio.Valkey[str], r2):
+         await r.client_setname("valkey-py-c1")
+         await r2.client_setname("valkey-py-c2")
+         clients = [
+@@ -475,7 +477,7 @@ class TestValkeyCommands:
+         assert clients[0].get("name") == "valkey-py-c1"
+ 
+     @skip_if_server_version_lt("2.6.9")
+-    async def test_client_list_after_client_setname(self, r: valkey.Valkey):
++    async def test_client_list_after_client_setname(self, r: valkey.asyncio.Valkey[str]):
+         await r.client_setname("valkey_py_test")
+         clients = await r.client_list()
+         # we don't know which client ours will be
+@@ -483,7 +485,7 @@ class TestValkeyCommands:
+ 
+     @skip_if_server_version_lt("2.9.50")
+     @pytest.mark.onlynoncluster
+-    async def test_client_pause(self, r: valkey.Valkey):
++    async def test_client_pause(self, r: valkey.asyncio.Valkey[str]):
+         assert await r.client_pause(1)
+         assert await r.client_pause(timeout=1)
+         with pytest.raises(exceptions.ValkeyError):
+@@ -491,19 +493,19 @@ class TestValkeyCommands:
+ 
+     @skip_if_server_version_lt("7.2.0")
+     @pytest.mark.onlynoncluster
+-    async def test_client_no_touch(self, r: valkey.Valkey):
++    async def test_client_no_touch(self, r: valkey.asyncio.Valkey[bytes]):
+         assert await r.client_no_touch("ON") == b"OK"
+         assert await r.client_no_touch("OFF") == b"OK"
+         with pytest.raises(TypeError):
+-            await r.client_no_touch()
++            await r.client_no_touch()  # type: ignore[call-arg]
+ 
+-    async def test_config_get(self, r: valkey.Valkey):
++    async def test_config_get(self, r: valkey.asyncio.Valkey[str]):
+         data = await r.config_get()
+         assert "maxmemory" in data
+         assert data["maxmemory"].isdigit()
+ 
+     @pytest.mark.onlynoncluster
+-    async def test_config_resetstat(self, r: valkey.Valkey):
++    async def test_config_resetstat(self, r: valkey.asyncio.Valkey[str]):
+         await r.ping()
+         prior_commands_processed = int((await r.info())["total_commands_processed"])
+         assert prior_commands_processed >= 1
+@@ -511,24 +513,24 @@ class TestValkeyCommands:
+         reset_commands_processed = int((await r.info())["total_commands_processed"])
+         assert reset_commands_processed < prior_commands_processed
+ 
+-    async def test_config_set(self, r: valkey.Valkey):
++    async def test_config_set(self, r: valkey.asyncio.Valkey[str]):
+         await r.config_set("timeout", 70)
+         assert (await r.config_get())["timeout"] == "70"
+         assert await r.config_set("timeout", 0)
+         assert (await r.config_get())["timeout"] == "0"
+ 
+     @pytest.mark.onlynoncluster
+-    async def test_dbsize(self, r: valkey.Valkey):
++    async def test_dbsize(self, r: valkey.asyncio.Valkey[str]):
+         await r.set("a", "foo")
+         await r.set("b", "bar")
+         assert await r.dbsize() == 2
+ 
+     @pytest.mark.onlynoncluster
+-    async def test_echo(self, r: valkey.Valkey):
++    async def test_echo(self, r: valkey.asyncio.Valkey[str]):
+         assert await r.echo("foo bar") == b"foo bar"
+ 
+     @pytest.mark.onlynoncluster
+-    async def test_info(self, r: valkey.Valkey):
++    async def test_info(self, r: valkey.asyncio.Valkey[str]):
+         await r.set("a", "foo")
+         await r.set("b", "bar")
+         info = await r.info()
+@@ -537,21 +539,21 @@ class TestValkeyCommands:
+         assert "valkey_version" in info.keys()
+ 
+     @pytest.mark.onlynoncluster
+-    async def test_lastsave(self, r: valkey.Valkey):
++    async def test_lastsave(self, r: valkey.asyncio.Valkey[str]):
+         assert isinstance(await r.lastsave(), datetime.datetime)
+ 
+-    async def test_object(self, r: valkey.Valkey):
++    async def test_object(self, r: valkey.asyncio.Valkey[str]):
+         await r.set("a", "foo")
+         assert isinstance(await r.object("refcount", "a"), int)
+         assert isinstance(await r.object("idletime", "a"), int)
+         assert await r.object("encoding", "a") in (b"raw", b"embstr")
+         assert await r.object("idletime", "invalid-key") is None
+ 
+-    async def test_ping(self, r: valkey.Valkey):
++    async def test_ping(self, r: valkey.asyncio.Valkey[str]):
+         assert await r.ping()
+ 
+     @pytest.mark.onlynoncluster
+-    async def test_slowlog_get(self, r: valkey.Valkey, slowlog):
++    async def test_slowlog_get(self, r: valkey.asyncio.Valkey[str], slowlog):
+         assert await r.slowlog_reset()
+         unicode_string = chr(3456) + "abcd" + chr(3421)
+         await r.get(unicode_string)
+@@ -573,7 +575,7 @@ class TestValkeyCommands:
+         assert isinstance(slowlog[0]["duration"], int)
+ 
+     @pytest.mark.onlynoncluster
+-    async def test_slowlog_get_limit(self, r: valkey.Valkey, slowlog):
++    async def test_slowlog_get_limit(self, r: valkey.asyncio.Valkey[str], slowlog):
+         assert await r.slowlog_reset()
+         await r.get("foo")
+         slowlog = await r.slowlog_get(1)
+@@ -582,36 +584,36 @@ class TestValkeyCommands:
+         assert len(slowlog) == 1
+ 
+     @pytest.mark.onlynoncluster
+-    async def test_slowlog_length(self, r: valkey.Valkey, slowlog):
++    async def test_slowlog_length(self, r: valkey.asyncio.Valkey[str], slowlog):
+         await r.get("foo")
+         assert isinstance(await r.slowlog_len(), int)
+ 
+     @skip_if_server_version_lt("2.6.0")
+-    async def test_time(self, r: valkey.Valkey):
++    async def test_time(self, r: valkey.asyncio.Valkey[str]):
+         t = await r.time()
+         assert len(t) == 2
+         assert isinstance(t[0], int)
+         assert isinstance(t[1], int)
+ 
+-    async def test_never_decode_option(self, r: valkey.Valkey):
+-        opts = {NEVER_DECODE: []}
++    async def test_never_decode_option(self, r: valkey.asyncio.Valkey[str]):
++        opts: dict[str, list[str]] = {NEVER_DECODE: []}
+         await r.delete("a")
+         assert await r.execute_command("EXISTS", "a", **opts) == 0
+ 
+-    async def test_empty_response_option(self, r: valkey.Valkey):
+-        opts = {EMPTY_RESPONSE: []}
++    async def test_empty_response_option(self, r: valkey.asyncio.Valkey[str]):
++        opts: dict[str, list[str]] = {EMPTY_RESPONSE: []}
+         await r.delete("a")
+         assert await r.execute_command("EXISTS", "a", **opts) == 0
+ 
+     # BASIC KEY COMMANDS
+-    async def test_append(self, r: valkey.Valkey):
++    async def test_append(self, r: valkey.asyncio.Valkey[bytes]):
+         assert await r.append("a", "a1") == 2
+         assert await r.get("a") == b"a1"
+         assert await r.append("a", "a2") == 4
+         assert await r.get("a") == b"a1a2"
+ 
+     @skip_if_server_version_lt("2.6.0")
+-    async def test_bitcount(self, r: valkey.Valkey):
++    async def test_bitcount(self, r: valkey.asyncio.Valkey[str]):
+         await r.setbit("a", 5, True)
+         assert await r.bitcount("a") == 1
+         await r.setbit("a", 6, True)
+@@ -631,32 +633,32 @@ class TestValkeyCommands:
+ 
+     @skip_if_server_version_lt("2.6.0")
+     @pytest.mark.onlynoncluster
+-    async def test_bitop_not_empty_string(self, r: valkey.Valkey):
++    async def test_bitop_not_empty_string(self, r: valkey.asyncio.Valkey[str]):
+         await r.set("a", "")
+         await r.bitop("not", "r", "a")
+         assert await r.get("r") is None
+ 
+     @skip_if_server_version_lt("2.6.0")
+     @pytest.mark.onlynoncluster
+-    async def test_bitop_not(self, r: valkey.Valkey):
++    async def test_bitop_not(self, r: valkey.asyncio.Valkey[str]):
+         test_str = b"\xAA\x00\xFF\x55"
+         correct = ~0xAA00FF55 & 0xFFFFFFFF
+         await r.set("a", test_str)
+         await r.bitop("not", "r", "a")
+-        assert int(binascii.hexlify(await r.get("r")), 16) == correct
++        assert int(binascii.hexlify(await r.get("r")), 16) == correct  # type: ignore[arg-type]
+ 
+     @skip_if_server_version_lt("2.6.0")
+     @pytest.mark.onlynoncluster
+-    async def test_bitop_not_in_place(self, r: valkey.Valkey):
++    async def test_bitop_not_in_place(self, r: valkey.asyncio.Valkey[str]):
+         test_str = b"\xAA\x00\xFF\x55"
+         correct = ~0xAA00FF55 & 0xFFFFFFFF
+         await r.set("a", test_str)
+         await r.bitop("not", "a", "a")
+-        assert int(binascii.hexlify(await r.get("a")), 16) == correct
++        assert int(binascii.hexlify(await r.get("a")), 16) == correct  # type: ignore[arg-type]
+ 
+     @skip_if_server_version_lt("2.6.0")
+     @pytest.mark.onlynoncluster
+-    async def test_bitop_single_string(self, r: valkey.Valkey):
++    async def test_bitop_single_string(self, r: valkey.asyncio.Valkey[bytes]):
+         test_str = b"\x01\x02\xFF"
+         await r.set("a", test_str)
+         await r.bitop("and", "res1", "a")
+@@ -668,19 +670,19 @@ class TestValkeyCommands:
+ 
+     @skip_if_server_version_lt("2.6.0")
+     @pytest.mark.onlynoncluster
+-    async def test_bitop_string_operands(self, r: valkey.Valkey):
++    async def test_bitop_string_operands(self, r: valkey.asyncio.Valkey[str]):
+         await r.set("a", b"\x01\x02\xFF\xFF")
+         await r.set("b", b"\x01\x02\xFF")
+         await r.bitop("and", "res1", "a", "b")
+         await r.bitop("or", "res2", "a", "b")
+         await r.bitop("xor", "res3", "a", "b")
+-        assert int(binascii.hexlify(await r.get("res1")), 16) == 0x0102FF00
+-        assert int(binascii.hexlify(await r.get("res2")), 16) == 0x0102FFFF
+-        assert int(binascii.hexlify(await r.get("res3")), 16) == 0x000000FF
++        assert int(binascii.hexlify(await r.get("res1")), 16) == 0x0102FF00  # type: ignore[arg-type]
++        assert int(binascii.hexlify(await r.get("res2")), 16) == 0x0102FFFF  # type: ignore[arg-type]
++        assert int(binascii.hexlify(await r.get("res3")), 16) == 0x000000FF  # type: ignore[arg-type]
+ 
+     @pytest.mark.onlynoncluster
+     @skip_if_server_version_lt("2.8.7")
+-    async def test_bitpos(self, r: valkey.Valkey):
++    async def test_bitpos(self, r: valkey.asyncio.Valkey[str]):
+         key = "key:bitpos"
+         await r.set(key, b"\xff\xf0\x00")
+         assert await r.bitpos(key, 0) == 12
+@@ -693,7 +695,7 @@ class TestValkeyCommands:
+         assert await r.bitpos(key, 1) == -1
+ 
+     @skip_if_server_version_lt("2.8.7")
+-    async def test_bitpos_wrong_arguments(self, r: valkey.Valkey):
++    async def test_bitpos_wrong_arguments(self, r: valkey.asyncio.Valkey[str]):
+         key = "key:bitpos:wrong:args"
+         await r.set(key, b"\xff\xf0\x00")
+         with pytest.raises(exceptions.ValkeyError):
+@@ -701,7 +703,7 @@ class TestValkeyCommands:
+         with pytest.raises(exceptions.ValkeyError):
+             await r.bitpos(key, 7) == 12
+ 
+-    async def test_decr(self, r: valkey.Valkey):
++    async def test_decr(self, r: valkey.asyncio.Valkey[bytes]):
+         assert await r.decr("a") == -1
+         assert await r.get("a") == b"-1"
+         assert await r.decr("a") == -2
+@@ -709,37 +711,37 @@ class TestValkeyCommands:
+         assert await r.decr("a", amount=5) == -7
+         assert await r.get("a") == b"-7"
+ 
+-    async def test_decrby(self, r: valkey.Valkey):
++    async def test_decrby(self, r: valkey.asyncio.Valkey[bytes]):
+         assert await r.decrby("a", amount=2) == -2
+         assert await r.decrby("a", amount=3) == -5
+         assert await r.get("a") == b"-5"
+ 
+-    async def test_delete(self, r: valkey.Valkey):
++    async def test_delete(self, r: valkey.asyncio.Valkey[str]):
+         assert await r.delete("a") == 0
+         await r.set("a", "foo")
+         assert await r.delete("a") == 1
+ 
+-    async def test_delete_with_multiple_keys(self, r: valkey.Valkey):
++    async def test_delete_with_multiple_keys(self, r: valkey.asyncio.Valkey[str]):
+         await r.set("a", "foo")
+         await r.set("b", "bar")
+         assert await r.delete("a", "b") == 2
+         assert await r.get("a") is None
+         assert await r.get("b") is None
+ 
+-    async def test_delitem(self, r: valkey.Valkey):
++    async def test_delitem(self, r: valkey.asyncio.Valkey[str]):
+         await r.set("a", "foo")
+         await r.delete("a")
+         assert await r.get("a") is None
+ 
+     @skip_if_server_version_lt("4.0.0")
+-    async def test_unlink(self, r: valkey.Valkey):
++    async def test_unlink(self, r: valkey.asyncio.Valkey[str]):
+         assert await r.unlink("a") == 0
+         await r.set("a", "foo")
+         assert await r.unlink("a") == 1
+         assert await r.get("a") is None
+ 
+     @skip_if_server_version_lt("4.0.0")
+-    async def test_unlink_with_multiple_keys(self, r: valkey.Valkey):
++    async def test_unlink_with_multiple_keys(self, r: valkey.asyncio.Valkey[str]):
+         await r.set("a", "foo")
+         await r.set("b", "bar")
+         assert await r.unlink("a", "b") == 2
+@@ -747,7 +749,7 @@ class TestValkeyCommands:
+         assert await r.get("b") is None
+ 
+     @skip_if_server_version_lt("2.6.0")
+-    async def test_dump_and_restore(self, r: valkey.Valkey):
++    async def test_dump_and_restore(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.set("a", "foo")
+         dumped = await r.dump("a")
+         await r.delete("a")
+@@ -755,7 +757,7 @@ class TestValkeyCommands:
+         assert await r.get("a") == b"foo"
+ 
+     @skip_if_server_version_lt("3.0.0")
+-    async def test_dump_and_restore_and_replace(self, r: valkey.Valkey):
++    async def test_dump_and_restore_and_replace(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.set("a", "bar")
+         dumped = await r.dump("a")
+         with pytest.raises(valkey.ResponseError):
+@@ -765,7 +767,7 @@ class TestValkeyCommands:
+         assert await r.get("a") == b"bar"
+ 
+     @skip_if_server_version_lt("5.0.0")
+-    async def test_dump_and_restore_absttl(self, r: valkey.Valkey):
++    async def test_dump_and_restore_absttl(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.set("a", "foo")
+         dumped = await r.dump("a")
+         await r.delete("a")
+@@ -777,19 +779,19 @@ class TestValkeyCommands:
+         assert await r.get("a") == b"foo"
+         assert 0 < await r.ttl("a") <= 61
+ 
+-    async def test_exists(self, r: valkey.Valkey):
++    async def test_exists(self, r: valkey.asyncio.Valkey[str]):
+         assert await r.exists("a") == 0
+         await r.set("a", "foo")
+         await r.set("b", "bar")
+         assert await r.exists("a") == 1
+         assert await r.exists("a", "b") == 2
+ 
+-    async def test_exists_contains(self, r: valkey.Valkey):
++    async def test_exists_contains(self, r: valkey.asyncio.Valkey[str]):
+         assert not await r.exists("a")
+         await r.set("a", "foo")
+         assert await r.exists("a")
+ 
+-    async def test_expire(self, r: valkey.Valkey):
++    async def test_expire(self, r: valkey.asyncio.Valkey[str]):
+         assert not await r.expire("a", 10)
+         await r.set("a", "foo")
+         assert await r.expire("a", 10)
+@@ -797,24 +799,24 @@ class TestValkeyCommands:
+         assert await r.persist("a")
+         assert await r.ttl("a") == -1
+ 
+-    async def test_expireat_datetime(self, r: valkey.Valkey):
++    async def test_expireat_datetime(self, r: valkey.asyncio.Valkey[bytes]):
+         expire_at = await valkey_server_time(r) + datetime.timedelta(minutes=1)
+         await r.set("a", "foo")
+         assert await r.expireat("a", expire_at)
+         assert 0 < await r.ttl("a") <= 61
+ 
+-    async def test_expireat_no_key(self, r: valkey.Valkey):
++    async def test_expireat_no_key(self, r: valkey.asyncio.Valkey[bytes]):
+         expire_at = await valkey_server_time(r) + datetime.timedelta(minutes=1)
+         assert not await r.expireat("a", expire_at)
+ 
+-    async def test_expireat_unixtime(self, r: valkey.Valkey):
++    async def test_expireat_unixtime(self, r: valkey.asyncio.Valkey[bytes]):
+         expire_at = await valkey_server_time(r) + datetime.timedelta(minutes=1)
+         await r.set("a", "foo")
+         expire_at_seconds = int(expire_at.timestamp())
+         assert await r.expireat("a", expire_at_seconds)
+         assert 0 < await r.ttl("a") <= 61
+ 
+-    async def test_get_and_set(self, r: valkey.Valkey):
++    async def test_get_and_set(self, r: valkey.asyncio.Valkey[bytes]):
+         # get and set can't be tested independently of each other
+         assert await r.get("a") is None
+         byte_string = b"value"
+@@ -825,9 +827,9 @@ class TestValkeyCommands:
+         assert await r.set("unicode_string", unicode_string)
+         assert await r.get("byte_string") == byte_string
+         assert await r.get("integer") == str(integer).encode()
+-        assert (await r.get("unicode_string")).decode("utf-8") == unicode_string
++        assert (await r.get("unicode_string")).decode("utf-8") == unicode_string  # type: ignore[union-attr]
+ 
+-    async def test_get_set_bit(self, r: valkey.Valkey):
++    async def test_get_set_bit(self, r: valkey.asyncio.Valkey[str]):
+         # no value
+         assert not await r.getbit("a", 5)
+         # set bit 5
+@@ -843,18 +845,18 @@ class TestValkeyCommands:
+         assert await r.setbit("a", 5, True)
+         assert await r.getbit("a", 5)
+ 
+-    async def test_getrange(self, r: valkey.Valkey):
++    async def test_getrange(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.set("a", "foo")
+         assert await r.getrange("a", 0, 0) == b"f"
+         assert await r.getrange("a", 0, 2) == b"foo"
+         assert await r.getrange("a", 3, 4) == b""
+ 
+-    async def test_getset(self, r: valkey.Valkey):
++    async def test_getset(self, r: valkey.asyncio.Valkey[bytes]):
+         assert await r.getset("a", "foo") is None
+         assert await r.getset("a", "bar") == b"foo"
+         assert await r.get("a") == b"bar"
+ 
+-    async def test_incr(self, r: valkey.Valkey):
++    async def test_incr(self, r: valkey.asyncio.Valkey[bytes]):
+         assert await r.incr("a") == 1
+         assert await r.get("a") == b"1"
+         assert await r.incr("a") == 2
+@@ -862,20 +864,20 @@ class TestValkeyCommands:
+         assert await r.incr("a", amount=5) == 7
+         assert await r.get("a") == b"7"
+ 
+-    async def test_incrby(self, r: valkey.Valkey):
++    async def test_incrby(self, r: valkey.asyncio.Valkey[bytes]):
+         assert await r.incrby("a") == 1
+         assert await r.incrby("a", 4) == 5
+         assert await r.get("a") == b"5"
+ 
+     @skip_if_server_version_lt("2.6.0")
+-    async def test_incrbyfloat(self, r: valkey.Valkey):
++    async def test_incrbyfloat(self, r: valkey.asyncio.Valkey[bytes]):
+         assert await r.incrbyfloat("a") == 1.0
+         assert await r.get("a") == b"1"
+         assert await r.incrbyfloat("a", 1.1) == 2.1
+-        assert float(await r.get("a")) == float(2.1)
++        assert float(await r.get("a")) == float(2.1)  # type: ignore[arg-type]
+ 
+     @pytest.mark.onlynoncluster
+-    async def test_keys(self, r: valkey.Valkey):
++    async def test_keys(self, r: valkey.asyncio.Valkey[bytes]):
+         assert await r.keys() == []
+         keys_with_underscores = {b"test_a", b"test_b"}
+         keys = keys_with_underscores.union({b"testc"})
+@@ -885,7 +887,7 @@ class TestValkeyCommands:
+         assert set(await r.keys(pattern="test*")) == keys
+ 
+     @pytest.mark.onlynoncluster
+-    async def test_mget(self, r: valkey.Valkey):
++    async def test_mget(self, r: valkey.asyncio.Valkey[bytes]):
+         assert await r.mget([]) == []
+         assert await r.mget(["a", "b"]) == [None, None]
+         await r.set("a", "1")
+@@ -894,24 +896,24 @@ class TestValkeyCommands:
+         assert await r.mget("a", "other", "b", "c") == [b"1", None, b"2", b"3"]
+ 
+     @pytest.mark.onlynoncluster
+-    async def test_mset(self, r: valkey.Valkey):
++    async def test_mset(self, r: valkey.asyncio.Valkey[bytes]):
+         d = {"a": b"1", "b": b"2", "c": b"3"}
+-        assert await r.mset(d)
++        assert await r.mset(d)  # type: ignore[arg-type]
+         for k, v in d.items():
+             assert await r.get(k) == v
+ 
+     @pytest.mark.onlynoncluster
+-    async def test_msetnx(self, r: valkey.Valkey):
++    async def test_msetnx(self, r: valkey.asyncio.Valkey[bytes]):
+         d = {"a": b"1", "b": b"2", "c": b"3"}
+-        assert await r.msetnx(d)
++        assert await r.msetnx(d)  # type: ignore[arg-type]
+         d2 = {"a": b"x", "d": b"4"}
+-        assert not await r.msetnx(d2)
++        assert not await r.msetnx(d2)  # type: ignore[arg-type]
+         for k, v in d.items():
+             assert await r.get(k) == v
+         assert await r.get("d") is None
+ 
+     @skip_if_server_version_lt("2.6.0")
+-    async def test_pexpire(self, r: valkey.Valkey):
++    async def test_pexpire(self, r: valkey.asyncio.Valkey[str]):
+         assert not await r.pexpire("a", 60000)
+         await r.set("a", "foo")
+         assert await r.pexpire("a", 60000)
+@@ -920,19 +922,19 @@ class TestValkeyCommands:
+         assert await r.pttl("a") == -1
+ 
+     @skip_if_server_version_lt("2.6.0")
+-    async def test_pexpireat_datetime(self, r: valkey.Valkey):
++    async def test_pexpireat_datetime(self, r: valkey.asyncio.Valkey[bytes]):
+         expire_at = await valkey_server_time(r) + datetime.timedelta(minutes=1)
+         await r.set("a", "foo")
+         assert await r.pexpireat("a", expire_at)
+         assert 0 < await r.pttl("a") <= 61000
+ 
+     @skip_if_server_version_lt("2.6.0")
+-    async def test_pexpireat_no_key(self, r: valkey.Valkey):
++    async def test_pexpireat_no_key(self, r: valkey.asyncio.Valkey[bytes]):
+         expire_at = await valkey_server_time(r) + datetime.timedelta(minutes=1)
+         assert not await r.pexpireat("a", expire_at)
+ 
+     @skip_if_server_version_lt("2.6.0")
+-    async def test_pexpireat_unixtime(self, r: valkey.Valkey):
++    async def test_pexpireat_unixtime(self, r: valkey.asyncio.Valkey[bytes]):
+         expire_at = await valkey_server_time(r) + datetime.timedelta(minutes=1)
+         await r.set("a", "foo")
+         expire_at_milliseconds = int(expire_at.timestamp() * 1000)
+@@ -940,20 +942,20 @@ class TestValkeyCommands:
+         assert 0 < await r.pttl("a") <= 61000
+ 
+     @skip_if_server_version_lt("2.6.0")
+-    async def test_psetex(self, r: valkey.Valkey):
++    async def test_psetex(self, r: valkey.asyncio.Valkey[bytes]):
+         assert await r.psetex("a", 1000, "value")
+         assert await r.get("a") == b"value"
+         assert 0 < await r.pttl("a") <= 1000
+ 
+     @skip_if_server_version_lt("2.6.0")
+-    async def test_psetex_timedelta(self, r: valkey.Valkey):
++    async def test_psetex_timedelta(self, r: valkey.asyncio.Valkey[bytes]):
+         expire_at = datetime.timedelta(milliseconds=1000)
+         assert await r.psetex("a", expire_at, "value")
+         assert await r.get("a") == b"value"
+         assert 0 < await r.pttl("a") <= 1000
+ 
+     @skip_if_server_version_lt("2.6.0")
+-    async def test_pttl(self, r: valkey.Valkey):
++    async def test_pttl(self, r: valkey.asyncio.Valkey[str]):
+         assert not await r.pexpire("a", 10000)
+         await r.set("a", "1")
+         assert await r.pexpire("a", 10000)
+@@ -962,7 +964,7 @@ class TestValkeyCommands:
+         assert await r.pttl("a") == -1
+ 
+     @skip_if_server_version_lt("2.8.0")
+-    async def test_pttl_no_key(self, r: valkey.Valkey):
++    async def test_pttl_no_key(self, r: valkey.asyncio.Valkey[str]):
+         """PTTL on servers 2.8 and after return -2 when the key doesn't exist"""
+         assert await r.pttl("a") == -2
+ 
+@@ -980,21 +982,21 @@ class TestValkeyCommands:
+         assert len(await r.hrandfield("key", -10)) == 10
+ 
+     @pytest.mark.onlynoncluster
+-    async def test_randomkey(self, r: valkey.Valkey):
++    async def test_randomkey(self, r: valkey.asyncio.Valkey[bytes]):
+         assert await r.randomkey() is None
+         for key in ("a", "b", "c"):
+             await r.set(key, 1)
+         assert await r.randomkey() in (b"a", b"b", b"c")
+ 
+     @pytest.mark.onlynoncluster
+-    async def test_rename(self, r: valkey.Valkey):
++    async def test_rename(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.set("a", "1")
+         assert await r.rename("a", "b")
+         assert await r.get("a") is None
+         assert await r.get("b") == b"1"
+ 
+     @pytest.mark.onlynoncluster
+-    async def test_renamenx(self, r: valkey.Valkey):
++    async def test_renamenx(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.set("a", "1")
+         await r.set("b", "2")
+         assert not await r.renamenx("a", "b")
+@@ -1002,13 +1004,13 @@ class TestValkeyCommands:
+         assert await r.get("b") == b"2"
+ 
+     @skip_if_server_version_lt("2.6.0")
+-    async def test_set_nx(self, r: valkey.Valkey):
++    async def test_set_nx(self, r: valkey.asyncio.Valkey[bytes]):
+         assert await r.set("a", "1", nx=True)
+         assert not await r.set("a", "2", nx=True)
+         assert await r.get("a") == b"1"
+ 
+     @skip_if_server_version_lt("2.6.0")
+-    async def test_set_xx(self, r: valkey.Valkey):
++    async def test_set_xx(self, r: valkey.asyncio.Valkey[bytes]):
+         assert not await r.set("a", "1", xx=True)
+         assert await r.get("a") is None
+         await r.set("a", "bar")
+@@ -1016,38 +1018,38 @@ class TestValkeyCommands:
+         assert await r.get("a") == b"2"
+ 
+     @skip_if_server_version_lt("2.6.0")
+-    async def test_set_px(self, r: valkey.Valkey):
++    async def test_set_px(self, r: valkey.asyncio.Valkey[bytes]):
+         assert await r.set("a", "1", px=10000)
+         assert await r.get("a") == b"1"
+         assert 0 < await r.pttl("a") <= 10000
+         assert 0 < await r.ttl("a") <= 10
+ 
+     @skip_if_server_version_lt("2.6.0")
+-    async def test_set_px_timedelta(self, r: valkey.Valkey):
++    async def test_set_px_timedelta(self, r: valkey.asyncio.Valkey[str]):
+         expire_at = datetime.timedelta(milliseconds=1000)
+         assert await r.set("a", "1", px=expire_at)
+         assert 0 < await r.pttl("a") <= 1000
+         assert 0 < await r.ttl("a") <= 1
+ 
+     @skip_if_server_version_lt("2.6.0")
+-    async def test_set_ex(self, r: valkey.Valkey):
++    async def test_set_ex(self, r: valkey.asyncio.Valkey[str]):
+         assert await r.set("a", "1", ex=10)
+         assert 0 < await r.ttl("a") <= 10
+ 
+     @skip_if_server_version_lt("2.6.0")
+-    async def test_set_ex_timedelta(self, r: valkey.Valkey):
++    async def test_set_ex_timedelta(self, r: valkey.asyncio.Valkey[str]):
+         expire_at = datetime.timedelta(seconds=60)
+         assert await r.set("a", "1", ex=expire_at)
+         assert 0 < await r.ttl("a") <= 60
+ 
+     @skip_if_server_version_lt("2.6.0")
+-    async def test_set_multipleoptions(self, r: valkey.Valkey):
++    async def test_set_multipleoptions(self, r: valkey.asyncio.Valkey[str]):
+         await r.set("a", "val")
+         assert await r.set("a", "1", xx=True, px=10000)
+         assert 0 < await r.ttl("a") <= 10
+ 
+     @skip_if_server_version_lt(VALKEY_6_VERSION)
+-    async def test_set_keepttl(self, r: valkey.Valkey):
++    async def test_set_keepttl(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.set("a", "val")
+         assert await r.set("a", "1", xx=True, px=10000)
+         assert 0 < await r.ttl("a") <= 10
+@@ -1055,36 +1057,36 @@ class TestValkeyCommands:
+         assert await r.get("a") == b"2"
+         assert 0 < await r.ttl("a") <= 10
+ 
+-    async def test_setex(self, r: valkey.Valkey):
++    async def test_setex(self, r: valkey.asyncio.Valkey[bytes]):
+         assert await r.setex("a", 60, "1")
+         assert await r.get("a") == b"1"
+         assert 0 < await r.ttl("a") <= 60
+ 
+-    async def test_setnx(self, r: valkey.Valkey):
++    async def test_setnx(self, r: valkey.asyncio.Valkey[bytes]):
+         assert await r.setnx("a", "1")
+         assert await r.get("a") == b"1"
+         assert not await r.setnx("a", "2")
+         assert await r.get("a") == b"1"
+ 
+-    async def test_setrange(self, r: valkey.Valkey):
++    async def test_setrange(self, r: valkey.asyncio.Valkey[bytes]):
+         assert await r.setrange("a", 5, "foo") == 8
+         assert await r.get("a") == b"\0\0\0\0\0foo"
+         await r.set("a", "abcdefghijh")
+         assert await r.setrange("a", 6, "12345") == 11
+         assert await r.get("a") == b"abcdef12345"
+ 
+-    async def test_strlen(self, r: valkey.Valkey):
++    async def test_strlen(self, r: valkey.asyncio.Valkey[str]):
+         await r.set("a", "foo")
+         assert await r.strlen("a") == 3
+ 
+-    async def test_substr(self, r: valkey.Valkey):
++    async def test_substr(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.set("a", "0123456789")
+         assert await r.substr("a", 0) == b"0123456789"
+         assert await r.substr("a", 2) == b"23456789"
+         assert await r.substr("a", 3, 5) == b"345"
+         assert await r.substr("a", 3, -2) == b"345678"
+ 
+-    async def test_ttl(self, r: valkey.Valkey):
++    async def test_ttl(self, r: valkey.asyncio.Valkey[str]):
+         await r.set("a", "1")
+         assert await r.expire("a", 10)
+         assert 0 < await r.ttl("a") <= 10
+@@ -1092,11 +1094,11 @@ class TestValkeyCommands:
+         assert await r.ttl("a") == -1
+ 
+     @skip_if_server_version_lt("2.8.0")
+-    async def test_ttl_nokey(self, r: valkey.Valkey):
++    async def test_ttl_nokey(self, r: valkey.asyncio.Valkey[str]):
+         """TTL on servers 2.8 and after return -2 when the key doesn't exist"""
+         assert await r.ttl("a") == -2
+ 
+-    async def test_type(self, r: valkey.Valkey):
++    async def test_type(self, r: valkey.asyncio.Valkey[bytes]):
+         assert await r.type("a") == b"none"
+         await r.set("a", "1")
+         assert await r.type("a") == b"string"
+@@ -1112,7 +1114,7 @@ class TestValkeyCommands:
+ 
+     # LIST COMMANDS
+     @pytest.mark.onlynoncluster
+-    async def test_blpop(self, r: valkey.Valkey):
++    async def test_blpop(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.rpush("a", "1", "2")
+         await r.rpush("b", "3", "4")
+         assert_resp_response(
+@@ -1134,7 +1136,7 @@ class TestValkeyCommands:
+         )
+ 
+     @pytest.mark.onlynoncluster
+-    async def test_brpop(self, r: valkey.Valkey):
++    async def test_brpop(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.rpush("a", "1", "2")
+         await r.rpush("b", "3", "4")
+         assert_resp_response(
+@@ -1156,7 +1158,7 @@ class TestValkeyCommands:
+         )
+ 
+     @pytest.mark.onlynoncluster
+-    async def test_brpoplpush(self, r: valkey.Valkey):
++    async def test_brpoplpush(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.rpush("a", "1", "2")
+         await r.rpush("b", "3", "4")
+         assert await r.brpoplpush("a", "b") == b"2"
+@@ -1166,54 +1168,54 @@ class TestValkeyCommands:
+         assert await r.lrange("b", 0, -1) == [b"1", b"2", b"3", b"4"]
+ 
+     @pytest.mark.onlynoncluster
+-    async def test_brpoplpush_empty_string(self, r: valkey.Valkey):
++    async def test_brpoplpush_empty_string(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.rpush("a", "")
+         assert await r.brpoplpush("a", "b") == b""
+ 
+-    async def test_lindex(self, r: valkey.Valkey):
++    async def test_lindex(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.rpush("a", "1", "2", "3")
+         assert await r.lindex("a", "0") == b"1"
+         assert await r.lindex("a", "1") == b"2"
+         assert await r.lindex("a", "2") == b"3"
+ 
+-    async def test_linsert(self, r: valkey.Valkey):
++    async def test_linsert(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.rpush("a", "1", "2", "3")
+         assert await r.linsert("a", "after", "2", "2.5") == 4
+         assert await r.lrange("a", 0, -1) == [b"1", b"2", b"2.5", b"3"]
+         assert await r.linsert("a", "before", "2", "1.5") == 5
+         assert await r.lrange("a", 0, -1) == [b"1", b"1.5", b"2", b"2.5", b"3"]
+ 
+-    async def test_llen(self, r: valkey.Valkey):
++    async def test_llen(self, r: valkey.asyncio.Valkey[str]):
+         await r.rpush("a", "1", "2", "3")
+         assert await r.llen("a") == 3
+ 
+-    async def test_lpop(self, r: valkey.Valkey):
++    async def test_lpop(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.rpush("a", "1", "2", "3")
+         assert await r.lpop("a") == b"1"
+         assert await r.lpop("a") == b"2"
+         assert await r.lpop("a") == b"3"
+         assert await r.lpop("a") is None
+ 
+-    async def test_lpush(self, r: valkey.Valkey):
++    async def test_lpush(self, r: valkey.asyncio.Valkey[bytes]):
+         assert await r.lpush("a", "1") == 1
+         assert await r.lpush("a", "2") == 2
+         assert await r.lpush("a", "3", "4") == 4
+         assert await r.lrange("a", 0, -1) == [b"4", b"3", b"2", b"1"]
+ 
+-    async def test_lpushx(self, r: valkey.Valkey):
++    async def test_lpushx(self, r: valkey.asyncio.Valkey[bytes]):
+         assert await r.lpushx("a", "1") == 0
+         assert await r.lrange("a", 0, -1) == []
+         await r.rpush("a", "1", "2", "3")
+         assert await r.lpushx("a", "4") == 4
+         assert await r.lrange("a", 0, -1) == [b"4", b"1", b"2", b"3"]
+ 
+-    async def test_lrange(self, r: valkey.Valkey):
++    async def test_lrange(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.rpush("a", "1", "2", "3", "4", "5")
+         assert await r.lrange("a", 0, 2) == [b"1", b"2", b"3"]
+         assert await r.lrange("a", 2, 10) == [b"3", b"4", b"5"]
+         assert await r.lrange("a", 0, -1) == [b"1", b"2", b"3", b"4", b"5"]
+ 
+-    async def test_lrem(self, r: valkey.Valkey):
++    async def test_lrem(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.rpush("a", "Z", "b", "Z", "Z", "c", "Z", "Z")
+         # remove the first 'Z'  item
+         assert await r.lrem("a", 1, "Z") == 1
+@@ -1225,18 +1227,18 @@ class TestValkeyCommands:
+         assert await r.lrem("a", 0, "Z") == 2
+         assert await r.lrange("a", 0, -1) == [b"b", b"c"]
+ 
+-    async def test_lset(self, r: valkey.Valkey):
++    async def test_lset(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.rpush("a", "1", "2", "3")
+         assert await r.lrange("a", 0, -1) == [b"1", b"2", b"3"]
+         assert await r.lset("a", 1, "4")
+         assert await r.lrange("a", 0, 2) == [b"1", b"4", b"3"]
+ 
+-    async def test_ltrim(self, r: valkey.Valkey):
++    async def test_ltrim(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.rpush("a", "1", "2", "3")
+         assert await r.ltrim("a", 0, 1)
+         assert await r.lrange("a", 0, -1) == [b"1", b"2"]
+ 
+-    async def test_rpop(self, r: valkey.Valkey):
++    async def test_rpop(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.rpush("a", "1", "2", "3")
+         assert await r.rpop("a") == b"3"
+         assert await r.rpop("a") == b"2"
+@@ -1244,21 +1246,21 @@ class TestValkeyCommands:
+         assert await r.rpop("a") is None
+ 
+     @pytest.mark.onlynoncluster
+-    async def test_rpoplpush(self, r: valkey.Valkey):
++    async def test_rpoplpush(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.rpush("a", "a1", "a2", "a3")
+         await r.rpush("b", "b1", "b2", "b3")
+         assert await r.rpoplpush("a", "b") == b"a3"
+         assert await r.lrange("a", 0, -1) == [b"a1", b"a2"]
+         assert await r.lrange("b", 0, -1) == [b"a3", b"b1", b"b2", b"b3"]
+ 
+-    async def test_rpush(self, r: valkey.Valkey):
++    async def test_rpush(self, r: valkey.asyncio.Valkey[bytes]):
+         assert await r.rpush("a", "1") == 1
+         assert await r.rpush("a", "2") == 2
+         assert await r.rpush("a", "3", "4") == 4
+         assert await r.lrange("a", 0, -1) == [b"1", b"2", b"3", b"4"]
+ 
+     @skip_if_server_version_lt("6.0.6")
+-    async def test_lpos(self, r: valkey.Valkey):
++    async def test_lpos(self, r: valkey.asyncio.Valkey[bytes]):
+         assert await r.rpush("a", "a", "b", "c", "1", "2", "3", "c", "c") == 8
+         assert await r.lpos("a", "a") == 0
+         assert await r.lpos("a", "c") == 2
+@@ -1289,7 +1291,7 @@ class TestValkeyCommands:
+         assert await r.lpos("a", "c", count=0, maxlen=3, rank=-1) == [7, 6]
+         assert await r.lpos("a", "c", count=0, maxlen=7, rank=2) == [6]
+ 
+-    async def test_rpushx(self, r: valkey.Valkey):
++    async def test_rpushx(self, r: valkey.asyncio.Valkey[bytes]):
+         assert await r.rpushx("a", "b") == 0
+         assert await r.lrange("a", 0, -1) == []
+         await r.rpush("a", "1", "2", "3")
+@@ -1299,7 +1301,7 @@ class TestValkeyCommands:
+     # SCAN COMMANDS
+     @skip_if_server_version_lt("2.8.0")
+     @pytest.mark.onlynoncluster
+-    async def test_scan(self, r: valkey.Valkey):
++    async def test_scan(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.set("a", 1)
+         await r.set("b", 2)
+         await r.set("c", 3)
+@@ -1311,7 +1313,7 @@ class TestValkeyCommands:
+ 
+     @skip_if_server_version_lt(VALKEY_6_VERSION)
+     @pytest.mark.onlynoncluster
+-    async def test_scan_type(self, r: valkey.Valkey):
++    async def test_scan_type(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.sadd("a-set", 1)
+         await r.hset("a-hash", "foo", 2)
+         await r.lpush("a-list", "aux", 3)
+@@ -1320,7 +1322,7 @@ class TestValkeyCommands:
+ 
+     @skip_if_server_version_lt("2.8.0")
+     @pytest.mark.onlynoncluster
+-    async def test_scan_iter(self, r: valkey.Valkey):
++    async def test_scan_iter(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.set("a", 1)
+         await r.set("b", 2)
+         await r.set("c", 3)
+@@ -1330,7 +1332,7 @@ class TestValkeyCommands:
+         assert set(keys) == {b"a"}
+ 
+     @skip_if_server_version_lt("2.8.0")
+-    async def test_sscan(self, r: valkey.Valkey):
++    async def test_sscan(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.sadd("a", 1, 2, 3)
+         cursor, members = await r.sscan("a")
+         assert cursor == 0
+@@ -1339,7 +1341,7 @@ class TestValkeyCommands:
+         assert set(members) == {b"1"}
+ 
+     @skip_if_server_version_lt("2.8.0")
+-    async def test_sscan_iter(self, r: valkey.Valkey):
++    async def test_sscan_iter(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.sadd("a", 1, 2, 3)
+         members = [k async for k in r.sscan_iter("a")]
+         assert set(members) == {b"1", b"2", b"3"}
+@@ -1347,7 +1349,7 @@ class TestValkeyCommands:
+         assert set(members) == {b"1"}
+ 
+     @skip_if_server_version_lt("2.8.0")
+-    async def test_hscan(self, r: valkey.Valkey):
++    async def test_hscan(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.hset("a", mapping={"a": 1, "b": 2, "c": 3})
+         cursor, dic = await r.hscan("a")
+         assert cursor == 0
+@@ -1357,19 +1359,20 @@ class TestValkeyCommands:
+         _, dic = await r.hscan("a_notset", match="a")
+         assert dic == {}
+ 
++    # TODO(typing): hscan(no_values=True) returns a list of keys at runtime, but the stub types the result as a dict (hence the comparison-overlap ignores below) — verify whether the stub or the command implementation is wrong.
+     @skip_if_server_version_lt("7.3.240")
+-    async def test_hscan_novalues(self, r: valkey.Valkey):
++    async def test_hscan_novalues(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.hset("a", mapping={"a": 1, "b": 2, "c": 3})
+         cursor, keys = await r.hscan("a", no_values=True)
+         assert cursor == 0
+         assert sorted(keys) == [b"a", b"b", b"c"]
+         _, keys = await r.hscan("a", match="a", no_values=True)
+-        assert keys == [b"a"]
++        assert keys == [b"a"]  # type: ignore[comparison-overlap]
+         _, keys = await r.hscan("a_notset", match="a", no_values=True)
+-        assert keys == []
++        assert keys == []  # type: ignore[comparison-overlap]
+ 
+     @skip_if_server_version_lt("2.8.0")
+-    async def test_hscan_iter(self, r: valkey.Valkey):
++    async def test_hscan_iter(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.hset("a", mapping={"a": 1, "b": 2, "c": 3})
+         dic = {k: v async for k, v in r.hscan_iter("a")}
+         assert dic == {b"a": b"1", b"b": b"2", b"c": b"3"}
+@@ -1378,20 +1381,21 @@ class TestValkeyCommands:
+         dic = {k: v async for k, v in r.hscan_iter("a_notset", match="a")}
+         assert dic == {}
+ 
++    # TODO(typing): hscan_iter(no_values=True) yields bare keys, but the stub still types items as key/value pairs (hence the comparison-overlap ignores below) — verify the stub against the runtime behavior.
+     @skip_if_server_version_lt("7.3.240")
+-    async def test_hscan_iter_novalues(self, r: valkey.Valkey):
++    async def test_hscan_iter_novalues(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.hset("a", mapping={"a": 1, "b": 2, "c": 3})
+         keys = list([k async for k in r.hscan_iter("a", no_values=True)])
+-        assert sorted(keys) == [b"a", b"b", b"c"]
++        assert sorted(keys) == [b"a", b"b", b"c"]  # type: ignore[comparison-overlap]
+         keys = list([k async for k in r.hscan_iter("a", match="a", no_values=True)])
+-        assert keys == [b"a"]
++        assert keys == [b"a"]  # type: ignore[comparison-overlap]
+         keys = list(
+             [k async for k in r.hscan_iter("a", match="a_notset", no_values=True)]
+         )
+         assert keys == []
+ 
+     @skip_if_server_version_lt("2.8.0")
+-    async def test_zscan(self, r: valkey.Valkey):
++    async def test_zscan(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.zadd("a", {"a": 1, "b": 2, "c": 3})
+         cursor, pairs = await r.zscan("a")
+         assert cursor == 0
+@@ -1400,7 +1404,7 @@ class TestValkeyCommands:
+         assert set(pairs) == {(b"a", 1)}
+ 
+     @skip_if_server_version_lt("2.8.0")
+-    async def test_zscan_iter(self, r: valkey.Valkey):
++    async def test_zscan_iter(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.zadd("a", {"a": 1, "b": 2, "c": 3})
+         pairs = [k async for k in r.zscan_iter("a")]
+         assert set(pairs) == {(b"a", 1), (b"b", 2), (b"c", 3)}
+@@ -1408,78 +1412,78 @@ class TestValkeyCommands:
+         assert set(pairs) == {(b"a", 1)}
+ 
+     # SET COMMANDS
+-    async def test_sadd(self, r: valkey.Valkey):
++    async def test_sadd(self, r: valkey.asyncio.Valkey[bytes]):
+         members = {b"1", b"2", b"3"}
+         await r.sadd("a", *members)
+         assert set(await r.smembers("a")) == members
+ 
+-    async def test_scard(self, r: valkey.Valkey):
++    async def test_scard(self, r: valkey.asyncio.Valkey[str]):
+         await r.sadd("a", "1", "2", "3")
+         assert await r.scard("a") == 3
+ 
+     @pytest.mark.onlynoncluster
+-    async def test_sdiff(self, r: valkey.Valkey):
++    async def test_sdiff(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.sadd("a", "1", "2", "3")
+         assert set(await r.sdiff("a", "b")) == {b"1", b"2", b"3"}
+         await r.sadd("b", "2", "3")
+-        assert await r.sdiff("a", "b") == [b"1"]
++        assert await r.sdiff("a", "b") == {b"1", }
+ 
+     @pytest.mark.onlynoncluster
+-    async def test_sdiffstore(self, r: valkey.Valkey):
++    async def test_sdiffstore(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.sadd("a", "1", "2", "3")
+         assert await r.sdiffstore("c", "a", "b") == 3
+         assert set(await r.smembers("c")) == {b"1", b"2", b"3"}
+         await r.sadd("b", "2", "3")
+         assert await r.sdiffstore("c", "a", "b") == 1
+-        assert await r.smembers("c") == [b"1"]
++        assert await r.smembers("c") == [b"1", ]
+ 
+     @pytest.mark.onlynoncluster
+-    async def test_sinter(self, r: valkey.Valkey):
++    async def test_sinter(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.sadd("a", "1", "2", "3")
+-        assert await r.sinter("a", "b") == []
++        assert await r.sinter("a", "b") == set()
+         await r.sadd("b", "2", "3")
+         assert set(await r.sinter("a", "b")) == {b"2", b"3"}
+ 
+     @pytest.mark.onlynoncluster
+-    async def test_sinterstore(self, r: valkey.Valkey):
++    async def test_sinterstore(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.sadd("a", "1", "2", "3")
+         assert await r.sinterstore("c", "a", "b") == 0
+-        assert await r.smembers("c") == []
++        assert await r.smembers("c") == list()
+         await r.sadd("b", "2", "3")
+         assert await r.sinterstore("c", "a", "b") == 2
+         assert set(await r.smembers("c")) == {b"2", b"3"}
+ 
+-    async def test_sismember(self, r: valkey.Valkey):
++    async def test_sismember(self, r: valkey.asyncio.Valkey[str]):
+         await r.sadd("a", "1", "2", "3")
+         assert await r.sismember("a", "1")
+         assert await r.sismember("a", "2")
+         assert await r.sismember("a", "3")
+         assert not await r.sismember("a", "4")
+ 
+-    async def test_smembers(self, r: valkey.Valkey):
++    async def test_smembers(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.sadd("a", "1", "2", "3")
+         assert set(await r.smembers("a")) == {b"1", b"2", b"3"}
+ 
+     @pytest.mark.onlynoncluster
+-    async def test_smove(self, r: valkey.Valkey):
++    async def test_smove(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.sadd("a", "a1", "a2")
+         await r.sadd("b", "b1", "b2")
+         assert await r.smove("a", "b", "a1")
+-        assert await r.smembers("a") == [b"a2"]
++        assert await r.smembers("a") == [b"a2", ]
+         assert set(await r.smembers("b")) == {b"b1", b"b2", b"a1"}
+ 
+-    async def test_spop(self, r: valkey.Valkey):
++    async def test_spop(self, r: valkey.asyncio.Valkey[bytes]):
+         s = [b"1", b"2", b"3"]
+         await r.sadd("a", *s)
+-        value = await r.spop("a")
++        value: bytes = await r.spop("a")  # type: ignore[assignment]
+         assert value in s
+-        assert set(await r.smembers("a")) == set(s) - {value}
++        assert set(await r.smembers("a")) == set(s) - {value, }
+ 
+     @skip_if_server_version_lt("3.2.0")
+-    async def test_spop_multi_value(self, r: valkey.Valkey):
++    async def test_spop_multi_value(self, r: valkey.asyncio.Valkey[bytes]):
+         s = [b"1", b"2", b"3"]
+         await r.sadd("a", *s)
+-        values = await r.spop("a", 2)
++        values: list[bytes] = await r.spop("a", 2)  # type: ignore[assignment]
+         assert len(values) == 2
+ 
+         for value in values:
+@@ -1488,42 +1492,42 @@ class TestValkeyCommands:
+         response = await r.spop("a", 1)
+         assert set(response) == set(s) - set(values)
+ 
+-    async def test_srandmember(self, r: valkey.Valkey):
++    async def test_srandmember(self, r: valkey.asyncio.Valkey[str]):
+         s = [b"1", b"2", b"3"]
+         await r.sadd("a", *s)
+         assert await r.srandmember("a") in s
+ 
+     @skip_if_server_version_lt("2.6.0")
+-    async def test_srandmember_multi_value(self, r: valkey.Valkey):
++    async def test_srandmember_multi_value(self, r: valkey.asyncio.Valkey[str]):
+         s = [b"1", b"2", b"3"]
+         await r.sadd("a", *s)
+         randoms = await r.srandmember("a", number=2)
+         assert len(randoms) == 2
+         assert set(randoms).intersection(s) == set(randoms)
+ 
+-    async def test_srem(self, r: valkey.Valkey):
++    async def test_srem(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.sadd("a", "1", "2", "3", "4")
+         assert await r.srem("a", "5") == 0
+         assert await r.srem("a", "2", "4") == 2
+         assert set(await r.smembers("a")) == {b"1", b"3"}
+ 
+     @pytest.mark.onlynoncluster
+-    async def test_sunion(self, r: valkey.Valkey):
++    async def test_sunion(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.sadd("a", "1", "2")
+         await r.sadd("b", "2", "3")
+         assert set(await r.sunion("a", "b")) == {b"1", b"2", b"3"}
+ 
+     @pytest.mark.onlynoncluster
+-    async def test_sunionstore(self, r: valkey.Valkey):
++    async def test_sunionstore(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.sadd("a", "1", "2")
+         await r.sadd("b", "2", "3")
+         assert await r.sunionstore("c", "a", "b") == 3
+         assert set(await r.smembers("c")) == {b"1", b"2", b"3"}
+ 
+     # SORTED SET COMMANDS
+-    async def test_zadd(self, r: valkey.Valkey):
++    async def test_zadd(self, r: valkey.asyncio.Valkey[bytes]):
+         mapping = {"a1": 1.0, "a2": 2.0, "a3": 3.0}
+-        await r.zadd("a", mapping)
++        await r.zadd("a", mapping)  # type: ignore[arg-type]
+         response = await r.zrange("a", 0, -1, withscores=True)
+         assert_resp_response(
+             r,
+@@ -1538,13 +1542,13 @@ class TestValkeyCommands:
+ 
+         # cannot use both nx and xx options
+         with pytest.raises(exceptions.DataError):
+-            await r.zadd("a", mapping, nx=True, xx=True)
++            await r.zadd("a", mapping, nx=True, xx=True)  # type: ignore[arg-type]
+ 
+         # cannot use the incr options with more than one value
+         with pytest.raises(exceptions.DataError):
+-            await r.zadd("a", mapping, incr=True)
++            await r.zadd("a", mapping, incr=True)  # type: ignore[arg-type]
+ 
+-    async def test_zadd_nx(self, r: valkey.Valkey):
++    async def test_zadd_nx(self, r: valkey.asyncio.Valkey[bytes]):
+         assert await r.zadd("a", {"a1": 1}) == 1
+         assert await r.zadd("a", {"a1": 99, "a2": 2}, nx=True) == 1
+         response = await r.zrange("a", 0, -1, withscores=True)
+@@ -1552,13 +1556,13 @@ class TestValkeyCommands:
+             r, response, [(b"a1", 1.0), (b"a2", 2.0)], [[b"a1", 1.0], [b"a2", 2.0]]
+         )
+ 
+-    async def test_zadd_xx(self, r: valkey.Valkey):
++    async def test_zadd_xx(self, r: valkey.asyncio.Valkey[bytes]):
+         assert await r.zadd("a", {"a1": 1}) == 1
+         assert await r.zadd("a", {"a1": 99, "a2": 2}, xx=True) == 0
+         response = await r.zrange("a", 0, -1, withscores=True)
+         assert_resp_response(r, response, [(b"a1", 99.0)], [[b"a1", 99.0]])
+ 
+-    async def test_zadd_ch(self, r: valkey.Valkey):
++    async def test_zadd_ch(self, r: valkey.asyncio.Valkey[bytes]):
+         assert await r.zadd("a", {"a1": 1}) == 1
+         assert await r.zadd("a", {"a1": 99, "a2": 2}, ch=True) == 2
+         response = await r.zrange("a", 0, -1, withscores=True)
+@@ -1566,21 +1570,21 @@ class TestValkeyCommands:
+             r, response, [(b"a2", 2.0), (b"a1", 99.0)], [[b"a2", 2.0], [b"a1", 99.0]]
+         )
+ 
+-    async def test_zadd_incr(self, r: valkey.Valkey):
++    async def test_zadd_incr(self, r: valkey.asyncio.Valkey[str]):
+         assert await r.zadd("a", {"a1": 1}) == 1
+         assert await r.zadd("a", {"a1": 4.5}, incr=True) == 5.5
+ 
+-    async def test_zadd_incr_with_xx(self, r: valkey.Valkey):
++    async def test_zadd_incr_with_xx(self, r: valkey.asyncio.Valkey[str]):
+         # this asks zadd to incr 'a1' only if it exists, but it clearly
+         # doesn't. Valkey returns a null value in this case and so should
+         # valkey-py
+         assert await r.zadd("a", {"a1": 1}, xx=True, incr=True) is None
+ 
+-    async def test_zcard(self, r: valkey.Valkey):
++    async def test_zcard(self, r: valkey.asyncio.Valkey[str]):
+         await r.zadd("a", {"a1": 1, "a2": 2, "a3": 3})
+         assert await r.zcard("a") == 3
+ 
+-    async def test_zcount(self, r: valkey.Valkey):
++    async def test_zcount(self, r: valkey.asyncio.Valkey[str]):
+         await r.zadd("a", {"a1": 1, "a2": 2, "a3": 3})
+         assert await r.zcount("a", "-inf", "+inf") == 3
+         assert await r.zcount("a", 1, 2) == 2
+@@ -1607,7 +1611,7 @@ class TestValkeyCommands:
+         response = await r.zrange("out", 0, -1, withscores=True)
+         assert_resp_response(r, response, [(b"a3", 3.0)], [[b"a3", 3.0]])
+ 
+-    async def test_zincrby(self, r: valkey.Valkey):
++    async def test_zincrby(self, r: valkey.asyncio.Valkey[str]):
+         await r.zadd("a", {"a1": 1, "a2": 2, "a3": 3})
+         assert await r.zincrby("a", 1, "a2") == 3.0
+         assert await r.zincrby("a", 5, "a3") == 8.0
+@@ -1615,13 +1619,13 @@ class TestValkeyCommands:
+         assert await r.zscore("a", "a3") == 8.0
+ 
+     @skip_if_server_version_lt("2.8.9")
+-    async def test_zlexcount(self, r: valkey.Valkey):
++    async def test_zlexcount(self, r: valkey.asyncio.Valkey[str]):
+         await r.zadd("a", {"a": 0, "b": 0, "c": 0, "d": 0, "e": 0, "f": 0, "g": 0})
+         assert await r.zlexcount("a", "-", "+") == 7
+         assert await r.zlexcount("a", "[b", "[f") == 5
+ 
+     @pytest.mark.onlynoncluster
+-    async def test_zinterstore_sum(self, r: valkey.Valkey):
++    async def test_zinterstore_sum(self, r: valkey.asyncio.Valkey[str]):
+         await r.zadd("a", {"a1": 1, "a2": 1, "a3": 1})
+         await r.zadd("b", {"a1": 2, "a2": 2, "a3": 2})
+         await r.zadd("c", {"a1": 6, "a3": 5, "a4": 4})
+@@ -1632,7 +1636,7 @@ class TestValkeyCommands:
+         )
+ 
+     @pytest.mark.onlynoncluster
+-    async def test_zinterstore_max(self, r: valkey.Valkey):
++    async def test_zinterstore_max(self, r: valkey.asyncio.Valkey[str]):
+         await r.zadd("a", {"a1": 1, "a2": 1, "a3": 1})
+         await r.zadd("b", {"a1": 2, "a2": 2, "a3": 2})
+         await r.zadd("c", {"a1": 6, "a3": 5, "a4": 4})
+@@ -1643,7 +1647,7 @@ class TestValkeyCommands:
+         )
+ 
+     @pytest.mark.onlynoncluster
+-    async def test_zinterstore_min(self, r: valkey.Valkey):
++    async def test_zinterstore_min(self, r: valkey.asyncio.Valkey[str]):
+         await r.zadd("a", {"a1": 1, "a2": 2, "a3": 3})
+         await r.zadd("b", {"a1": 2, "a2": 3, "a3": 5})
+         await r.zadd("c", {"a1": 6, "a3": 5, "a4": 4})
+@@ -1654,7 +1658,7 @@ class TestValkeyCommands:
+         )
+ 
+     @pytest.mark.onlynoncluster
+-    async def test_zinterstore_with_weight(self, r: valkey.Valkey):
++    async def test_zinterstore_with_weight(self, r: valkey.asyncio.Valkey[str]):
+         await r.zadd("a", {"a1": 1, "a2": 1, "a3": 1})
+         await r.zadd("b", {"a1": 2, "a2": 2, "a3": 2})
+         await r.zadd("c", {"a1": 6, "a3": 5, "a4": 4})
+@@ -1665,7 +1669,7 @@ class TestValkeyCommands:
+         )
+ 
+     @skip_if_server_version_lt("4.9.0")
+-    async def test_zpopmax(self, r: valkey.Valkey):
++    async def test_zpopmax(self, r: valkey.asyncio.Valkey[str]):
+         await r.zadd("a", {"a1": 1, "a2": 2, "a3": 3})
+         response = await r.zpopmax("a")
+         assert_resp_response(r, response, [(b"a3", 3)], [b"a3", 3.0])
+@@ -1677,7 +1681,7 @@ class TestValkeyCommands:
+         )
+ 
+     @skip_if_server_version_lt("4.9.0")
+-    async def test_zpopmin(self, r: valkey.Valkey):
++    async def test_zpopmin(self, r: valkey.asyncio.Valkey[str]):
+         await r.zadd("a", {"a1": 1, "a2": 2, "a3": 3})
+         response = await r.zpopmin("a")
+         assert_resp_response(r, response, [(b"a1", 1)], [b"a1", 1.0])
+@@ -1690,7 +1694,7 @@ class TestValkeyCommands:
+ 
+     @skip_if_server_version_lt("4.9.0")
+     @pytest.mark.onlynoncluster
+-    async def test_bzpopmax(self, r: valkey.Valkey):
++    async def test_bzpopmax(self, r: valkey.asyncio.Valkey[str]):
+         await r.zadd("a", {"a1": 1, "a2": 2})
+         await r.zadd("b", {"b1": 10, "b2": 20})
+         assert_resp_response(
+@@ -1725,7 +1729,7 @@ class TestValkeyCommands:
+ 
+     @skip_if_server_version_lt("4.9.0")
+     @pytest.mark.onlynoncluster
+-    async def test_bzpopmin(self, r: valkey.Valkey):
++    async def test_bzpopmin(self, r: valkey.asyncio.Valkey[str]):
+         await r.zadd("a", {"a1": 1, "a2": 2})
+         await r.zadd("b", {"b1": 10, "b2": 20})
+         assert_resp_response(
+@@ -1758,7 +1762,7 @@ class TestValkeyCommands:
+             r, await r.bzpopmin("c", timeout=1), (b"c", b"c1", 100), [b"c", b"c1", 100]
+         )
+ 
+-    async def test_zrange(self, r: valkey.Valkey):
++    async def test_zrange(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.zadd("a", {"a1": 1, "a2": 2, "a3": 3})
+         assert await r.zrange("a", 0, 1) == [b"a1", b"a2"]
+         assert await r.zrange("a", 1, 2) == [b"a2", b"a3"]
+@@ -1780,7 +1784,7 @@ class TestValkeyCommands:
+         # ]
+ 
+     @skip_if_server_version_lt("2.8.9")
+-    async def test_zrangebylex(self, r: valkey.Valkey):
++    async def test_zrangebylex(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.zadd("a", {"a": 0, "b": 0, "c": 0, "d": 0, "e": 0, "f": 0, "g": 0})
+         assert await r.zrangebylex("a", "-", "[c") == [b"a", b"b", b"c"]
+         assert await r.zrangebylex("a", "-", "(c") == [b"a", b"b"]
+@@ -1789,7 +1793,7 @@ class TestValkeyCommands:
+         assert await r.zrangebylex("a", "-", "+", start=3, num=2) == [b"d", b"e"]
+ 
+     @skip_if_server_version_lt("2.9.9")
+-    async def test_zrevrangebylex(self, r: valkey.Valkey):
++    async def test_zrevrangebylex(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.zadd("a", {"a": 0, "b": 0, "c": 0, "d": 0, "e": 0, "f": 0, "g": 0})
+         assert await r.zrevrangebylex("a", "[c", "-") == [b"c", b"b", b"a"]
+         assert await r.zrevrangebylex("a", "(c", "-") == [b"b", b"a"]
+@@ -1803,7 +1807,7 @@ class TestValkeyCommands:
+         assert await r.zrevrangebylex("a", "+", "[f") == [b"g", b"f"]
+         assert await r.zrevrangebylex("a", "+", "-", start=3, num=2) == [b"d", b"c"]
+ 
+-    async def test_zrangebyscore(self, r: valkey.Valkey):
++    async def test_zrangebyscore(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.zadd("a", {"a1": 1, "a2": 2, "a3": 3, "a4": 4, "a5": 5})
+         assert await r.zrangebyscore("a", 2, 4) == [b"a2", b"a3", b"a4"]
+ 
+@@ -1830,14 +1834,14 @@ class TestValkeyCommands:
+             [[b"a2", 2], [b"a3", 3], [b"a4", 4]],
+         )
+ 
+-    async def test_zrank(self, r: valkey.Valkey):
++    async def test_zrank(self, r: valkey.asyncio.Valkey[str]):
+         await r.zadd("a", {"a1": 1, "a2": 2, "a3": 3, "a4": 4, "a5": 5})
+         assert await r.zrank("a", "a1") == 0
+         assert await r.zrank("a", "a2") == 1
+         assert await r.zrank("a", "a6") is None
+ 
+     @skip_if_server_version_lt("7.2.0")
+-    async def test_zrank_withscore(self, r: valkey.Valkey):
++    async def test_zrank_withscore(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.zadd("a", {"a1": 1, "a2": 2, "a3": 3, "a4": 4, "a5": 5})
+         assert await r.zrank("a", "a1") == 0
+         assert await r.zrank("a", "a2") == 1
+@@ -1847,20 +1851,20 @@ class TestValkeyCommands:
+         )
+         assert await r.zrank("a", "a6", withscore=True) is None
+ 
+-    async def test_zrem(self, r: valkey.Valkey):
++    async def test_zrem(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.zadd("a", {"a1": 1, "a2": 2, "a3": 3})
+         assert await r.zrem("a", "a2") == 1
+         assert await r.zrange("a", 0, -1) == [b"a1", b"a3"]
+         assert await r.zrem("a", "b") == 0
+         assert await r.zrange("a", 0, -1) == [b"a1", b"a3"]
+ 
+-    async def test_zrem_multiple_keys(self, r: valkey.Valkey):
++    async def test_zrem_multiple_keys(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.zadd("a", {"a1": 1, "a2": 2, "a3": 3})
+         assert await r.zrem("a", "a1", "a2") == 2
+         assert await r.zrange("a", 0, 5) == [b"a3"]
+ 
+     @skip_if_server_version_lt("2.8.9")
+-    async def test_zremrangebylex(self, r: valkey.Valkey):
++    async def test_zremrangebylex(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.zadd("a", {"a": 0, "b": 0, "c": 0, "d": 0, "e": 0, "f": 0, "g": 0})
+         assert await r.zremrangebylex("a", "-", "[c") == 3
+         assert await r.zrange("a", 0, -1) == [b"d", b"e", b"f", b"g"]
+@@ -1869,19 +1873,19 @@ class TestValkeyCommands:
+         assert await r.zremrangebylex("a", "[h", "+") == 0
+         assert await r.zrange("a", 0, -1) == [b"d", b"e"]
+ 
+-    async def test_zremrangebyrank(self, r: valkey.Valkey):
++    async def test_zremrangebyrank(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.zadd("a", {"a1": 1, "a2": 2, "a3": 3, "a4": 4, "a5": 5})
+         assert await r.zremrangebyrank("a", 1, 3) == 3
+         assert await r.zrange("a", 0, 5) == [b"a1", b"a5"]
+ 
+-    async def test_zremrangebyscore(self, r: valkey.Valkey):
++    async def test_zremrangebyscore(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.zadd("a", {"a1": 1, "a2": 2, "a3": 3, "a4": 4, "a5": 5})
+         assert await r.zremrangebyscore("a", 2, 4) == 3
+         assert await r.zrange("a", 0, -1) == [b"a1", b"a5"]
+         assert await r.zremrangebyscore("a", 2, 4) == 0
+         assert await r.zrange("a", 0, -1) == [b"a1", b"a5"]
+ 
+-    async def test_zrevrange(self, r: valkey.Valkey):
++    async def test_zrevrange(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.zadd("a", {"a1": 1, "a2": 2, "a3": 3})
+         assert await r.zrevrange("a", 0, 1) == [b"a3", b"a2"]
+         assert await r.zrevrange("a", 1, 2) == [b"a2", b"a1"]
+@@ -1902,7 +1906,7 @@ class TestValkeyCommands:
+             r, response, [(b"a3", 3), (b"a2", 2)], [[b"a3", 3], [b"a2", 2]]
+         )
+ 
+-    async def test_zrevrangebyscore(self, r: valkey.Valkey):
++    async def test_zrevrangebyscore(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.zadd("a", {"a1": 1, "a2": 2, "a3": 3, "a4": 4, "a5": 5})
+         assert await r.zrevrangebyscore("a", 4, 2) == [b"a4", b"a3", b"a2"]
+ 
+@@ -1929,14 +1933,14 @@ class TestValkeyCommands:
+             [[b"a4", 4], [b"a3", 3], [b"a2", 2]],
+         )
+ 
+-    async def test_zrevrank(self, r: valkey.Valkey):
++    async def test_zrevrank(self, r: valkey.asyncio.Valkey[str]):
+         await r.zadd("a", {"a1": 1, "a2": 2, "a3": 3, "a4": 4, "a5": 5})
+         assert await r.zrevrank("a", "a1") == 4
+         assert await r.zrevrank("a", "a2") == 3
+         assert await r.zrevrank("a", "a6") is None
+ 
+     @skip_if_server_version_lt("7.2.0")
+-    async def test_zrevrank_withscore(self, r: valkey.Valkey):
++    async def test_zrevrank_withscore(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.zadd("a", {"a1": 1, "a2": 2, "a3": 3, "a4": 4, "a5": 5})
+         assert await r.zrevrank("a", "a1") == 4
+         assert await r.zrevrank("a", "a2") == 3
+@@ -1946,14 +1950,14 @@ class TestValkeyCommands:
+         )
+         assert await r.zrevrank("a", "a6", withscore=True) is None
+ 
+-    async def test_zscore(self, r: valkey.Valkey):
++    async def test_zscore(self, r: valkey.asyncio.Valkey[str]):
+         await r.zadd("a", {"a1": 1, "a2": 2, "a3": 3})
+         assert await r.zscore("a", "a1") == 1.0
+         assert await r.zscore("a", "a2") == 2.0
+         assert await r.zscore("a", "a4") is None
+ 
+     @pytest.mark.onlynoncluster
+-    async def test_zunionstore_sum(self, r: valkey.Valkey):
++    async def test_zunionstore_sum(self, r: valkey.asyncio.Valkey[str]):
+         await r.zadd("a", {"a1": 1, "a2": 1, "a3": 1})
+         await r.zadd("b", {"a1": 2, "a2": 2, "a3": 2})
+         await r.zadd("c", {"a1": 6, "a3": 5, "a4": 4})
+@@ -1967,7 +1971,7 @@ class TestValkeyCommands:
+         )
+ 
+     @pytest.mark.onlynoncluster
+-    async def test_zunionstore_max(self, r: valkey.Valkey):
++    async def test_zunionstore_max(self, r: valkey.asyncio.Valkey[str]):
+         await r.zadd("a", {"a1": 1, "a2": 1, "a3": 1})
+         await r.zadd("b", {"a1": 2, "a2": 2, "a3": 2})
+         await r.zadd("c", {"a1": 6, "a3": 5, "a4": 4})
+@@ -1981,7 +1985,7 @@ class TestValkeyCommands:
+         )
+ 
+     @pytest.mark.onlynoncluster
+-    async def test_zunionstore_min(self, r: valkey.Valkey):
++    async def test_zunionstore_min(self, r: valkey.asyncio.Valkey[str]):
+         await r.zadd("a", {"a1": 1, "a2": 2, "a3": 3})
+         await r.zadd("b", {"a1": 2, "a2": 2, "a3": 4})
+         await r.zadd("c", {"a1": 6, "a3": 5, "a4": 4})
+@@ -1995,7 +1999,7 @@ class TestValkeyCommands:
+         )
+ 
+     @pytest.mark.onlynoncluster
+-    async def test_zunionstore_with_weight(self, r: valkey.Valkey):
++    async def test_zunionstore_with_weight(self, r: valkey.asyncio.Valkey[str]):
+         await r.zadd("a", {"a1": 1, "a2": 1, "a3": 1})
+         await r.zadd("b", {"a1": 2, "a2": 2, "a3": 2})
+         await r.zadd("c", {"a1": 6, "a3": 5, "a4": 4})
+@@ -2010,7 +2014,7 @@ class TestValkeyCommands:
+ 
+     # HYPERLOGLOG TESTS
+     @skip_if_server_version_lt("2.8.9")
+-    async def test_pfadd(self, r: valkey.Valkey):
++    async def test_pfadd(self, r: valkey.asyncio.Valkey[str]):
+         members = {b"1", b"2", b"3"}
+         assert await r.pfadd("a", *members) == 1
+         assert await r.pfadd("a", *members) == 0
+@@ -2018,18 +2022,18 @@ class TestValkeyCommands:
+ 
+     @skip_if_server_version_lt("2.8.9")
+     @pytest.mark.onlynoncluster
+-    async def test_pfcount(self, r: valkey.Valkey):
++    async def test_pfcount(self, r: valkey.asyncio.Valkey[str]):
+         members = {b"1", b"2", b"3"}
+         await r.pfadd("a", *members)
+         assert await r.pfcount("a") == len(members)
+         members_b = {b"2", b"3", b"4"}
+         await r.pfadd("b", *members_b)
+         assert await r.pfcount("b") == len(members_b)
+-        assert await r.pfcount("a", "b") == len(members_b.union(members))
++        assert await r.pfcount("a", "b") == len(members_b.union(members))  # type: ignore[call-arg]
+ 
+     @skip_if_server_version_lt("2.8.9")
+     @pytest.mark.onlynoncluster
+-    async def test_pfmerge(self, r: valkey.Valkey):
++    async def test_pfmerge(self, r: valkey.asyncio.Valkey[str]):
+         mema = {b"1", b"2", b"3"}
+         memb = {b"2", b"3", b"4"}
+         memc = {b"5", b"6", b"7"}
+@@ -2042,7 +2046,7 @@ class TestValkeyCommands:
+         assert await r.pfcount("d") == 7
+ 
+     # HASH COMMANDS
+-    async def test_hget_and_hset(self, r: valkey.Valkey):
++    async def test_hget_and_hset(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.hset("a", mapping={"1": 1, "2": 2, "3": 3})
+         assert await r.hget("a", "1") == b"1"
+         assert await r.hget("a", "2") == b"2"
+@@ -2060,10 +2064,10 @@ class TestValkeyCommands:
+         assert await r.hget("a", "b") is None
+ 
+         # keys with bool(key) == False
+-        assert await r.hset("a", 0, 10) == 1
++        assert await r.hset("a", 0, 10) == 1  # type: ignore[call-overload]
+         assert await r.hset("a", "", 10) == 1
+ 
+-    async def test_hset_with_multi_key_values(self, r: valkey.Valkey):
++    async def test_hset_with_multi_key_values(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.hset("a", mapping={"1": 1, "2": 2, "3": 3})
+         assert await r.hget("a", "1") == b"1"
+         assert await r.hget("a", "2") == b"2"
+@@ -2074,94 +2078,94 @@ class TestValkeyCommands:
+         assert await r.hget("b", "2") == b"2"
+         assert await r.hget("b", "foo") == b"bar"
+ 
+-    async def test_hset_without_data(self, r: valkey.Valkey):
++    async def test_hset_without_data(self, r: valkey.asyncio.Valkey[str]):
+         with pytest.raises(exceptions.DataError):
+-            await r.hset("x")
++            await r.hset("x")  # type: ignore[call-overload]
+ 
+-    async def test_hdel(self, r: valkey.Valkey):
++    async def test_hdel(self, r: valkey.asyncio.Valkey[str]):
+         await r.hset("a", mapping={"1": 1, "2": 2, "3": 3})
+         assert await r.hdel("a", "2") == 1
+         assert await r.hget("a", "2") is None
+         assert await r.hdel("a", "1", "3") == 2
+         assert await r.hlen("a") == 0
+ 
+-    async def test_hexists(self, r: valkey.Valkey):
++    async def test_hexists(self, r: valkey.asyncio.Valkey[str]):
+         await r.hset("a", mapping={"1": 1, "2": 2, "3": 3})
+         assert await r.hexists("a", "1")
+         assert not await r.hexists("a", "4")
+ 
+-    async def test_hgetall(self, r: valkey.Valkey):
++    async def test_hgetall(self, r: valkey.asyncio.Valkey[bytes]):
+         h = {b"a1": b"1", b"a2": b"2", b"a3": b"3"}
+-        await r.hset("a", mapping=h)
++        await r.hset("a", mapping=h)  # type: ignore[arg-type]
+         assert await r.hgetall("a") == h
+ 
+-    async def test_hincrby(self, r: valkey.Valkey):
++    async def test_hincrby(self, r: valkey.asyncio.Valkey[str]):
+         assert await r.hincrby("a", "1") == 1
+         assert await r.hincrby("a", "1", amount=2) == 3
+         assert await r.hincrby("a", "1", amount=-2) == 1
+ 
+     @skip_if_server_version_lt("2.6.0")
+-    async def test_hincrbyfloat(self, r: valkey.Valkey):
++    async def test_hincrbyfloat(self, r: valkey.asyncio.Valkey[str]):
+         assert await r.hincrbyfloat("a", "1") == 1.0
+         assert await r.hincrbyfloat("a", "1") == 2.0
+         assert await r.hincrbyfloat("a", "1", 1.2) == 3.2
+ 
+-    async def test_hkeys(self, r: valkey.Valkey):
++    async def test_hkeys(self, r: valkey.asyncio.Valkey[bytes]):
+         h = {b"a1": b"1", b"a2": b"2", b"a3": b"3"}
+-        await r.hset("a", mapping=h)
++        await r.hset("a", mapping=h)  # type: ignore[arg-type]
+         local_keys = list(h.keys())
+         remote_keys = await r.hkeys("a")
+         assert sorted(local_keys) == sorted(remote_keys)
+ 
+-    async def test_hlen(self, r: valkey.Valkey):
++    async def test_hlen(self, r: valkey.asyncio.Valkey[str]):
+         await r.hset("a", mapping={"1": 1, "2": 2, "3": 3})
+         assert await r.hlen("a") == 3
+ 
+-    async def test_hmget(self, r: valkey.Valkey):
++    async def test_hmget(self, r: valkey.asyncio.Valkey[bytes]):
+         assert await r.hset("a", mapping={"a": 1, "b": 2, "c": 3})
+         assert await r.hmget("a", "a", "b", "c") == [b"1", b"2", b"3"]
+ 
+-    async def test_hmset(self, r: valkey.Valkey):
++    async def test_hmset(self, r: valkey.asyncio.Valkey[bytes]):
+         warning_message = (
+             r"^Valkey(?:Cluster)*\.hmset\(\) is deprecated\. "
+             r"Use Valkey(?:Cluster)*\.hset\(\) instead\.$"
+         )
+         h = {b"a": b"1", b"b": b"2", b"c": b"3"}
+         with pytest.warns(DeprecationWarning, match=warning_message):
+-            assert await r.hmset("a", h)
++            assert await r.hmset("a", h)  # type: ignore[arg-type]
+         assert await r.hgetall("a") == h
+ 
+-    async def test_hsetnx(self, r: valkey.Valkey):
++    async def test_hsetnx(self, r: valkey.asyncio.Valkey[bytes]):
+         # Initially set the hash field
+         assert await r.hsetnx("a", "1", 1)
+         assert await r.hget("a", "1") == b"1"
+         assert not await r.hsetnx("a", "1", 2)
+         assert await r.hget("a", "1") == b"1"
+ 
+-    async def test_hvals(self, r: valkey.Valkey):
++    async def test_hvals(self, r: valkey.asyncio.Valkey[bytes]):
+         h = {b"a1": b"1", b"a2": b"2", b"a3": b"3"}
+-        await r.hset("a", mapping=h)
++        await r.hset("a", mapping=h)  # type: ignore[arg-type]
+         local_vals = list(h.values())
+         remote_vals = await r.hvals("a")
+         assert sorted(local_vals) == sorted(remote_vals)
+ 
+     @skip_if_server_version_lt("3.2.0")
+-    async def test_hstrlen(self, r: valkey.Valkey):
++    async def test_hstrlen(self, r: valkey.asyncio.Valkey[str]):
+         await r.hset("a", mapping={"1": "22", "2": "333"})
+         assert await r.hstrlen("a", "1") == 2
+         assert await r.hstrlen("a", "2") == 3
+ 
+     # SORT
+-    async def test_sort_basic(self, r: valkey.Valkey):
++    async def test_sort_basic(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.rpush("a", "3", "2", "1", "4")
+         assert await r.sort("a") == [b"1", b"2", b"3", b"4"]
+ 
+-    async def test_sort_limited(self, r: valkey.Valkey):
++    async def test_sort_limited(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.rpush("a", "3", "2", "1", "4")
+         assert await r.sort("a", start=1, num=2) == [b"2", b"3"]
+ 
+     @pytest.mark.onlynoncluster
+-    async def test_sort_by(self, r: valkey.Valkey):
++    async def test_sort_by(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.set("score:1", 8)
+         await r.set("score:2", 3)
+         await r.set("score:3", 5)
+@@ -2169,7 +2173,7 @@ class TestValkeyCommands:
+         assert await r.sort("a", by="score:*") == [b"2", b"3", b"1"]
+ 
+     @pytest.mark.onlynoncluster
+-    async def test_sort_get(self, r: valkey.Valkey):
++    async def test_sort_get(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.set("user:1", "u1")
+         await r.set("user:2", "u2")
+         await r.set("user:3", "u3")
+@@ -2177,7 +2181,7 @@ class TestValkeyCommands:
+         assert await r.sort("a", get="user:*") == [b"u1", b"u2", b"u3"]
+ 
+     @pytest.mark.onlynoncluster
+-    async def test_sort_get_multi(self, r: valkey.Valkey):
++    async def test_sort_get_multi(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.set("user:1", "u1")
+         await r.set("user:2", "u2")
+         await r.set("user:3", "u3")
+@@ -2192,19 +2196,19 @@ class TestValkeyCommands:
+         ]
+ 
+     @pytest.mark.onlynoncluster
+-    async def test_sort_get_groups_two(self, r: valkey.Valkey):
++    async def test_sort_get_groups_two(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.set("user:1", "u1")
+         await r.set("user:2", "u2")
+         await r.set("user:3", "u3")
+         await r.rpush("a", "2", "3", "1")
+-        assert await r.sort("a", get=("user:*", "#"), groups=True) == [
++        assert await r.sort("a", get=("user:*", "#"), groups=True) == [  # type: ignore[comparison-overlap]
+             (b"u1", b"1"),
+             (b"u2", b"2"),
+             (b"u3", b"3"),
+         ]
+ 
+     @pytest.mark.onlynoncluster
+-    async def test_sort_groups_string_get(self, r: valkey.Valkey):
++    async def test_sort_groups_string_get(self, r: valkey.asyncio.Valkey[str]):
+         await r.set("user:1", "u1")
+         await r.set("user:2", "u2")
+         await r.set("user:3", "u3")
+@@ -2213,7 +2217,7 @@ class TestValkeyCommands:
+             await r.sort("a", get="user:*", groups=True)
+ 
+     @pytest.mark.onlynoncluster
+-    async def test_sort_groups_just_one_get(self, r: valkey.Valkey):
++    async def test_sort_groups_just_one_get(self, r: valkey.asyncio.Valkey[str]):
+         await r.set("user:1", "u1")
+         await r.set("user:2", "u2")
+         await r.set("user:3", "u3")
+@@ -2221,7 +2225,7 @@ class TestValkeyCommands:
+         with pytest.raises(exceptions.DataError):
+             await r.sort("a", get=["user:*"], groups=True)
+ 
+-    async def test_sort_groups_no_get(self, r: valkey.Valkey):
++    async def test_sort_groups_no_get(self, r: valkey.asyncio.Valkey[str]):
+         await r.set("user:1", "u1")
+         await r.set("user:2", "u2")
+         await r.set("user:3", "u3")
+@@ -2230,7 +2234,7 @@ class TestValkeyCommands:
+             await r.sort("a", groups=True)
+ 
+     @pytest.mark.onlynoncluster
+-    async def test_sort_groups_three_gets(self, r: valkey.Valkey):
++    async def test_sort_groups_three_gets(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.set("user:1", "u1")
+         await r.set("user:2", "u2")
+         await r.set("user:3", "u3")
+@@ -2238,28 +2242,28 @@ class TestValkeyCommands:
+         await r.set("door:2", "d2")
+         await r.set("door:3", "d3")
+         await r.rpush("a", "2", "3", "1")
+-        assert await r.sort("a", get=("user:*", "door:*", "#"), groups=True) == [
++        assert await r.sort("a", get=("user:*", "door:*", "#"), groups=True) == [  # type: ignore[comparison-overlap]
+             (b"u1", b"d1", b"1"),
+             (b"u2", b"d2", b"2"),
+             (b"u3", b"d3", b"3"),
+         ]
+ 
+-    async def test_sort_desc(self, r: valkey.Valkey):
++    async def test_sort_desc(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.rpush("a", "2", "3", "1")
+         assert await r.sort("a", desc=True) == [b"3", b"2", b"1"]
+ 
+-    async def test_sort_alpha(self, r: valkey.Valkey):
++    async def test_sort_alpha(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.rpush("a", "e", "c", "b", "d", "a")
+         assert await r.sort("a", alpha=True) == [b"a", b"b", b"c", b"d", b"e"]
+ 
+     @pytest.mark.onlynoncluster
+-    async def test_sort_store(self, r: valkey.Valkey):
++    async def test_sort_store(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.rpush("a", "2", "3", "1")
+         assert await r.sort("a", store="sorted_values") == 3
+         assert await r.lrange("sorted_values", 0, -1) == [b"1", b"2", b"3"]
+ 
+     @pytest.mark.onlynoncluster
+-    async def test_sort_all_options(self, r: valkey.Valkey):
++    async def test_sort_all_options(self, r: valkey.asyncio.Valkey[bytes]):
+         await r.set("user:1:username", "zeus")
+         await r.set("user:2:username", "titan")
+         await r.set("user:3:username", "hermes")
+@@ -2297,7 +2301,7 @@ class TestValkeyCommands:
+             b"apple juice",
+         ]
+ 
+-    async def test_sort_issue_924(self, r: valkey.Valkey):
++    async def test_sort_issue_924(self, r: valkey.asyncio.Valkey[str]):
+         # Tests for issue https://github.com/andymccurdy/redis-py/issues/924
+         await r.execute_command("SADD", "issue#924", 1)
+         await r.execute_command("SORT", "issue#924")
+@@ -2374,12 +2378,12 @@ class TestValkeyCommands:
+     @skip_if_server_version_lt("3.0.0")
+     @skip_if_server_version_gte("7.0.0")
+     @pytest.mark.onlynoncluster
+-    async def test_readwrite(self, r: valkey.Valkey):
++    async def test_readwrite(self, r: valkey.asyncio.Valkey[str]):
+         assert await r.readwrite()
+ 
+     @skip_if_server_version_lt("3.0.0")
+     @pytest.mark.onlynoncluster
+-    async def test_readonly(self, r: valkey.Valkey, valkey_version: Version):
++    async def test_readonly(self, r: valkey.asyncio.Valkey[str], valkey_version: Version):
+         # NOTE: Valkey 8.0.0 changes the behaviour of READONLY
+         # See https://github.com/valkey-io/valkey/pull/325
+         if valkey_version < Version("8.0.0"):
+@@ -2395,7 +2399,7 @@ class TestValkeyCommands:
+ 
+     # GEO COMMANDS
+     @skip_if_server_version_lt("3.2.0")
+-    async def test_geoadd(self, r: valkey.Valkey):
++    async def test_geoadd(self, r: valkey.asyncio.Valkey[str]):
+         values = (2.1909389952632, 41.433791470673, "place1") + (
+             2.1873744593677,
+             41.406342043777,
+@@ -2406,12 +2410,12 @@ class TestValkeyCommands:
+         assert await r.zcard("barcelona") == 2
+ 
+     @skip_if_server_version_lt("3.2.0")
+-    async def test_geoadd_invalid_params(self, r: valkey.Valkey):
++    async def test_geoadd_invalid_params(self, r: valkey.asyncio.Valkey[str]):
+         with pytest.raises(exceptions.ValkeyError):
+             await r.geoadd("barcelona", (1, 2))
+ 
+     @skip_if_server_version_lt("3.2.0")
+-    async def test_geodist(self, r: valkey.Valkey):
++    async def test_geodist(self, r: valkey.asyncio.Valkey[str]):
+         values = (2.1909389952632, 41.433791470673, "place1") + (
+             2.1873744593677,
+             41.406342043777,
+@@ -2422,7 +2426,7 @@ class TestValkeyCommands:
+         assert await r.geodist("barcelona", "place1", "place2") == 3067.4157
+ 
+     @skip_if_server_version_lt("3.2.0")
+-    async def test_geodist_units(self, r: valkey.Valkey):
++    async def test_geodist_units(self, r: valkey.asyncio.Valkey[str]):
+         values = (2.1909389952632, 41.433791470673, "place1") + (
+             2.1873744593677,
+             41.406342043777,
+@@ -2433,18 +2437,18 @@ class TestValkeyCommands:
+         assert await r.geodist("barcelona", "place1", "place2", "km") == 3.0674
+ 
+     @skip_if_server_version_lt("3.2.0")
+-    async def test_geodist_missing_one_member(self, r: valkey.Valkey):
++    async def test_geodist_missing_one_member(self, r: valkey.asyncio.Valkey[str]):
+         values = (2.1909389952632, 41.433791470673, "place1")
+         await r.geoadd("barcelona", values)
+         assert await r.geodist("barcelona", "place1", "missing_member", "km") is None
+ 
+     @skip_if_server_version_lt("3.2.0")
+-    async def test_geodist_invalid_units(self, r: valkey.Valkey):
++    async def test_geodist_invalid_units(self, r: valkey.asyncio.Valkey[str]):
+         with pytest.raises(exceptions.ValkeyError):
+             assert await r.geodist("x", "y", "z", "inches")
+ 
+     @skip_if_server_version_lt("3.2.0")
+-    async def test_geohash(self, r: valkey.Valkey):
++    async def test_geohash(self, r: valkey.asyncio.Valkey[str]):
+         values = (2.1909389952632, 41.433791470673, "place1") + (
+             2.1873744593677,
+             41.406342043777,
+@@ -2460,7 +2464,7 @@ class TestValkeyCommands:
+         )
+ 
+     @skip_if_server_version_lt("3.2.0")
+-    async def test_geopos(self, r: valkey.Valkey):
++    async def test_geopos(self, r: valkey.asyncio.Valkey[str]):
+         values = (2.1909389952632, 41.433791470673, "place1") + (
+             2.1873744593677,
+             41.406342043777,
+@@ -2483,16 +2487,16 @@ class TestValkeyCommands:
+         )
+ 
+     @skip_if_server_version_lt("4.0.0")
+-    async def test_geopos_no_value(self, r: valkey.Valkey):
++    async def test_geopos_no_value(self, r: valkey.asyncio.Valkey[str]):
+         assert await r.geopos("barcelona", "place1", "place2") == [None, None]
+ 
+     @skip_if_server_version_lt("3.2.0")
+     @skip_if_server_version_gte("4.0.0")
+-    async def test_old_geopos_no_value(self, r: valkey.Valkey):
++    async def test_old_geopos_no_value(self, r: valkey.asyncio.Valkey[str]):
+         assert await r.geopos("barcelona", "place1", "place2") == []
+ 
+     @skip_if_server_version_lt("6.2.0")
+-    async def test_geosearch(self, r: valkey.Valkey):
++    async def test_geosearch(self, r: valkey.asyncio.Valkey[str]):
+         values = (
+             (2.1909389952632, 41.433791470673, "place1")
+             + (2.1873744593677, 41.406342043777, b"\x80place2")
+@@ -2520,13 +2524,13 @@ class TestValkeyCommands:
+             "barcelona", member="place3", radius=100, unit="km", count=2
+         ) == [b"place3", b"\x80place2"]
+         search_res = await r.geosearch(
+-            "barcelona", member="place3", radius=100, unit="km", count=1, any=1
++            "barcelona", member="place3", radius=100, unit="km", count=1, any=True
+         )
+         assert search_res[0] in [b"place1", b"place3", b"\x80place2"]
+ 
+     @skip_unless_arch_bits(64)
+     @skip_if_server_version_lt("6.2.0")
+-    async def test_geosearch_member(self, r: valkey.Valkey):
++    async def test_geosearch_member(self, r: valkey.asyncio.Valkey[str]):
+         values = (2.1909389952632, 41.433791470673, "place1") + (
+             2.1873744593677,
+             41.406342043777,
+@@ -2564,7 +2568,7 @@ class TestValkeyCommands:
+         )
+ 
+     @skip_if_server_version_lt("6.2.0")
+-    async def test_geosearch_sort(self, r: valkey.Valkey):
++    async def test_geosearch_sort(self, r: valkey.asyncio.Valkey[str]):
+         values = (2.1909389952632, 41.433791470673, "place1") + (
+             2.1873744593677,
+             41.406342043777,
+@@ -2603,9 +2607,9 @@ class TestValkeyCommands:
+     )
+     async def test_geosearch_with(
+         self,
+-        r: valkey.Valkey,
+-        geosearch_kwargs: Dict[str, Any],
+-        expected_geosearch_result: List[Any],
++        r: valkey.asyncio.Valkey[str],
++        geosearch_kwargs: dict[str, Any],
++        expected_geosearch_result: list[Any],
+     ):
+         values = (2.1909389952632, 41.433791470673, "place1") + (
+             2.1873744593677,
+@@ -2646,7 +2650,7 @@ class TestValkeyCommands:
+         )
+ 
+     @skip_if_server_version_lt("6.2.0")
+-    async def test_geosearch_negative(self, r: valkey.Valkey):
++    async def test_geosearch_negative(self, r: valkey.asyncio.Valkey[str]):
+         # not specifying member nor longitude and latitude
+         with pytest.raises(exceptions.DataError):
+             assert await r.geosearch("barcelona")
+@@ -2689,11 +2693,11 @@ class TestValkeyCommands:
+ 
+         # use any without count
+         with pytest.raises(exceptions.DataError):
+-            assert await r.geosearch("barcelona", member="place3", radius=100, any=1)
++            assert await r.geosearch("barcelona", member="place3", radius=100, any=True)
+ 
+     @pytest.mark.onlynoncluster
+     @skip_if_server_version_lt("6.2.0")
+-    async def test_geosearchstore(self, r: valkey.Valkey):
++    async def test_geosearchstore(self, r: valkey.asyncio.Valkey[bytes]):
+         values = (2.1909389952632, 41.433791470673, "place1") + (
+             2.1873744593677,
+             41.406342043777,
+@@ -2713,7 +2717,7 @@ class TestValkeyCommands:
+     @pytest.mark.onlynoncluster
+     @skip_unless_arch_bits(64)
+     @skip_if_server_version_lt("6.2.0")
+-    async def test_geosearchstore_dist(self, r: valkey.Valkey):
++    async def test_geosearchstore_dist(self, r: valkey.asyncio.Valkey[str]):
+         values = (2.1909389952632, 41.433791470673, "place1") + (
+             2.1873744593677,
+             41.406342043777,
+@@ -2731,10 +2735,11 @@ class TestValkeyCommands:
+         )
+         # instead of save the geo score, the distance is saved.
+         score = await r.zscore("places_barcelona", "place1")
++        assert score is not None
+         assert math.isclose(score, 88.05060698409301)
+ 
+     @skip_if_server_version_lt("3.2.0")
+-    async def test_georadius(self, r: valkey.Valkey):
++    async def test_georadius(self, r: valkey.asyncio.Valkey[str]):
+         values = (2.1909389952632, 41.433791470673, "place1") + (
+             2.1873744593677,
+             41.406342043777,
+@@ -2746,7 +2751,7 @@ class TestValkeyCommands:
+         assert await r.georadius("barcelona", 2.187, 41.406, 1000) == [b"\x80place2"]
+ 
+     @skip_if_server_version_lt("3.2.0")
+-    async def test_georadius_no_values(self, r: valkey.Valkey):
++    async def test_georadius_no_values(self, r: valkey.asyncio.Valkey[str]):
+         values = (2.1909389952632, 41.433791470673, "place1") + (
+             2.1873744593677,
+             41.406342043777,
+@@ -2757,7 +2762,7 @@ class TestValkeyCommands:
+         assert await r.georadius("barcelona", 1, 2, 1000) == []
+ 
+     @skip_if_server_version_lt("3.2.0")
+-    async def test_georadius_units(self, r: valkey.Valkey):
++    async def test_georadius_units(self, r: valkey.asyncio.Valkey[str]):
+         values = (2.1909389952632, 41.433791470673, "place1") + (
+             2.1873744593677,
+             41.406342043777,
+@@ -2793,7 +2798,7 @@ class TestValkeyCommands:
+         ],
+     )
+     async def test_georadius_with(
+-        self, r: valkey.Valkey, georadius_kwargs, expected_georadius_result
++        self, r: valkey.asyncio.Valkey[str], georadius_kwargs, expected_georadius_result
+     ):
+         values = (2.1909389952632, 41.433791470673, "place1") + (
+             2.1873744593677,
+@@ -2837,7 +2842,7 @@ class TestValkeyCommands:
+         )
+ 
+     @skip_if_server_version_lt("3.2.0")
+-    async def test_georadius_count(self, r: valkey.Valkey):
++    async def test_georadius_count(self, r: valkey.asyncio.Valkey[str]):
+         values = (2.1909389952632, 41.433791470673, "place1") + (
+             2.1873744593677,
+             41.406342043777,
+@@ -2850,7 +2855,7 @@ class TestValkeyCommands:
+         ]
+ 
+     @skip_if_server_version_lt("3.2.0")
+-    async def test_georadius_sort(self, r: valkey.Valkey):
++    async def test_georadius_sort(self, r: valkey.asyncio.Valkey[str]):
+         values = (2.1909389952632, 41.433791470673, "place1") + (
+             2.1873744593677,
+             41.406342043777,
+@@ -2869,7 +2874,7 @@ class TestValkeyCommands:
+ 
+     @skip_if_server_version_lt("3.2.0")
+     @pytest.mark.onlynoncluster
+-    async def test_georadius_store(self, r: valkey.Valkey):
++    async def test_georadius_store(self, r: valkey.asyncio.Valkey[bytes]):
+         values = (2.1909389952632, 41.433791470673, "place1") + (
+             2.1873744593677,
+             41.406342043777,
+@@ -2883,7 +2888,7 @@ class TestValkeyCommands:
+     @skip_unless_arch_bits(64)
+     @skip_if_server_version_lt("3.2.0")
+     @pytest.mark.onlynoncluster
+-    async def test_georadius_store_dist(self, r: valkey.Valkey):
++    async def test_georadius_store_dist(self, r: valkey.asyncio.Valkey[str]):
+         values = (2.1909389952632, 41.433791470673, "place1") + (
+             2.1873744593677,
+             41.406342043777,
+@@ -2896,11 +2901,11 @@ class TestValkeyCommands:
+         )
+         # instead of save the geo score, the distance is saved.
+         z_score = await r.zscore("places_barcelona", "place1")
+-        assert math.isclose(z_score, 88.05060698409301)
++        assert math.isclose(z_score, 88.05060698409301)  # type: ignore[arg-type]
+ 
+     @skip_unless_arch_bits(64)
+     @skip_if_server_version_lt("3.2.0")
+-    async def test_georadiusmember(self, r: valkey.Valkey):
++    async def test_georadiusmember(self, r: valkey.asyncio.Valkey[str]):
+         values = (2.1909389952632, 41.433791470673, "place1") + (
+             2.1873744593677,
+             41.406342043777,
+@@ -2933,7 +2938,7 @@ class TestValkeyCommands:
+         ]
+ 
+     @skip_if_server_version_lt("5.0.0")
+-    async def test_xack(self, r: valkey.Valkey):
++    async def test_xack(self, r: valkey.asyncio.Valkey[str]):
+         stream = "stream"
+         group = "group"
+         consumer = "consumer"
+@@ -2954,7 +2959,7 @@ class TestValkeyCommands:
+         assert await r.xack(stream, group, m2, m3) == 2
+ 
+     @skip_if_server_version_lt("5.0.0")
+-    async def test_xadd(self, r: valkey.Valkey):
++    async def test_xadd(self, r: valkey.asyncio.Valkey[str]):
+         stream = "stream"
+         message_id = await r.xadd(stream, {"foo": "bar"})
+         assert re.match(rb"[0-9]+\-[0-9]+", message_id)
+@@ -2968,7 +2973,7 @@ class TestValkeyCommands:
+         assert await r.xlen(stream) == 2
+ 
+     @skip_if_server_version_lt("5.0.0")
+-    async def test_xclaim(self, r: valkey.Valkey):
++    async def test_xclaim(self, r: valkey.asyncio.Valkey[str]):
+         stream = "stream"
+         group = "group"
+         consumer1 = "consumer1"
+@@ -3006,7 +3011,7 @@ class TestValkeyCommands:
+         ) == [message_id]
+ 
+     @skip_if_server_version_lt("7.0.0")
+-    async def test_xclaim_trimmed(self, r: valkey.Valkey):
++    async def test_xclaim_trimmed(self, r: valkey.asyncio.Valkey[str]):
+         # xclaim should not raise an exception if the item is not there
+         stream = "stream"
+         group = "group"
+@@ -3030,7 +3035,7 @@ class TestValkeyCommands:
+         assert item[0][0] == sid2
+ 
+     @skip_if_server_version_lt("5.0.0")
+-    async def test_xdel(self, r: valkey.Valkey):
++    async def test_xdel(self, r: valkey.asyncio.Valkey[str]):
+         stream = "stream"
+ 
+         # deleting from an empty stream doesn't do anything
+@@ -3045,7 +3050,7 @@ class TestValkeyCommands:
+         assert await r.xdel(stream, m2, m3) == 2
+ 
+     @skip_if_server_version_lt("7.0.0")
+-    async def test_xgroup_create(self, r: valkey.Valkey):
++    async def test_xgroup_create(self, r: valkey.asyncio.Valkey[str]):
+         # tests xgroup_create and xinfo_groups
+         stream = "stream"
+         group = "group"
+@@ -3068,7 +3073,7 @@ class TestValkeyCommands:
+         assert await r.xinfo_groups(stream) == expected
+ 
+     @skip_if_server_version_lt("7.0.0")
+-    async def test_xgroup_create_mkstream(self, r: valkey.Valkey):
++    async def test_xgroup_create_mkstream(self, r: valkey.asyncio.Valkey[str]):
+         # tests xgroup_create and xinfo_groups
+         stream = "stream"
+         group = "group"
+@@ -3094,7 +3099,7 @@ class TestValkeyCommands:
+         assert await r.xinfo_groups(stream) == expected
+ 
+     @skip_if_server_version_lt("5.0.0")
+-    async def test_xgroup_delconsumer(self, r: valkey.Valkey):
++    async def test_xgroup_delconsumer(self, r: valkey.asyncio.Valkey[str]):
+         stream = "stream"
+         group = "group"
+         consumer = "consumer"
+@@ -3112,7 +3117,7 @@ class TestValkeyCommands:
+         assert await r.xgroup_delconsumer(stream, group, consumer) == 2
+ 
+     @skip_if_server_version_lt("5.0.0")
+-    async def test_xgroup_destroy(self, r: valkey.Valkey):
++    async def test_xgroup_destroy(self, r: valkey.asyncio.Valkey[str]):
+         stream = "stream"
+         group = "group"
+         await r.xadd(stream, {"foo": "bar"})
+@@ -3124,7 +3129,7 @@ class TestValkeyCommands:
+         assert await r.xgroup_destroy(stream, group)
+ 
+     @skip_if_server_version_lt("7.0.0")
+-    async def test_xgroup_setid(self, r: valkey.Valkey):
++    async def test_xgroup_setid(self, r: valkey.asyncio.Valkey[str]):
+         stream = "stream"
+         group = "group"
+         message_id = await r.xadd(stream, {"foo": "bar"})
+@@ -3145,7 +3150,7 @@ class TestValkeyCommands:
+         assert await r.xinfo_groups(stream) == expected
+ 
+     @skip_if_server_version_lt("7.2.0")
+-    async def test_xinfo_consumers(self, r: valkey.Valkey):
++    async def test_xinfo_consumers(self, r: valkey.asyncio.Valkey[str]):
+         stream = "stream"
+         group = "group"
+         consumer1 = "consumer1"
+@@ -3172,7 +3177,7 @@ class TestValkeyCommands:
+         assert info == expected
+ 
+     @skip_if_server_version_lt("5.0.0")
+-    async def test_xinfo_stream(self, r: valkey.Valkey):
++    async def test_xinfo_stream(self, r: valkey.asyncio.Valkey[str]):
+         stream = "stream"
+         m1 = await r.xadd(stream, {"foo": "bar"})
+         m2 = await r.xadd(stream, {"foo": "bar"})
+@@ -3189,7 +3194,7 @@ class TestValkeyCommands:
+         assert info["last-entry"] is None
+ 
+     @skip_if_server_version_lt("6.0.0")
+-    async def test_xinfo_stream_full(self, r: valkey.Valkey):
++    async def test_xinfo_stream_full(self, r: valkey.asyncio.Valkey[str]):
+         stream = "stream"
+         group = "group"
+ 
+@@ -3208,7 +3213,7 @@ class TestValkeyCommands:
+         assert isinstance(consumer, dict)
+ 
+     @skip_if_server_version_lt("5.0.0")
+-    async def test_xlen(self, r: valkey.Valkey):
++    async def test_xlen(self, r: valkey.asyncio.Valkey[str]):
+         stream = "stream"
+         assert await r.xlen(stream) == 0
+         await r.xadd(stream, {"foo": "bar"})
+@@ -3216,7 +3221,7 @@ class TestValkeyCommands:
+         assert await r.xlen(stream) == 2
+ 
+     @skip_if_server_version_lt("5.0.0")
+-    async def test_xpending(self, r: valkey.Valkey):
++    async def test_xpending(self, r: valkey.asyncio.Valkey[str]):
+         stream = "stream"
+         group = "group"
+         consumer1 = "consumer1"
+@@ -3226,7 +3231,7 @@ class TestValkeyCommands:
+         await r.xgroup_create(stream, group, 0)
+ 
+         # xpending on a group that has no consumers yet
+-        expected = {"pending": 0, "min": None, "max": None, "consumers": []}
++        expected: dict[str, int | None | list[Any]] = {"pending": 0, "min": None, "max": None, "consumers": []}
+         assert await r.xpending(stream, group) == expected
+ 
+         # read 1 message from the group with each consumer
+@@ -3245,7 +3250,7 @@ class TestValkeyCommands:
+         assert await r.xpending(stream, group) == expected
+ 
+     @skip_if_server_version_lt("5.0.0")
+-    async def test_xpending_range(self, r: valkey.Valkey):
++    async def test_xpending_range(self, r: valkey.asyncio.Valkey[str]):
+         stream = "stream"
+         group = "group"
+         consumer1 = "consumer1"
+@@ -3269,7 +3274,7 @@ class TestValkeyCommands:
+         assert response[1]["consumer"] == consumer2.encode()
+ 
+     @skip_if_server_version_lt("5.0.0")
+-    async def test_xrange(self, r: valkey.Valkey):
++    async def test_xrange(self, r: valkey.asyncio.Valkey[str]):
+         stream = "stream"
+         m1 = await r.xadd(stream, {"foo": "bar"})
+         m2 = await r.xadd(stream, {"foo": "bar"})
+@@ -3292,7 +3297,7 @@ class TestValkeyCommands:
+         assert get_ids(results) == [m1]
+ 
+     @skip_if_server_version_lt("5.0.0")
+-    async def test_xread(self, r: valkey.Valkey):
++    async def test_xread(self, r: valkey.asyncio.Valkey[str]):
+         stream = "stream"
+         m1 = await r.xadd(stream, {"foo": "bar"})
+         m2 = await r.xadd(stream, {"bing": "baz"})
+@@ -3323,7 +3328,7 @@ class TestValkeyCommands:
+         )
+ 
+     @skip_if_server_version_lt("5.0.0")
+-    async def test_xreadgroup(self, r: valkey.Valkey):
++    async def test_xreadgroup(self, r: valkey.asyncio.Valkey[str]):
+         stream = "stream"
+         group = "group"
+         consumer = "consumer"
+@@ -3390,7 +3395,7 @@ class TestValkeyCommands:
+         )
+ 
+     @skip_if_server_version_lt("5.0.0")
+-    async def test_xrevrange(self, r: valkey.Valkey):
++    async def test_xrevrange(self, r: valkey.asyncio.Valkey[str]):
+         stream = "stream"
+         m1 = await r.xadd(stream, {"foo": "bar"})
+         m2 = await r.xadd(stream, {"foo": "bar"})
+@@ -3413,7 +3418,7 @@ class TestValkeyCommands:
+         assert get_ids(results) == [m4]
+ 
+     @skip_if_server_version_lt("5.0.0")
+-    async def test_xtrim(self, r: valkey.Valkey):
++    async def test_xtrim(self, r: valkey.asyncio.Valkey[str]):
+         stream = "stream"
+ 
+         # trimming an empty key doesn't do anything
+@@ -3432,7 +3437,7 @@ class TestValkeyCommands:
+         assert await r.xtrim(stream, 3, approximate=False) == 1
+ 
+     @pytest.mark.onlynoncluster
+-    async def test_bitfield_operations(self, r: valkey.Valkey):
++    async def test_bitfield_operations(self, r: valkey.asyncio.Valkey[str]):
+         # comments show affected bits
+         await r.execute_command("SELECT", 10)
+         bf = r.bitfield("a")
+@@ -3502,7 +3507,7 @@ class TestValkeyCommands:
+         assert resp == [0, None, 255]
+ 
+     @skip_if_server_version_lt("6.0.0")
+-    async def test_bitfield_ro(self, r: valkey.Valkey):
++    async def test_bitfield_ro(self, r: valkey.asyncio.Valkey[str]):
+         bf = r.bitfield("a")
+         resp = await bf.set("u8", 8, 255).execute()
+         assert resp == [0]
+@@ -3515,7 +3520,7 @@ class TestValkeyCommands:
+         assert resp == [0, 15, 15, 14]
+ 
+     @skip_if_server_version_lt("4.0.0")
+-    async def test_memory_stats(self, r: valkey.Valkey):
++    async def test_memory_stats(self, r: valkey.asyncio.Valkey[str]):
+         # put a key into the current db to make sure that "db.<current-db>"
+         # has data
+         await r.set("foo", "bar")
+@@ -3526,18 +3531,18 @@ class TestValkeyCommands:
+                 assert not isinstance(value, list)
+ 
+     @skip_if_server_version_lt("4.0.0")
+-    async def test_memory_usage(self, r: valkey.Valkey):
++    async def test_memory_usage(self, r: valkey.asyncio.Valkey[str]):
+         await r.set("foo", "bar")
+         assert isinstance(await r.memory_usage("foo"), int)
+ 
+     @skip_if_server_version_lt("4.0.0")
+-    async def test_module_list(self, r: valkey.Valkey):
++    async def test_module_list(self, r: valkey.asyncio.Valkey[str]):
+         assert isinstance(await r.module_list(), list)
+         for x in await r.module_list():
+             assert isinstance(x, dict)
+ 
+     @pytest.mark.onlynoncluster
+-    async def test_interrupted_command(self, r: valkey.Valkey):
++    async def test_interrupted_command(self, r: valkey.asyncio.Valkey[str]):
+         """
+         Regression test for issue #1128:  An Un-handled BaseException
+         will leave the socket with un-read response to a previous
+@@ -3554,7 +3559,7 @@ class TestValkeyCommands:
+             # because the timeout won't catch its Cancelled Error if the task
+             # has a pending cancel.  Python documentation probably should reflect this.
+             if sys.version_info >= (3, 11):
+-                asyncio.current_task().uncancel()
++                asyncio.current_task().uncancel()  # type: ignore[union-attr]
+             # if all is well, we can continue.  The following should not hang.
+             await r.set("status", "down")
+ 
+@@ -3570,7 +3575,7 @@ class TestValkeyCommands:
+ 
+ @pytest.mark.onlynoncluster
+ class TestBinarySave:
+-    async def test_binary_get_set(self, r: valkey.Valkey):
++    async def test_binary_get_set(self, r: valkey.asyncio.Valkey[bytes]):
+         assert await r.set(" foo bar ", "123")
+         assert await r.get(" foo bar ") == b"123"
+ 
+@@ -3590,7 +3595,7 @@ class TestBinarySave:
+         assert await r.delete(" foo\r\nbar\r\n ")
+         assert await r.delete(" \r\n\t\x07\x13 ")
+ 
+-    async def test_binary_lists(self, r: valkey.Valkey):
++    async def test_binary_lists(self, r: valkey.asyncio.Valkey[bytes]):
+         mapping = {
+             b"foo bar": [b"1", b"2", b"3"],
+             b"foo\r\nbar\r\n": [b"4", b"5", b"6"],
+@@ -3607,7 +3612,7 @@ class TestBinarySave:
+         for key, value in mapping.items():
+             assert await r.lrange(key, 0, -1) == value
+ 
+-    async def test_22_info(self, r: valkey.Valkey):
++    async def test_22_info(self, r: valkey.asyncio.Valkey[str]):
+         info = (
+             "allocation_stats:6=1,7=1,8=7141,9=180,10=92,11=116,12=5330,"
+             "13=123,14=3091,15=11048,16=225842,17=1784,18=814,19=12020,"
+@@ -3639,14 +3644,14 @@ class TestBinarySave:
+         assert "6" in parsed["allocation_stats"]
+         assert ">=256" in parsed["allocation_stats"]
+ 
+-    async def test_large_responses(self, r: valkey.Valkey):
++    async def test_large_responses(self, r: valkey.asyncio.Valkey[bytes]):
+         """The PythonParser has some special cases for return values > 1MB"""
+         # load up 5MB of data into a key
+         data = "".join([ascii_letters] * (5000000 // len(ascii_letters)))
+         await r.set("a", data)
+         assert await r.get("a") == data.encode()
+ 
+-    async def test_floating_point_encoding(self, r: valkey.Valkey):
++    async def test_floating_point_encoding(self, r: valkey.asyncio.Valkey[str]):
+         """
+         High precision floating point values sent to the server should keep
+         precision.
+diff --git a/tests/test_asyncio/test_pipeline.py b/tests/test_asyncio/test_pipeline.py
+index 5021f91..cb28b0f 100644
+--- a/tests/test_asyncio/test_pipeline.py
++++ b/tests/test_asyncio/test_pipeline.py
+@@ -1,3 +1,5 @@
++from __future__ import annotations
++
+ import pytest
+ import valkey
+ from tests.conftest import skip_if_server_version_lt
+@@ -308,7 +310,7 @@ class TestPipeline:
+     async def test_transaction_callable(self, r):
+         await r.set("a", 1)
+         await r.set("b", 2)
+-        has_run = []
++        has_run: list[str] = []
+ 
+         async def my_transaction(pipe):
+             a_value = await pipe.get("a")
+diff --git a/tests/test_asyncio/test_pubsub.py b/tests/test_asyncio/test_pubsub.py
+index 8afb225..517177e 100644
+--- a/tests/test_asyncio/test_pubsub.py
++++ b/tests/test_asyncio/test_pubsub.py
+@@ -8,9 +8,9 @@ from unittest.mock import patch
+ # the functionality is available in 3.11.x but has a major issue before
+ # 3.11.3. See https://github.com/redis/redis-py/issues/2633
+ if sys.version_info >= (3, 11, 3):
+-    from asyncio import timeout as async_timeout
++    from asyncio import timeout as async_timeout  # type: ignore[unused-ignore,assignment,no-redef,import-not-found,attr-defined]
+ else:
+-    from async_timeout import timeout as async_timeout
++    from async_timeout import timeout as async_timeout  # type: ignore[unused-ignore,assignment,no-redef,import-not-found]
+ 
+ import pytest
+ import pytest_asyncio
+@@ -23,7 +23,7 @@ from valkey.utils import LIBVALKEY_AVAILABLE
+ from .compat import aclosing, create_task, mock
+ 
+ 
+-def with_timeout(t):
++def with_timeout(t: int):
+     def wrapper(corofunc):
+         @functools.wraps(corofunc)
+         async def run(*args, **kwargs):
+@@ -83,7 +83,7 @@ def make_subscribe_test_data(pubsub, type):
+ 
+ 
+ @pytest_asyncio.fixture()
+-async def pubsub(r: valkey.Valkey):
++async def pubsub(r: valkey.Valkey[bytes]):
+     async with r.pubsub() as p:
+         yield p
+ 
+@@ -214,7 +214,7 @@ class TestPubSubSubscribeUnsubscribe:
+         kwargs = make_subscribe_test_data(pubsub, "pattern")
+         await self._test_subscribed_property(**kwargs)
+ 
+-    async def test_aclosing(self, r: valkey.Valkey):
++    async def test_aclosing(self, r: valkey.Valkey[str]):
+         p = r.pubsub()
+         async with aclosing(p):
+             assert p.subscribed is False
+@@ -222,7 +222,7 @@ class TestPubSubSubscribeUnsubscribe:
+             assert p.subscribed is True
+         assert p.subscribed is False
+ 
+-    async def test_context_manager(self, r: valkey.Valkey):
++    async def test_context_manager(self, r: valkey.Valkey[str]):
+         p = r.pubsub()
+         async with p:
+             assert p.subscribed is False
+@@ -230,7 +230,7 @@ class TestPubSubSubscribeUnsubscribe:
+             assert p.subscribed is True
+         assert p.subscribed is False
+ 
+-    async def test_close_is_aclose(self, r: valkey.Valkey):
++    async def test_close_is_aclose(self, r: valkey.Valkey[str]):
+         """
+         Test backwards compatible close method
+         """
+@@ -242,7 +242,7 @@ class TestPubSubSubscribeUnsubscribe:
+             await p.close()
+         assert p.subscribed is False
+ 
+-    async def test_reset_is_aclose(self, r: valkey.Valkey):
++    async def test_reset_is_aclose(self, r: valkey.Valkey[str]):
+         """
+         Test backwards compatible reset method
+         """
+@@ -254,7 +254,7 @@ class TestPubSubSubscribeUnsubscribe:
+             await p.reset()
+         assert p.subscribed is False
+ 
+-    async def test_ignore_all_subscribe_messages(self, r: valkey.Valkey):
++    async def test_ignore_all_subscribe_messages(self, r: valkey.Valkey[str]):
+         p = r.pubsub(ignore_subscribe_messages=True)
+ 
+         checks = (
+@@ -347,7 +347,7 @@ class TestPubSubMessages:
+     async def async_message_handler(self, message):
+         self.async_message = message
+ 
+-    async def test_published_message_to_channel(self, r: valkey.Valkey, pubsub):
++    async def test_published_message_to_channel(self, r: valkey.Valkey[str], pubsub):
+         p = pubsub
+         await p.subscribe("foo")
+         assert await wait_for_message(p) == make_message("subscribe", "foo", 1)
+@@ -357,7 +357,7 @@ class TestPubSubMessages:
+         assert isinstance(message, dict)
+         assert message == make_message("message", "foo", "test message")
+ 
+-    async def test_published_message_to_pattern(self, r: valkey.Valkey, pubsub):
++    async def test_published_message_to_pattern(self, r: valkey.Valkey[str], pubsub):
+         p = pubsub
+         await p.subscribe("foo")
+         await p.psubscribe("f*")
+@@ -380,7 +380,7 @@ class TestPubSubMessages:
+         assert message2 in expected
+         assert message1 != message2
+ 
+-    async def test_channel_message_handler(self, r: valkey.Valkey):
++    async def test_channel_message_handler(self, r: valkey.Valkey[str]):
+         p = r.pubsub(ignore_subscribe_messages=True)
+         await p.subscribe(foo=self.message_handler)
+         assert await wait_for_message(p) is None
+@@ -411,7 +411,7 @@ class TestPubSubMessages:
+         await p.aclose()
+ 
+     @pytest.mark.onlynoncluster
+-    async def test_pattern_message_handler(self, r: valkey.Valkey):
++    async def test_pattern_message_handler(self, r: valkey.Valkey[str]):
+         p = r.pubsub(ignore_subscribe_messages=True)
+         await p.psubscribe(**{"f*": self.message_handler})
+         assert await wait_for_message(p) is None
+@@ -422,7 +422,7 @@ class TestPubSubMessages:
+         )
+         await p.aclose()
+ 
+-    async def test_unicode_channel_message_handler(self, r: valkey.Valkey):
++    async def test_unicode_channel_message_handler(self, r: valkey.Valkey[str]):
+         p = r.pubsub(ignore_subscribe_messages=True)
+         channel = "uni" + chr(4456) + "code"
+         channels = {channel: self.message_handler}
+@@ -436,7 +436,7 @@ class TestPubSubMessages:
+     @pytest.mark.onlynoncluster
+     # see: https://valkey-py-cluster.readthedocs.io/en/stable/pubsub.html
+     # #known-limitations-with-pubsub
+-    async def test_unicode_pattern_message_handler(self, r: valkey.Valkey):
++    async def test_unicode_pattern_message_handler(self, r: valkey.Valkey[str]):
+         p = r.pubsub(ignore_subscribe_messages=True)
+         pattern = "uni" + chr(4456) + "*"
+         channel = "uni" + chr(4456) + "code"
+@@ -449,7 +449,7 @@ class TestPubSubMessages:
+         )
+         await p.aclose()
+ 
+-    async def test_get_message_without_subscribe(self, r: valkey.Valkey, pubsub):
++    async def test_get_message_without_subscribe(self, r: valkey.Valkey[str], pubsub):
+         p = pubsub
+         with pytest.raises(RuntimeError) as info:
+             await p.get_message()
+@@ -522,7 +522,7 @@ class TestPubSubAutoDecoding:
+             "punsubscribe", self.pattern, 0
+         )
+ 
+-    async def test_channel_publish(self, r: valkey.Valkey, pubsub):
++    async def test_channel_publish(self, r: valkey.Valkey[str], pubsub):
+         p = pubsub
+         await p.subscribe(self.channel)
+         assert await wait_for_message(p) == self.make_message(
+@@ -534,7 +534,7 @@ class TestPubSubAutoDecoding:
+         )
+ 
+     @pytest.mark.onlynoncluster
+-    async def test_pattern_publish(self, r: valkey.Valkey, pubsub):
++    async def test_pattern_publish(self, r: valkey.Valkey[str], pubsub):
+         p = pubsub
+         await p.psubscribe(self.pattern)
+         assert await wait_for_message(p) == self.make_message(
+@@ -545,7 +545,7 @@ class TestPubSubAutoDecoding:
+             "pmessage", self.channel, self.data, pattern=self.pattern
+         )
+ 
+-    async def test_channel_message_handler(self, r: valkey.Valkey):
++    async def test_channel_message_handler(self, r: valkey.Valkey[str]):
+         p = r.pubsub(ignore_subscribe_messages=True)
+         await p.subscribe(**{self.channel: self.message_handler})
+         assert await wait_for_message(p) is None
+@@ -563,7 +563,7 @@ class TestPubSubAutoDecoding:
+         assert self.message == self.make_message("message", self.channel, new_data)
+         await p.aclose()
+ 
+-    async def test_pattern_message_handler(self, r: valkey.Valkey):
++    async def test_pattern_message_handler(self, r: valkey.Valkey[str]):
+         p = r.pubsub(ignore_subscribe_messages=True)
+         await p.psubscribe(**{self.pattern: self.message_handler})
+         assert await wait_for_message(p) is None
+@@ -585,7 +585,7 @@ class TestPubSubAutoDecoding:
+         )
+         await p.aclose()
+ 
+-    async def test_context_manager(self, r: valkey.Valkey):
++    async def test_context_manager(self, r: valkey.Valkey[str]):
+         async with r.pubsub() as pubsub:
+             await pubsub.subscribe("foo")
+             assert pubsub.connection is not None
+@@ -598,7 +598,7 @@ class TestPubSubAutoDecoding:
+ 
+ @pytest.mark.onlynoncluster
+ class TestPubSubValkeyDown:
+-    async def test_channel_subscribe(self, r: valkey.Valkey):
++    async def test_channel_subscribe(self):
+         r = valkey.Valkey(host="localhost", port=6390)
+         p = r.pubsub()
+         with pytest.raises(ConnectionError):
+@@ -609,17 +609,17 @@ class TestPubSubValkeyDown:
+ class TestPubSubSubcommands:
+     @pytest.mark.onlynoncluster
+     @skip_if_server_version_lt("2.8.0")
+-    async def test_pubsub_channels(self, r: valkey.Valkey, pubsub):
++    async def test_pubsub_channels(self, r: valkey.Valkey[bytes], pubsub):
+         p = pubsub
+         await p.subscribe("foo", "bar", "baz", "quux")
+         for i in range(4):
+             assert (await wait_for_message(p))["type"] == "subscribe"
+         expected = [b"bar", b"baz", b"foo", b"quux"]
+-        assert all([channel in await r.pubsub_channels() for channel in expected])
++        assert all([channel in await r.pubsub_channels() for channel in expected])  # type: ignore[comparison-overlap]
+ 
+     @pytest.mark.onlynoncluster
+     @skip_if_server_version_lt("2.8.0")
+-    async def test_pubsub_numsub(self, r: valkey.Valkey):
++    async def test_pubsub_numsub(self, r: valkey.Valkey[bytes]):
+         p1 = r.pubsub()
+         await p1.subscribe("foo", "bar", "baz")
+         for i in range(3):
+@@ -633,13 +633,13 @@ class TestPubSubSubcommands:
+         assert (await wait_for_message(p3))["type"] == "subscribe"
+ 
+         channels = [(b"foo", 1), (b"bar", 2), (b"baz", 3)]
+-        assert await r.pubsub_numsub("foo", "bar", "baz") == channels
++        assert await r.pubsub_numsub("foo", "bar", "baz") == channels  # type: ignore[comparison-overlap]
+         await p1.aclose()
+         await p2.aclose()
+         await p3.aclose()
+ 
+     @skip_if_server_version_lt("2.8.0")
+-    async def test_pubsub_numpat(self, r: valkey.Valkey):
++    async def test_pubsub_numpat(self, r: valkey.Valkey[str]):
+         p = r.pubsub()
+         await p.psubscribe("*oo", "*ar", "b*z")
+         for i in range(3):
+@@ -651,7 +651,7 @@ class TestPubSubSubcommands:
+ @pytest.mark.onlynoncluster
+ class TestPubSubPings:
+     @skip_if_server_version_lt("3.0.0")
+-    async def test_send_pubsub_ping(self, r: valkey.Valkey):
++    async def test_send_pubsub_ping(self, r: valkey.Valkey[str]):
+         p = r.pubsub(ignore_subscribe_messages=True)
+         await p.subscribe("foo")
+         await p.ping()
+@@ -661,7 +661,7 @@ class TestPubSubPings:
+         await p.aclose()
+ 
+     @skip_if_server_version_lt("3.0.0")
+-    async def test_send_pubsub_ping_message(self, r: valkey.Valkey):
++    async def test_send_pubsub_ping_message(self, r: valkey.Valkey[str]):
+         p = r.pubsub(ignore_subscribe_messages=True)
+         await p.subscribe("foo")
+         await p.ping(message="hello world")
+@@ -675,7 +675,7 @@ class TestPubSubPings:
+ class TestPubSubConnectionKilled:
+     @skip_if_server_version_lt("3.0.0")
+     async def test_connection_error_raised_when_connection_dies(
+-        self, r: valkey.Valkey, pubsub
++        self, r: valkey.Valkey[str], pubsub
+     ):
+         p = pubsub
+         await p.subscribe("foo")
+@@ -698,13 +698,13 @@ class TestPubSubTimeouts:
+ 
+ @pytest.mark.onlynoncluster
+ class TestPubSubReconnect:
+-    @with_timeout(2)
+-    async def test_reconnect_listen(self, r: valkey.Valkey, pubsub):
++    @with_timeout(2)  # type: ignore[misc]
++    async def test_reconnect_listen(self, r: valkey.Valkey[str], pubsub):
+         """
+         Test that a loop processing PubSub messages can survive
+         a disconnect, by issuing a connect() call.
+         """
+-        messages = asyncio.Queue()
++        messages = asyncio.Queue()  # type: ignore[var-annotated]
+         interrupt = False
+ 
+         async def loop():
+@@ -775,11 +775,11 @@ class TestPubSubRun:
+             ):
+                 return
+ 
+-    async def test_callbacks(self, r: valkey.Valkey, pubsub):
++    async def test_callbacks(self, r: valkey.Valkey[str], pubsub):
+         def callback(message):
+             messages.put_nowait(message)
+ 
+-        messages = asyncio.Queue()
++        messages = asyncio.Queue()  # type: ignore[var-annotated]
+         p = pubsub
+         await self._subscribe(p, foo=callback)
+         task = asyncio.get_running_loop().create_task(p.run())
+@@ -797,12 +797,12 @@ class TestPubSubRun:
+             "type": "message",
+         }
+ 
+-    async def test_exception_handler(self, r: valkey.Valkey, pubsub):
++    async def test_exception_handler(self, r: valkey.Valkey[str], pubsub):
+         def exception_handler_callback(e, pubsub) -> None:
+             assert pubsub == p
+             exceptions.put_nowait(e)
+ 
+-        exceptions = asyncio.Queue()
++        exceptions = asyncio.Queue()  # type: ignore[var-annotated]
+         p = pubsub
+         await self._subscribe(p, foo=lambda x: None)
+         with mock.patch.object(p, "get_message", side_effect=Exception("error")):
+@@ -817,11 +817,11 @@ class TestPubSubRun:
+                 pass
+         assert str(e) == "error"
+ 
+-    async def test_late_subscribe(self, r: valkey.Valkey, pubsub):
++    async def test_late_subscribe(self, r: valkey.Valkey[str], pubsub):
+         def callback(message):
+             messages.put_nowait(message)
+ 
+-        messages = asyncio.Queue()
++        messages = asyncio.Queue()  # type: ignore[var-annotated]
+         p = pubsub
+         task = asyncio.get_running_loop().create_task(p.run())
+         # wait until loop gets settled.  Add a subscription
+@@ -856,7 +856,7 @@ class TestPubSubAutoReconnect:
+     timeout = 2
+ 
+     async def mysetup(self, r, method):
+-        self.messages = asyncio.Queue()
++        self.messages = asyncio.Queue()  # type: ignore[var-annotated]
+         self.pubsub = r.pubsub()
+         # State: 0 = initial state , 1 = after disconnect, 2 = ConnectionError is seen,
+         # 3=successfully reconnected 4 = exit
+@@ -892,7 +892,7 @@ class TestPubSubAutoReconnect:
+             self.state = 4  # quit
+         await self.task
+ 
+-    async def test_reconnect_socket_error(self, r: valkey.Valkey, method):
++    async def test_reconnect_socket_error(self, r: valkey.Valkey[str], method):
+         """
+         Test that a socket error will cause reconnect
+         """
+@@ -921,7 +921,7 @@ class TestPubSubAutoReconnect:
+         finally:
+             await self.mykill()
+ 
+-    async def test_reconnect_disconnect(self, r: valkey.Valkey, method):
++    async def test_reconnect_disconnect(self, r: valkey.Valkey[str], method):
+         """
+         Test that a manual disconnect() will cause reconnect
+         """
+@@ -992,7 +992,7 @@ class TestBaseException:
+     @pytest.mark.skipif(
+         sys.version_info < (3, 8), reason="requires python 3.8 or higher"
+     )
+-    async def test_outer_timeout(self, r: valkey.Valkey):
++    async def test_outer_timeout(self, r: valkey.Valkey[str]):
+         """
+         Using asyncio_timeout manually outside the inner method timeouts works.
+         This works on Python versions 3.8 and greater, at which time asyncio.
+@@ -1026,7 +1026,7 @@ class TestBaseException:
+     @pytest.mark.skipif(
+         sys.version_info < (3, 8), reason="requires python 3.8 or higher"
+     )
+-    async def test_base_exception(self, r: valkey.Valkey):
++    async def test_base_exception(self, r: valkey.Valkey[str]):
+         """
+         Manually trigger a BaseException inside the parser's .read_response method
+         and verify that it isn't caught
+diff --git a/tests/test_cache.py b/tests/test_cache.py
+index 6378410..25792fa 100644
+--- a/tests/test_cache.py
++++ b/tests/test_cache.py
+@@ -8,7 +8,9 @@ import valkey
+ from tests.conftest import _get_client
+ from valkey import ValkeyError
+ from valkey._cache import AbstractCache, EvictionPolicy, _LocalCache
+-from valkey.typing import KeyT, ResponseT
++
++# It is defined, just not in __all__
++from valkey.typing import KeyT, ResponseT  # type: ignore[attr-defined]
+ from valkey.utils import LIBVALKEY_AVAILABLE
+ 
+ 
+@@ -529,7 +531,7 @@ class TestSentinelLocalCache:
+ class TestCustomCache:
+     class _CustomCache(AbstractCache):
+         def __init__(self):
+-            self.responses = cachetools.LRUCache(maxsize=1000)
++            self.responses = cachetools.LRUCache(maxsize=1000)  # type: ignore[var-annotated]
+             self.keys_to_commands = defaultdict(list)
+             self.commands_to_keys = defaultdict(list)
+ 
+diff --git a/tests/test_commands.py b/tests/test_commands.py
+index ec8074f..593f2bb 100644
+--- a/tests/test_commands.py
++++ b/tests/test_commands.py
+@@ -1,3 +1,5 @@
++from __future__ import annotations
++
+ import binascii
+ import datetime
+ import math
+@@ -6,6 +8,7 @@ import threading
+ import time
+ from asyncio import CancelledError
+ from string import ascii_letters
++from typing import Any
+ from unittest import mock
+ from unittest.mock import patch
+ 
+@@ -545,7 +548,7 @@ class TestValkeyCommands:
+         assert_resp_response(r, r.client_getname(), "valkey_py_test", b"valkey_py_test")
+ 
+     @skip_if_server_version_lt("7.2.0")
+-    def test_client_setinfo(self, r: valkey.Valkey):
++    def test_client_setinfo(self, r: valkey.Valkey[str]):
+         r.ping()
+         info = r.client_info()
+         assert info["lib-name"] == "valkey-py"
+@@ -776,7 +779,7 @@ class TestValkeyCommands:
+         # assert data['maxmemory'].isdigit()
+ 
+     @skip_if_server_version_lt("7.0.0")
+-    def test_config_get_multi_params(self, r: valkey.Valkey):
++    def test_config_get_multi_params(self, r: valkey.Valkey[str]):
+         res = r.config_get("*max-*-entries*", "maxmemory")
+         assert "maxmemory" in res
+         assert "hash-max-listpack-entries" in res
+@@ -797,7 +800,7 @@ class TestValkeyCommands:
+         assert r.config_get()["timeout"] == "0"
+ 
+     @skip_if_server_version_lt("7.0.0")
+-    def test_config_set_multi_params(self, r: valkey.Valkey):
++    def test_config_set_multi_params(self, r: valkey.Valkey[str]):
+         r.config_set("timeout", 70, "maxmemory", 100)
+         assert r.config_get()["timeout"] == "70"
+         assert r.config_get()["maxmemory"] == "100"
+@@ -960,13 +963,13 @@ class TestValkeyCommands:
+         time.sleep(0.3)
+         assert r.bgsave(True)
+ 
+-    def test_never_decode_option(self, r: valkey.Valkey):
+-        opts = {NEVER_DECODE: []}
++    def test_never_decode_option(self, r: valkey.Valkey[str]):
++        opts: dict[str, list[Any]] = {NEVER_DECODE: []}
+         r.delete("a")
+         assert r.execute_command("EXISTS", "a", **opts) == 0
+ 
+-    def test_empty_response_option(self, r: valkey.Valkey):
+-        opts = {EMPTY_RESPONSE: []}
++    def test_empty_response_option(self, r: valkey.Valkey[str]):
++        opts: dict[str, list[Any]] = {EMPTY_RESPONSE: []}
+         r.delete("a")
+         assert r.execute_command("EXISTS", "a", **opts) == 0
+ 
+@@ -2839,7 +2842,7 @@ class TestValkeyCommands:
+         assert r.zrank("a", "a6") is None
+ 
+     @skip_if_server_version_lt("7.2.0")
+-    def test_zrank_withscore(self, r: valkey.Valkey):
++    def test_zrank_withscore(self, r: valkey.Valkey[str]):
+         r.zadd("a", {"a1": 1, "a2": 2, "a3": 3, "a4": 4, "a5": 5})
+         assert r.zrank("a", "a1") == 0
+         assert r.zrank("a", "a2") == 1
+@@ -3457,7 +3460,7 @@ class TestValkeyCommands:
+ 
+     @skip_if_server_version_lt("6.2.0")
+     def test_geoadd_nx(self, r):
+-        values = (2.1909389952632, 41.433791470673, "place1") + (
++        values: Any = (2.1909389952632, 41.433791470673, "place1") + (
+             2.1873744593677,
+             41.406342043777,
+             "place2",
+@@ -3473,7 +3476,7 @@ class TestValkeyCommands:
+ 
+     @skip_if_server_version_lt("6.2.0")
+     def test_geoadd_xx(self, r):
+-        values = (2.1909389952632, 41.433791470673, "place1")
++        values: Any = (2.1909389952632, 41.433791470673, "place1")
+         assert r.geoadd("a", values) == 1
+         values = (2.1909389952632, 41.433791470673, "place1") + (
+             2.1873744593677,
+@@ -3485,7 +3488,7 @@ class TestValkeyCommands:
+ 
+     @skip_if_server_version_lt("6.2.0")
+     def test_geoadd_ch(self, r):
+-        values = (2.1909389952632, 41.433791470673, "place1")
++        values: Any = (2.1909389952632, 41.433791470673, "place1")
+         assert r.geoadd("a", values) == 1
+         values = (2.1909389952632, 31.433791470673, "place1") + (
+             2.1873744593677,
+@@ -4106,7 +4109,7 @@ class TestValkeyCommands:
+         assert r.xadd(stream, {"foo": "bar"}, approximate=True, minid=m3)
+ 
+     @skip_if_server_version_lt("7.0.0")
+-    def test_xadd_explicit_ms(self, r: valkey.Valkey):
++    def test_xadd_explicit_ms(self, r: valkey.Valkey[str]):
+         stream = "stream"
+         message_id = r.xadd(stream, {"foo": "bar"}, "9999999999999999999-*")
+         ms = message_id[: message_id.index(b"-")]
+@@ -4283,7 +4286,7 @@ class TestValkeyCommands:
+         assert r.xinfo_groups(stream) == expected
+ 
+     @skip_if_server_version_lt("7.0.0")
+-    def test_xgroup_create_entriesread(self, r: valkey.Valkey):
++    def test_xgroup_create_entriesread(self, r: valkey.Valkey[str]):
+         stream = "stream"
+         group = "group"
+         r.xadd(stream, {"foo": "bar"})
+@@ -4462,7 +4465,7 @@ class TestValkeyCommands:
+         r.xgroup_create(stream, group, 0)
+ 
+         # xpending on a group that has no consumers yet
+-        expected = {"pending": 0, "min": None, "max": None, "consumers": []}
++        expected: dict[str, Any] = {"pending": 0, "min": None, "max": None, "consumers": []}
+         assert r.xpending(stream, group) == expected
+ 
+         # read 1 message from the group with each consumer
+@@ -4841,7 +4844,7 @@ class TestValkeyCommands:
+         assert resp == [0, None, 255]
+ 
+     @skip_if_server_version_lt("6.0.0")
+-    def test_bitfield_ro(self, r: valkey.Valkey):
++    def test_bitfield_ro(self, r: valkey.Valkey[str]):
+         bf = r.bitfield("a")
+         resp = bf.set("u8", 8, 255).execute()
+         assert resp == [0]
+@@ -4885,25 +4888,25 @@ class TestValkeyCommands:
+         assert isinstance(r.memory_usage("foo"), int)
+ 
+     @skip_if_server_version_lt("7.0.0")
+-    def test_latency_histogram_not_implemented(self, r: valkey.Valkey):
++    def test_latency_histogram_not_implemented(self, r: valkey.Valkey[str]):
+         with pytest.raises(NotImplementedError):
+             r.latency_histogram()
+ 
+-    def test_latency_graph_not_implemented(self, r: valkey.Valkey):
++    def test_latency_graph_not_implemented(self, r: valkey.Valkey[str]):
+         with pytest.raises(NotImplementedError):
+             r.latency_graph()
+ 
+-    def test_latency_doctor_not_implemented(self, r: valkey.Valkey):
++    def test_latency_doctor_not_implemented(self, r: valkey.Valkey[str]):
+         with pytest.raises(NotImplementedError):
+             r.latency_doctor()
+ 
+-    def test_latency_history(self, r: valkey.Valkey):
++    def test_latency_history(self, r: valkey.Valkey[str]):
+         assert r.latency_history("command") == []
+ 
+-    def test_latency_latest(self, r: valkey.Valkey):
++    def test_latency_latest(self, r: valkey.Valkey[str]):
+         assert r.latency_latest() == []
+ 
+-    def test_latency_reset(self, r: valkey.Valkey):
++    def test_latency_reset(self, r: valkey.Valkey[str]):
+         assert r.latency_reset() == 0
+ 
+     @skip_if_server_version_lt("4.0.0")
+@@ -4924,7 +4927,7 @@ class TestValkeyCommands:
+             r.command_docs("set")
+ 
+     @skip_if_server_version_lt("7.0.0")
+-    def test_command_list(self, r: valkey.Valkey):
++    def test_command_list(self, r: valkey.Valkey[str]):
+         assert len(r.command_list()) > 300
+         assert len(r.command_list(module="fakemod")) == 0
+         assert len(r.command_list(category="list")) > 15
+@@ -4963,7 +4966,7 @@ class TestValkeyCommands:
+ 
+     @pytest.mark.onlynoncluster
+     @skip_if_server_version_lt("7.0.0")
+-    def test_command_getkeysandflags(self, r: valkey.Valkey):
++    def test_command_getkeysandflags(self, r: valkey.Valkey[str]):
+         res = r.command_getkeysandflags("LMOVE", "mylist1", "mylist2", "left", "left")
+         assert res == [
+             [b"mylist1", [b"RW", b"access", b"delete"]],
+@@ -4983,7 +4986,7 @@ class TestValkeyCommands:
+ 
+     @pytest.mark.onlynoncluster
+     @skip_if_server_version_lt("7.0.0")
+-    def test_module_loadex(self, r: valkey.Valkey):
++    def test_module_loadex(self, r: valkey.Valkey[str]):
+         with pytest.raises(valkey.exceptions.ModuleError) as excinfo:
+             r.module_loadex("/some/fake/path")
+             assert "Error loading the extension." in str(excinfo.value)
+@@ -5042,14 +5045,14 @@ class TestValkeyCommands:
+             assert r.replicaof("NO ONE")
+         assert r.replicaof("NO", "ONE")
+ 
+-    def test_shutdown(self, r: valkey.Valkey):
+-        r.execute_command = mock.MagicMock()
++    def test_shutdown(self, r: valkey.Valkey[str]):
++        r.execute_command = mock.MagicMock()  # type: ignore[method-assign]
+         r.execute_command("SHUTDOWN", "NOSAVE")
+         r.execute_command.assert_called_once_with("SHUTDOWN", "NOSAVE")
+ 
+     @skip_if_server_version_lt("7.0.0")
+-    def test_shutdown_with_params(self, r: valkey.Valkey):
+-        r.execute_command = mock.MagicMock()
++    def test_shutdown_with_params(self, r: valkey.Valkey[str]):
++        r.execute_command = mock.MagicMock()  # type: ignore[method-assign]
+         r.execute_command("SHUTDOWN", "SAVE", "NOW", "FORCE")
+         r.execute_command.assert_called_once_with("SHUTDOWN", "SAVE", "NOW", "FORCE")
+         r.execute_command("SHUTDOWN", "ABORT")
+@@ -5073,7 +5076,7 @@ class TestValkeyCommands:
+         assert b"FULLRESYNC" in res
+ 
+     @pytest.mark.onlynoncluster
+-    def test_interrupted_command(self, r: valkey.Valkey):
++    def test_interrupted_command(self, r: valkey.Valkey[str]):
+         """
+         Regression test for issue #1128:  An Un-handled BaseException
+         will leave the socket with un-read response to a previous
+diff --git a/tests/test_pipeline.py b/tests/test_pipeline.py
+index 065f898..9fd7665 100644
+--- a/tests/test_pipeline.py
++++ b/tests/test_pipeline.py
+@@ -1,3 +1,5 @@
++from __future__ import annotations
++
+ from contextlib import closing
+ from unittest import mock
+ 
+@@ -309,7 +311,7 @@ class TestPipeline:
+     def test_transaction_callable(self, r):
+         r["a"] = 1
+         r["b"] = 2
+-        has_run = []
++        has_run: list[str] = []
+ 
+         def my_transaction(pipe):
+             a_value = pipe.get("a")
+diff --git a/tests/test_pubsub.py b/tests/test_pubsub.py
+index 01b5dee..76809fe 100644
+--- a/tests/test_pubsub.py
++++ b/tests/test_pubsub.py
+@@ -115,7 +115,7 @@ class TestPubSubSubscribeUnsubscribe:
+     @pytest.mark.onlycluster
+     @skip_if_server_version_lt("7.0.0")
+     def test_shard_channel_subscribe_unsubscribe_cluster(self, r):
+-        node_channels = defaultdict(int)
++        node_channels = defaultdict(int)  # type: ignore[var-annotated]
+         p = r.pubsub()
+         keys = {
+             "foo": r.get_node_from_key("foo"),
+@@ -632,7 +632,7 @@ class TestPubSubAutoDecoding:
+ 
+     @pytest.fixture()
+     def r(self, request):
+-        return _get_client(valkey.Valkey, request=request, decode_responses=True)
++        return _get_client(valkey.Valkey[str], request=request, decode_responses=True)
+ 
+     def test_channel_subscribe_unsubscribe(self, r):
+         p = r.pubsub()
+@@ -768,7 +768,7 @@ class TestPubSubAutoDecoding:
+ 
+ class TestPubSubValkeyDown:
+     def test_channel_subscribe(self, r):
+-        r = valkey.Valkey(host="localhost", port=6390)
++        r = valkey.Valkey[str](host="localhost", port=6390)
+         p = r.pubsub()
+         with pytest.raises(ConnectionError):
+             p.subscribe("foo")
+@@ -845,7 +845,7 @@ class TestPubSubSubcommands:
+ 
+     @pytest.mark.onlycluster
+     @skip_if_server_version_lt("7.0.0")
+-    def test_pubsub_shardnumsub(self, r):
++    def test_pubsub_shardnumsub(self, r: valkey.ValkeyCluster[bytes]):
+         channels = {
+             b"foo": r.get_node_from_key("foo"),
+             b"bar": r.get_node_from_key("bar"),
+@@ -866,8 +866,8 @@ class TestPubSubSubcommands:
+         p3.ssubscribe("baz")
+         assert wait_for_message(p3, node=channels[b"baz"])["type"] == "ssubscribe"
+ 
+-        channels = [(b"foo", 1), (b"bar", 2), (b"baz", 3)]
+-        assert r.pubsub_shardnumsub("foo", "bar", "baz", target_nodes="all") == channels
++        channels_names = [(b"foo", 1), (b"bar", 2), (b"baz", 3)]
++        assert r.pubsub_shardnumsub("foo", "bar", "baz", target_nodes="all") == channels_names  # type: ignore[attr-defined]
+ 
+ 
+ class TestPubSubPings:
+@@ -972,7 +972,7 @@ class TestPubSubDeadlock:
+     @pytest.mark.timeout(30, method="thread")
+     def test_pubsub_deadlock(self, master_host):
+         pool = valkey.ConnectionPool(host=master_host[0], port=master_host[1])
+-        r = valkey.Valkey(connection_pool=pool)
++        r = valkey.Valkey[str](connection_pool=pool)
+ 
+         for i in range(60):
+             p = r.pubsub()
+@@ -985,7 +985,7 @@ class TestPubSubDeadlock:
+ @pytest.mark.onlynoncluster
+ class TestPubSubAutoReconnect:
+     def mysetup(self, r, method):
+-        self.messages = queue.Queue()
++        self.messages = queue.Queue()  # type: ignore[var-annotated]
+         self.pubsub = r.pubsub()
+         self.state = 0
+         self.cond = threading.Condition()
+@@ -1026,7 +1026,7 @@ class TestPubSubAutoReconnect:
+             self.cond.notify()
+         self.thread.join()
+ 
+-    def test_reconnect_socket_error(self, r: valkey.Valkey, method):
++    def test_reconnect_socket_error(self, r: valkey.Valkey[str], method):
+         """
+         Test that a socket error will cause reconnect
+         """
+@@ -1048,7 +1048,7 @@ class TestPubSubAutoReconnect:
+         finally:
+             self.mycleanup()
+ 
+-    def test_reconnect_disconnect(self, r: valkey.Valkey, method):
++    def test_reconnect_disconnect(self, r: valkey.Valkey[str], method):
+         """
+         Test that a manual disconnect() will cause reconnect
+         """
+@@ -1107,7 +1107,7 @@ class TestPubSubAutoReconnect:
+ 
+ @pytest.mark.onlynoncluster
+ class TestBaseException:
+-    def test_base_exception(self, r: valkey.Valkey):
++    def test_base_exception(self, r: valkey.Valkey[str]):
+         """
+         Manually trigger a BaseException inside the parser's .read_response method
+         and verify that it isn't caught
+diff --git a/valkey/__init__.py b/valkey/__init__.py
+index e4202fb..1feaac7 100644
+--- a/valkey/__init__.py
++++ b/valkey/__init__.py
+@@ -1,4 +1,5 @@
+ from importlib import metadata
++from typing import Tuple, Union
+ 
+ from valkey import asyncio  # noqa
+ from valkey.backoff import default_backoff
+@@ -44,6 +45,9 @@ def int_or_str(value):
+         return value
+ 
+ 
++__version__: str
++VERSION: Tuple[Union[int, str], ...]
++
+ try:
+     __version__ = metadata.version("valkey")
+ except metadata.PackageNotFoundError:
+diff --git a/valkey/asyncio/__init__.pyi b/valkey/asyncio/__init__.pyi
+new file mode 100644
+index 0000000..7d45bb0
+--- /dev/null
++++ b/valkey/asyncio/__init__.pyi
+@@ -0,0 +1,64 @@
++from valkey.asyncio.client import Valkey as Valkey, StrictValkey as StrictValkey
++from valkey.asyncio.cluster import ValkeyCluster as ValkeyCluster
++from valkey.asyncio.connection import (
++    BlockingConnectionPool as BlockingConnectionPool,
++    Connection as Connection,
++    ConnectionPool as ConnectionPool,
++    SSLConnection as SSLConnection,
++    UnixDomainSocketConnection as UnixDomainSocketConnection,
++)
++from valkey.asyncio.parser import CommandsParser as CommandsParser
++from valkey.asyncio.sentinel import (
++    Sentinel as Sentinel,
++    SentinelConnectionPool as SentinelConnectionPool,
++    SentinelManagedConnection as SentinelManagedConnection,
++    SentinelManagedSSLConnection as SentinelManagedSSLConnection,
++)
++from valkey.asyncio.utils import from_url as from_url
++from valkey.backoff import default_backoff as default_backoff
++from valkey.exceptions import (
++    AuthenticationError as AuthenticationError,
++    AuthenticationWrongNumberOfArgsError as AuthenticationWrongNumberOfArgsError,
++    BusyLoadingError as BusyLoadingError,
++    ChildDeadlockedError as ChildDeadlockedError,
++    ConnectionError as ConnectionError,
++    DataError as DataError,
++    InvalidResponse as InvalidResponse,
++    PubSubError as PubSubError,
++    ReadOnlyError as ReadOnlyError,
++    ValkeyError as ValkeyError,
++    ResponseError as ResponseError,
++    TimeoutError as TimeoutError,
++    WatchError as WatchError,
++)
++
++__all__ = [
++    "AuthenticationError",
++    "AuthenticationWrongNumberOfArgsError",
++    "BlockingConnectionPool",
++    "BusyLoadingError",
++    "ChildDeadlockedError",
++    "CommandsParser",
++    "Connection",
++    "ConnectionError",
++    "ConnectionPool",
++    "DataError",
++    "from_url",
++    "default_backoff",
++    "InvalidResponse",
++    "PubSubError",
++    "ReadOnlyError",
++    "Valkey",
++    "ValkeyCluster",
++    "ValkeyError",
++    "ResponseError",
++    "Sentinel",
++    "SentinelConnectionPool",
++    "SentinelManagedConnection",
++    "SentinelManagedSSLConnection",
++    "SSLConnection",
++    "StrictValkey",
++    "TimeoutError",
++    "UnixDomainSocketConnection",
++    "WatchError",
++]
+diff --git a/valkey/asyncio/client.pyi b/valkey/asyncio/client.pyi
+new file mode 100644
+index 0000000..7cb11b2
+--- /dev/null
++++ b/valkey/asyncio/client.pyi
+@@ -0,0 +1,1102 @@
++from _typeshed import Incomplete, Unused
++from collections.abc import AsyncIterator, Awaitable, Callable, Generator, Iterable, Mapping, MutableMapping, Sequence
++from datetime import datetime, timedelta
++from types import TracebackType
++from typing import Any, ClassVar, Literal, NoReturn, Protocol, TypedDict, overload
++from typing_extensions import Self, TypeAlias
++
++from valkey import ValkeyError
++from valkey.asyncio.connection import ConnectCallbackT, Connection, ConnectionPool
++from valkey.asyncio.lock import Lock
++from valkey.asyncio.retry import Retry
++from valkey.client import AbstractValkey, _CommandOptions, _Key, _StrType, _Value
++from valkey.commands import AsyncCoreCommands, AsyncSentinelCommands, ValkeyModuleCommands
++from valkey.credentials import CredentialProvider
++from valkey.typing import ChannelT, EncodableT, KeyT, PatternT, StreamIdT
++
++PubSubHandler: TypeAlias = Callable[[dict[str, str]], Awaitable[None]]
++
++class ResponseCallbackProtocol(Protocol):
++    def __call__(self, response: Any, **kwargs): ...
++
++class AsyncResponseCallbackProtocol(Protocol):
++    async def __call__(self, response: Any, **kwargs): ...
++
++ResponseCallbackT: TypeAlias = ResponseCallbackProtocol | AsyncResponseCallbackProtocol
++
++class Valkey(AbstractValkey, ValkeyModuleCommands, AsyncCoreCommands[_StrType], AsyncSentinelCommands):
++    response_callbacks: MutableMapping[str | bytes, ResponseCallbackT]
++    auto_close_connection_pool: bool
++    connection_pool: Any
++    single_connection_client: Any
++    connection: Any
++    @overload
++    @classmethod
++    def from_url(
++        cls,
++        url: str,
++        *,
++        host: str = "localhost",
++        port: int = 6379,
++        db: str | int = 0,
++        password: str | None = None,
++        socket_timeout: float | None = None,
++        socket_connect_timeout: float | None = None,
++        socket_keepalive: bool | None = None,
++        socket_keepalive_options: Mapping[int, int | bytes] | None = None,
++        connection_pool: ConnectionPool[Any] | None = None,
++        unix_socket_path: str | None = None,
++        encoding: str = "utf-8",
++        encoding_errors: str = "strict",
++        decode_responses: Literal[True],
++        retry_on_timeout: bool = False,
++        retry_on_error: list[type[ValkeyError]] | None = None,
++        ssl: bool = False,
++        ssl_keyfile: str | None = None,
++        ssl_certfile: str | None = None,
++        ssl_cert_reqs: str = "required",
++        ssl_ca_certs: str | None = None,
++        ssl_ca_data: str | None = None,
++        ssl_check_hostname: bool = False,
++        max_connections: int | None = None,
++        single_connection_client: bool = False,
++        health_check_interval: int = 0,
++        client_name: str | None = None,
++        lib_name: str | None = None,
++        lib_version: str | None = None,
++        username: str | None = None,
++        retry: Retry | None = None,
++        auto_close_connection_pool: bool = True,
++        valkey_connect_func: ConnectCallbackT | None = None,
++        credential_provider: CredentialProvider | None = None,
++    ) -> Valkey[str]: ...
++    @overload
++    @classmethod
++    def from_url(
++        cls,
++        url: str,
++        *,
++        host: str = "localhost",
++        port: int = 6379,
++        db: str | int = 0,
++        password: str | None = None,
++        socket_timeout: float | None = None,
++        socket_connect_timeout: float | None = None,
++        socket_keepalive: bool | None = None,
++        socket_keepalive_options: Mapping[int, int | bytes] | None = None,
++        connection_pool: ConnectionPool[Any] | None = None,
++        unix_socket_path: str | None = None,
++        encoding: str = "utf-8",
++        encoding_errors: str = "strict",
++        decode_responses: Literal[False] = False,
++        retry_on_timeout: bool = False,
++        retry_on_error: list[type[ValkeyError]] | None = None,
++        ssl: bool = False,
++        ssl_keyfile: str | None = None,
++        ssl_certfile: str | None = None,
++        ssl_cert_reqs: str = "required",
++        ssl_ca_certs: str | None = None,
++        ssl_ca_data: str | None = None,
++        ssl_check_hostname: bool = False,
++        max_connections: int | None = None,
++        single_connection_client: bool = False,
++        health_check_interval: int = 0,
++        client_name: str | None = None,
++        lib_name: str | None = None,
++        lib_version: str | None = None,
++        username: str | None = None,
++        retry: Retry | None = None,
++        auto_close_connection_pool: bool = True,
++        valkey_connect_func: ConnectCallbackT | None = None,
++        credential_provider: CredentialProvider | None = None,
++    ) -> Valkey[bytes]: ...
++    @overload
++    def __init__(
++        self: Valkey[str],
++        *,
++        host: str = "localhost",
++        port: int = 6379,
++        db: str | int = 0,
++        password: str | None = None,
++        socket_timeout: float | None = None,
++        socket_connect_timeout: float | None = None,
++        socket_keepalive: bool | None = None,
++        socket_keepalive_options: Mapping[int, int | bytes] | None = None,
++        connection_pool: ConnectionPool[Any] | None = None,
++        unix_socket_path: str | None = None,
++        encoding: str = "utf-8",
++        encoding_errors: str = "strict",
++        decode_responses: Literal[True],
++        retry_on_timeout: bool = False,
++        retry_on_error: list[type[ValkeyError]] | None = None,
++        ssl: bool = False,
++        ssl_keyfile: str | None = None,
++        ssl_certfile: str | None = None,
++        ssl_cert_reqs: str = "required",
++        ssl_ca_certs: str | None = None,
++        ssl_ca_data: str | None = None,
++        ssl_check_hostname: bool = False,
++        max_connections: int | None = None,
++        single_connection_client: bool = False,
++        health_check_interval: int = 0,
++        client_name: str | None = None,
++        lib_name: str | None = None,
++        lib_version: str | None = None,
++        username: str | None = None,
++        retry: Retry | None = None,
++        auto_close_connection_pool: bool = True,
++        valkey_connect_func: ConnectCallbackT | None = None,
++        credential_provider: CredentialProvider | None = None,
++    ) -> None: ...
++    @overload
++    def __init__(
++        self: Valkey[bytes],
++        *,
++        host: str = "localhost",
++        port: int = 6379,
++        db: str | int = 0,
++        password: str | None = None,
++        socket_timeout: float | None = None,
++        socket_connect_timeout: float | None = None,
++        socket_keepalive: bool | None = None,
++        socket_keepalive_options: Mapping[int, int | bytes] | None = None,
++        connection_pool: ConnectionPool[Any] | None = None,
++        unix_socket_path: str | None = None,
++        encoding: str = "utf-8",
++        encoding_errors: str = "strict",
++        decode_responses: Literal[False] = False,
++        retry_on_timeout: bool = False,
++        retry_on_error: list[type[ValkeyError]] | None = None,
++        ssl: bool = False,
++        ssl_keyfile: str | None = None,
++        ssl_certfile: str | None = None,
++        ssl_cert_reqs: str = "required",
++        ssl_ca_certs: str | None = None,
++        ssl_ca_data: str | None = None,
++        ssl_check_hostname: bool = False,
++        max_connections: int | None = None,
++        single_connection_client: bool = False,
++        health_check_interval: int = 0,
++        client_name: str | None = None,
++        lib_name: str | None = None,
++        lib_version: str | None = None,
++        username: str | None = None,
++        retry: Retry | None = None,
++        auto_close_connection_pool: bool = True,
++        valkey_connect_func: ConnectCallbackT | None = None,
++        credential_provider: CredentialProvider | None = None,
++    ) -> None: ...
++    def __await__(self) -> Generator[Any, None, Self]: ...
++    async def initialize(self) -> Self: ...
++    def set_response_callback(self, command: str, callback: ResponseCallbackT): ...
++    def load_external_module(self, funcname, func) -> None: ...
++    def pipeline(self, transaction: bool = True, shard_hint: str | None = None) -> Pipeline[_StrType]: ...
++    async def transaction(
++        self,
++        func: Callable[[Pipeline[_StrType]], Any | Awaitable[Any]],
++        *watches: KeyT,
++        shard_hint: str | None = None,
++        value_from_callable: bool = False,
++        watch_delay: float | None = None,
++    ): ...
++    def lock(
++        self,
++        name: KeyT,
++        timeout: float | None = None,
++        sleep: float = 0.1,
++        blocking: bool = True,
++        blocking_timeout: float | None = None,
++        lock_class: type[Lock] | None = None,
++        thread_local: bool = True,
++    ) -> Lock: ...
++    def pubsub(self, **kwargs) -> PubSub: ...
++    def monitor(self) -> Monitor: ...
++    def client(self) -> Valkey[_StrType]: ...
++    async def __aenter__(self) -> Self: ...
++    async def __aexit__(
++        self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None
++    ) -> None: ...
++    def __del__(self, _warnings: Any = ...) -> None: ...
++    async def aclose(self, close_connection_pool: bool | None = None) -> None: ...
++    async def close(self, close_connection_pool: bool | None = None) -> None: ...
++    async def execute_command(self, *args, **options): ...
++    async def parse_response(self, connection: Connection, command_name: str | bytes, **options): ...
++
++StrictValkey = Valkey
++
++class MonitorCommandInfo(TypedDict):
++    time: float
++    db: int
++    client_address: str
++    client_port: str
++    client_type: str
++    command: str
++
++class Monitor:
++    monitor_re: Any
++    command_re: Any
++    connection_pool: Any
++    connection: Any
++    def __init__(self, connection_pool: ConnectionPool[Any]) -> None: ...
++    async def connect(self) -> None: ...
++    async def __aenter__(self) -> Self: ...
++    async def __aexit__(self, *args: Unused) -> None: ...
++    async def next_command(self) -> MonitorCommandInfo: ...
++    def listen(self) -> AsyncIterator[MonitorCommandInfo]: ...
++
++class PubSub:
++    PUBLISH_MESSAGE_TYPES: ClassVar[tuple[str, ...]]
++    UNSUBSCRIBE_MESSAGE_TYPES: ClassVar[tuple[str, ...]]
++    HEALTH_CHECK_MESSAGE: ClassVar[str]
++    connection_pool: Any
++    shard_hint: str | None
++    ignore_subscribe_messages: bool
++    connection: Any
++    encoder: Any
++    health_check_response: Iterable[str | bytes]
++    channels: Any
++    pending_unsubscribe_channels: Any
++    patterns: Any
++    pending_unsubscribe_patterns: Any
++    def __init__(
++        self,
++        connection_pool: ConnectionPool[Any],
++        shard_hint: str | None = None,
++        ignore_subscribe_messages: bool = False,
++        encoder: Incomplete | None = None,
++    ) -> None: ...
++    async def __aenter__(self) -> Self: ...
++    async def __aexit__(
++        self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None
++    ) -> None: ...
++    def __del__(self) -> None: ...
++    async def reset(self) -> None: ...
++    async def aclose(self) -> None: ...
++    def close(self) -> Awaitable[NoReturn]: ...
++    async def on_connect(self, connection: Connection): ...
++    @property
++    def subscribed(self) -> bool: ...
++    async def execute_command(self, *args: EncodableT): ...
++    async def parse_response(self, block: bool = True, timeout: float = 0): ...
++    async def check_health(self) -> None: ...
++    async def psubscribe(self, *args: ChannelT, **kwargs: PubSubHandler): ...
++    def punsubscribe(self, *args: ChannelT) -> Awaitable[Any]: ...
++    async def subscribe(self, *args: ChannelT, **kwargs: Callable[..., Any]): ...
++    def unsubscribe(self, *args) -> Awaitable[Any]: ...
++    def listen(self) -> AsyncIterator[Any]: ...
++    async def get_message(self, ignore_subscribe_messages: bool = False, timeout: float = 0.0): ...
++    def ping(self, message: Incomplete | None = None) -> Awaitable[Any]: ...
++    async def handle_message(self, response, ignore_subscribe_messages: bool = False): ...
++    async def run(self, *, exception_handler: PSWorkerThreadExcHandlerT | None = None, poll_timeout: float = 1.0) -> None: ...
++
++class PubsubWorkerExceptionHandler(Protocol):
++    def __call__(self, e: BaseException, pubsub: PubSub): ...
++
++class AsyncPubsubWorkerExceptionHandler(Protocol):
++    async def __call__(self, e: BaseException, pubsub: PubSub): ...
++
++PSWorkerThreadExcHandlerT: TypeAlias = PubsubWorkerExceptionHandler | AsyncPubsubWorkerExceptionHandler
++CommandT: TypeAlias = tuple[tuple[str | bytes, ...], Mapping[str, Any]]
++CommandStackT: TypeAlias = list[CommandT]
++
++class Pipeline(Valkey[_StrType]):
++    UNWATCH_COMMANDS: ClassVar[set[str]]
++    connection_pool: Any
++    connection: Any
++    response_callbacks: Any
++    is_transaction: bool
++    shard_hint: str | None
++    watching: bool
++    command_stack: Any
++    scripts: Any
++    explicit_transaction: bool
++    def __init__(
++        self,
++        connection_pool: ConnectionPool[Any],
++        response_callbacks: MutableMapping[str | bytes, ResponseCallbackT],
++        transaction: bool,
++        shard_hint: str | None,
++    ) -> None: ...
++    async def __aenter__(self) -> Self: ...
++    async def __aexit__(
++        self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None
++    ) -> None: ...
++    def __await__(self) -> Generator[Any, None, Self]: ...
++    def __len__(self) -> int: ...
++    def __bool__(self) -> bool: ...
++    async def reset(self) -> None: ...
++    async def aclose(self) -> None: ...  # type: ignore[override]
++    def multi(self) -> None: ...
++    def execute_command(self, *args, **kwargs) -> Pipeline[_StrType] | Awaitable[Pipeline[_StrType]]: ...
++    async def immediate_execute_command(self, *args, **options): ...
++    def pipeline_execute_command(self, *args, **options): ...
++    def raise_first_error(self, commands: CommandStackT, response: Iterable[Any]): ...
++    def annotate_exception(self, exception: Exception, number: int, command: Iterable[object]) -> None: ...
++    async def parse_response(self, connection: Connection, command_name: str | bytes, **options): ...
++    async def load_scripts(self) -> None: ...
++    async def execute(self, raise_on_error: bool = True): ...
++    async def discard(self) -> None: ...
++    async def watch(self, *names: KeyT) -> bool: ...
++    async def unwatch(self) -> bool: ...
++    # region acl commands
++    def acl_cat(self, category: str | None = None, **kwargs: _CommandOptions) -> Any: ...
++    def acl_deluser(self, *username: str, **kwargs: _CommandOptions) -> Any: ...
++    def acl_genpass(self, bits: int | None = None, **kwargs: _CommandOptions) -> Any: ...
++    def acl_getuser(self, username: str, **kwargs: _CommandOptions) -> Any: ...
++    def acl_help(self, **kwargs: _CommandOptions) -> Any: ...
++    def acl_list(self, **kwargs: _CommandOptions) -> Any: ...
++    def acl_log(self, count: int | None = None, **kwargs: _CommandOptions) -> Any: ...
++    def acl_log_reset(self, **kwargs: _CommandOptions) -> Any: ...
++    def acl_load(self, **kwargs: _CommandOptions) -> Any: ...
++    def acl_save(self, **kwargs: _CommandOptions) -> Any: ...
++    def acl_setuser(  # type: ignore[override]
++        self,
++        username: str,
++        enabled: bool = False,
++        nopass: bool = False,
++        passwords: Sequence[str] | None = None,
++        hashed_passwords: Sequence[str] | None = None,
++        categories: Sequence[str] | None = None,
++        commands: Sequence[str] | None = None,
++        keys: Sequence[str] | None = None,
++        channels: Iterable[ChannelT] | None = None,
++        selectors: Iterable[tuple[str, KeyT]] | None = None,
++        reset: bool = False,
++        reset_keys: bool = False,
++        reset_channels: bool = False,
++        reset_passwords: bool = False,
++        **kwargs: _CommandOptions,
++    ) -> Pipeline[_StrType]: ...
++    def acl_users(self, **kwargs: _CommandOptions) -> Any: ...
++    def acl_whoami(self, **kwargs: _CommandOptions) -> Any: ...
++    # endregion
++    # region cluster commands
++    def cluster(self, cluster_arg: str, *args, **kwargs: _CommandOptions) -> Any: ...
++    def readwrite(self, **kwargs: _CommandOptions) -> Any: ...
++    def readonly(self, **kwargs: _CommandOptions) -> Any: ...
++    # endregion
++    # region BasicKey commands
++    def append(self, key, value) -> Any: ...
++    def bitcount(self, key: _Key, start: int | None = None, end: int | None = None, mode: str | None = None) -> Any: ...
++    def bitfield(self, key, default_overflow: Incomplete | None = None) -> Any: ...
++    def bitop(self, operation, dest, *keys) -> Any: ...
++    def bitpos(self, key: _Key, bit: int, start: int | None = None, end: int | None = None, mode: str | None = None) -> Any: ...
++    def copy(self, source, destination, destination_db: Incomplete | None = None, replace: bool = False) -> Any: ...
++    def decr(self, name, amount: int = 1) -> Any: ...
++    def decrby(self, name, amount: int = 1) -> Any: ...
++    def delete(self, *names: _Key) -> Any: ...
++    def dump(self, name: _Key) -> Any: ...
++    def exists(self, *names: _Key) -> Any: ...
++    def expire(
++        self, name: _Key, time: int | timedelta, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False
++    ) -> Any: ...
++    def expireat(self, name, when, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False) -> Any: ...
++    def get(self, name: _Key) -> Any: ...
++    def getdel(self, name: _Key) -> Any: ...
++    def getex(
++        self,
++        name,
++        ex: Incomplete | None = None,
++        px: Incomplete | None = None,
++        exat: Incomplete | None = None,
++        pxat: Incomplete | None = None,
++        persist: bool = False,
++    ) -> Any: ...
++    def getbit(self, name: _Key, offset: int) -> Any: ...
++    def getrange(self, key, start, end) -> Any: ...
++    def getset(self, name, value) -> Any: ...
++    def incr(self, name: _Key, amount: int = 1) -> Any: ...
++    def incrby(self, name: _Key, amount: int = 1) -> Any: ...
++    def incrbyfloat(self, name: _Key, amount: float = 1.0) -> Any: ...
++    def keys(self, pattern: _Key = "*", **kwargs: _CommandOptions) -> Any: ...
++    def lmove(
++        self,
++        first_list: _Key,
++        second_list: _Key,
++        src: Literal["LEFT", "RIGHT"] = "LEFT",
++        dest: Literal["LEFT", "RIGHT"] = "RIGHT",
++    ) -> Any: ...
++    def blmove(
++        self,
++        first_list: _Key,
++        second_list: _Key,
++        timeout: float,
++        src: Literal["LEFT", "RIGHT"] = "LEFT",
++        dest: Literal["LEFT", "RIGHT"] = "RIGHT",
++    ) -> Any: ...
++    def mget(self, keys: _Key | Iterable[_Key], *args: _Key) -> Any: ...
++    def mset(self, mapping: Mapping[_Key, _Value]) -> Any: ...
++    def msetnx(self, mapping: Mapping[_Key, _Value]) -> Any: ...
++    def move(self, name: _Key, db: int) -> Any: ...
++    def persist(self, name: _Key) -> Any: ...
++    def pexpire(
++        self, name: _Key, time: int | timedelta, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False
++    ) -> Any: ...
++    def pexpireat(
++        self, name: _Key, when: int | datetime, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False
++    ) -> Any: ...
++    def psetex(self, name, time_ms, value) -> Any: ...
++    def pttl(self, name: _Key) -> Any: ...
++    def hrandfield(self, key, count: Incomplete | None = None, withvalues: bool = False) -> Any: ...
++    def randomkey(self, **kwargs: _CommandOptions) -> Any: ...
++    def rename(self, src, dst) -> Any: ...
++    def renamenx(self, src, dst) -> Any: ...
++    def restore(
++        self,
++        name,
++        ttl,
++        value,
++        replace: bool = False,
++        absttl: bool = False,
++        idletime: Incomplete | None = None,
++        frequency: Incomplete | None = None,
++    ) -> Any: ...
++    def set(  # type: ignore[override]
++        self,
++        name: _Key,
++        value: _Value,
++        ex: None | int | timedelta = None,
++        px: None | int | timedelta = None,
++        nx: bool = False,
++        xx: bool = False,
++        keepttl: bool = False,
++        get: bool = False,
++        exat: Incomplete | None = None,
++        pxat: Incomplete | None = None,
++    ) -> Any: ...
++    def setbit(self, name: _Key, offset: int, value: int) -> Any: ...
++    def setex(self, name: _Key, time: int | timedelta, value: _Value) -> Any: ...
++    def setnx(self, name: _Key, value: _Value) -> Any: ...
++    def setrange(self, name, offset, value) -> Any: ...
++    def stralgo(
++        self,
++        algo,
++        value1,
++        value2,
++        specific_argument: str = "strings",
++        len: bool = False,
++        idx: bool = False,
++        minmatchlen: Incomplete | None = None,
++        withmatchlen: bool = False,
++        **kwargs: _CommandOptions,
++    ) -> Any: ...
++    def strlen(self, name) -> Any: ...
++    def substr(self, name, start, end: int = -1) -> Any: ...
++    def touch(self, *args) -> Any: ...
++    def ttl(self, name: _Key) -> Any: ...
++    def type(self, name) -> Any: ...
++    def unlink(self, *names: _Key) -> Any: ...
++    # endregion
++    # region hyperlog commands
++    def pfadd(self, name: _Key, *values: _Value) -> Any: ...
++    def pfcount(self, name: _Key) -> Any: ...
++    def pfmerge(self, dest: _Key, *sources: _Key) -> Any: ...
++    # endregion
++    # region hash commands
++    def hdel(self, name: _Key, *keys: _Key) -> Any: ...
++    def hexists(self, name: _Key, key: _Key) -> Any: ...
++    def hget(self, name: _Key, key: _Key) -> Any: ...
++    def hgetall(self, name: _Key) -> Any: ...
++    def hincrby(self, name: _Key, key: _Key, amount: int = 1) -> Any: ...
++    def hincrbyfloat(self, name: _Key, key: _Key, amount: float = 1.0) -> Any: ...
++    def hkeys(self, name: _Key) -> Any: ...
++    def hlen(self, name: _Key) -> Any: ...
++    @overload
++    def hset(
++        self, name: _Key, key: _Key, value: _Value, mapping: Mapping[_Key, _Value] | None = None, items: Incomplete | None = None
++    ) -> Any: ...
++    @overload
++    def hset(
++        self, name: _Key, key: None, value: None, mapping: Mapping[_Key, _Value], items: Incomplete | None = None
++    ) -> Any: ...
++    @overload
++    def hset(self, name: _Key, *, mapping: Mapping[_Key, _Value], items: Incomplete | None = None) -> Any: ...
++    def hsetnx(self, name: _Key, key: _Key, value: _Value) -> Any: ...
++    def hmset(self, name: _Key, mapping: Mapping[_Key, _Value]) -> Any: ...
++    def hmget(self, name: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> Any: ...
++    def hvals(self, name: _Key) -> Any: ...
++    def hstrlen(self, name, key) -> Any: ...
++    # endregion
++    # region geo commands
++    def geoadd(self, name, values, nx: bool = False, xx: bool = False, ch: bool = False) -> Any: ...
++    def geodist(self, name, place1, place2, unit: Incomplete | None = None) -> Any: ...
++    def geohash(self, name, *values) -> Any: ...
++    def geopos(self, name, *values) -> Any: ...
++    def georadius(
++        self,
++        name,
++        longitude,
++        latitude,
++        radius,
++        unit: Incomplete | None = None,
++        withdist: bool = False,
++        withcoord: bool = False,
++        withhash: bool = False,
++        count: Incomplete | None = None,
++        sort: Incomplete | None = None,
++        store: Incomplete | None = None,
++        store_dist: Incomplete | None = None,
++        any: bool = False,
++    ) -> Any: ...
++    def georadiusbymember(
++        self,
++        name,
++        member,
++        radius,
++        unit: Incomplete | None = None,
++        withdist: bool = False,
++        withcoord: bool = False,
++        withhash: bool = False,
++        count: Incomplete | None = None,
++        sort: Incomplete | None = None,
++        store: Incomplete | None = None,
++        store_dist: Incomplete | None = None,
++        any: bool = False,
++    ) -> Any: ...
++    def geosearch(
++        self,
++        name,
++        member: Incomplete | None = None,
++        longitude: Incomplete | None = None,
++        latitude: Incomplete | None = None,
++        unit: str = "m",
++        radius: Incomplete | None = None,
++        width: Incomplete | None = None,
++        height: Incomplete | None = None,
++        sort: Incomplete | None = None,
++        count: Incomplete | None = None,
++        any: bool = False,
++        withcoord: bool = False,
++        withdist: bool = False,
++        withhash: bool = False,
++    ) -> Any: ...
++    def geosearchstore(
++        self,
++        dest,
++        name,
++        member: Incomplete | None = None,
++        longitude: Incomplete | None = None,
++        latitude: Incomplete | None = None,
++        unit: str = "m",
++        radius: Incomplete | None = None,
++        width: Incomplete | None = None,
++        height: Incomplete | None = None,
++        sort: Incomplete | None = None,
++        count: Incomplete | None = None,
++        any: bool = False,
++        storedist: bool = False,
++    ) -> Any: ...
++    # endregion
++    # region list commands
++    @overload
++    def blpop(self, keys: _Value | Iterable[_Value], timeout: Literal[0] | None = 0) -> Any: ...
++    @overload
++    def blpop(self, keys: _Value | Iterable[_Value], timeout: float) -> Any: ...
++    @overload
++    def brpop(self, keys: _Value | Iterable[_Value], timeout: Literal[0] | None = 0) -> Any: ...
++    @overload
++    def brpop(self, keys: _Value | Iterable[_Value], timeout: float) -> Any: ...
++    def brpoplpush(self, src, dst, timeout: int | None = 0) -> Any: ...
++    def lindex(self, name: _Key, index: int | str) -> Any: ...
++    def linsert(
++        self, name: _Key, where: Literal["BEFORE", "AFTER", "before", "after"], refvalue: _Value, value: _Value
++    ) -> Any: ...
++    def llen(self, name: _Key) -> Any: ...
++    def lpop(self, name, count: int | None = None) -> Any: ...
++    def lpush(self, name: _Value, *values: _Value) -> Any: ...
++    def lpushx(self, name, value) -> Any: ...
++    def lrange(self, name: _Key, start: int, end: int) -> Any: ...
++    def lrem(self, name: _Key, count: int, value: _Value) -> Any: ...
++    def lset(self, name: _Key, index: int, value: _Value) -> Any: ...
++    def ltrim(self, name: _Key, start: int, end: int) -> Any: ...
++    def rpop(self, name, count: int | None = None) -> Any: ...
++    def rpoplpush(self, src, dst) -> Any: ...
++    def rpush(self, name: _Value, *values: _Value) -> Any: ...
++    def rpushx(self, name, value) -> Any: ...
++    def lpos(
++        self, name, value, rank: Incomplete | None = None, count: Incomplete | None = None, maxlen: Incomplete | None = None
++    ) -> Any: ...
++    @overload  # type: ignore[override]
++    def sort(
++        self,
++        name: _Key,
++        start: int | None = None,
++        num: int | None = None,
++        by: _Key | None = None,
++        get: _Key | Sequence[_Key] | None = None,
++        desc: bool = False,
++        alpha: bool = False,
++        store: None = None,
++        groups: bool = False,
++    ) -> list[_StrType]: ...
++    @overload
++    def sort(
++        self,
++        name: _Key,
++        start: int | None = None,
++        num: int | None = None,
++        by: _Key | None = None,
++        get: _Key | Sequence[_Key] | None = None,
++        desc: bool = False,
++        alpha: bool = False,
++        *,
++        store: _Key,
++        groups: bool = False,
++    ) -> Any: ...
++    @overload
++    def sort(
++        self,
++        name: _Key,
++        start: int | None,
++        num: int | None,
++        by: _Key | None,
++        get: _Key | Sequence[_Key] | None,
++        desc: bool,
++        alpha: bool,
++        store: _Key,
++        groups: bool = False,
++    ) -> Any: ...
++    # endregion
++    # region scan commands
++    def scan(
++        self,
++        cursor: int = 0,
++        match: _Key | None = None,
++        count: int | None = None,
++        _type: str | None = None,
++        **kwargs: _CommandOptions,
++    ) -> Any: ...
++    def sscan(self, name: _Key, cursor: int = 0, match: _Key | None = None, count: int | None = None) -> Any: ...
++    def hscan(self, name: _Key, cursor: int = 0, match: _Key | None = None, count: int | None = None, no_values: bool | None = None) -> Any: ...
++    @overload
++    def zscan(self, name: _Key, cursor: int = 0, match: _Key | None = None, count: int | None = None) -> Any: ...
++    @overload
++    def zscan(
++        self,
++        name: _Key,
++        cursor: int = 0,
++        match: _Key | None = None,
++        count: int | None = None,
++        *,
++        score_cast_func: Callable[[_StrType], Any],
++    ) -> Any: ...
++    @overload
++    def zscan(
++        self, name: _Key, cursor: int, match: _Key | None, count: int | None, score_cast_func: Callable[[_StrType], Any]
++    ) -> Any: ...
++    # endregion
++    # region set commands
++    def sadd(self, name: _Key, *values: _Value) -> Any: ...
++    def scard(self, name: _Key) -> Any: ...
++    def sdiff(self, keys: _Key | Iterable[_Key], *args: _Key) -> Any: ...
++    def sdiffstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> Any: ...
++    def sinter(self, keys: _Key | Iterable[_Key], *args: _Key) -> Any: ...
++    def sinterstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> Any: ...
++    def sismember(self, name: _Key, value: _Value) -> Any: ...
++    def smembers(self, name: _Key) -> Any: ...
++    def smismember(self, name, values, *args) -> Any: ...
++    def smove(self, src: _Key, dst: _Key, value: _Value) -> Any: ...
++    @overload
++    def spop(self, name: _Key, count: None = None) -> Any: ...
++    @overload
++    def spop(self, name: _Key, count: int) -> Any: ...
++    @overload
++    def srandmember(self, name: _Key, number: None = None) -> Any: ...
++    @overload
++    def srandmember(self, name: _Key, number: int) -> Any: ...
++    def srem(self, name: _Key, *values: _Value) -> Any: ...
++    def sunion(self, keys: _Key | Iterable[_Key], *args: _Key) -> Any: ...
++    def sunionstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> Any: ...
++    # endregion
++    # region stream commands
++    def xack(self, name, groupname, *ids) -> Any: ...
++    def xadd(
++        self,
++        name,
++        fields,
++        id: str | int | bytes | memoryview = "*",
++        maxlen=None,
++        approximate: bool = True,
++        nomkstream: bool = False,
++        minid: Incomplete | None = None,
++        limit: Incomplete | None = None,
++    ) -> Any: ...
++    def xautoclaim(
++        self,
++        name,
++        groupname,
++        consumername,
++        min_idle_time,
++        start_id: StreamIdT = "0-0",
++        count: Incomplete | None = None,
++        justid: bool = False,
++    ) -> Any: ...
++    def xclaim(
++        self,
++        name,
++        groupname,
++        consumername,
++        min_idle_time,
++        message_ids,
++        idle=None,
++        time=None,
++        retrycount=None,
++        force=False,
++        justid=False,
++    ) -> Any: ...
++    def xdel(self, name, *ids) -> Any: ...
++    def xgroup_create(self, name, groupname, id: str | int = "$", mkstream: bool = False, entries_read: int | None = None) -> Any: ...
++    def xgroup_delconsumer(self, name, groupname, consumername) -> Any: ...
++    def xgroup_destroy(self, name, groupname) -> Any: ...
++    def xgroup_createconsumer(self, name, groupname, consumername) -> Any: ...
++    def xgroup_setid(self, name, groupname, id, entries_read: int | None = None) -> Any: ...
++    def xinfo_consumers(self, name, groupname) -> Any: ...
++    def xinfo_groups(self, name) -> Any: ...
++    def xinfo_stream(self, name, full: bool = False) -> Any: ...
++    def xlen(self, name: _Key) -> Any: ...
++    def xpending(self, name, groupname) -> Any: ...
++    def xpending_range(
++        self, name: _Key, groupname, min, max, count: int, consumername: Incomplete | None = None, idle: int | None = None
++    ) -> Any: ...
++    def xrange(self, name, min: str = "-", max: str = "+", count: Incomplete | None = None) -> Any: ...
++    def xread(self, streams, count: Incomplete | None = None, block: Incomplete | None = None) -> Any: ...
++    def xreadgroup(
++        self,
++        groupname,
++        consumername,
++        streams,
++        count: Incomplete | None = None,
++        block: Incomplete | None = None,
++        noack: bool = False,
++    ) -> Any: ...
++    def xrevrange(self, name, max: str = "+", min: str = "-", count: Incomplete | None = None) -> Any: ...
++    def xtrim(
++        self, name, maxlen: int | None = None, approximate: bool = True, minid: Incomplete | None = None, limit: int | None = None
++    ) -> Any: ...
++    # endregion
++    # region sorted set commands
++    def zadd(
++        self,
++        name: _Key,
++        mapping: Mapping[_Key, _Value],
++        nx: bool = False,
++        xx: bool = False,
++        ch: bool = False,
++        incr: bool = False,
++        gt: bool = False,
++        lt: bool = False,
++    ) -> Any: ...
++    def zcard(self, name: _Key) -> Any: ...
++    def zcount(self, name: _Key, min: _Value, max: _Value) -> Any: ...
++    def zdiff(self, keys, withscores: bool = False) -> Any: ...
++    def zdiffstore(self, dest, keys) -> Any: ...
++    def zincrby(self, name: _Key, amount: float, value: _Value) -> Any: ...
++    def zinter(self, keys, aggregate: Incomplete | None = None, withscores: bool = False) -> Any: ...
++    def zinterstore(self, dest: _Key, keys: Iterable[_Key], aggregate: Literal["SUM", "MIN", "MAX"] | None = None) -> Any: ...
++    def zlexcount(self, name: _Key, min: _Value, max: _Value) -> Any: ...
++    def zpopmax(self, name: _Key, count: int | None = None) -> Any: ...
++    def zpopmin(self, name: _Key, count: int | None = None) -> Any: ...
++    def zrandmember(self, key, count: Incomplete | None = None, withscores: bool = False) -> Any: ...
++    @overload
++    def bzpopmax(self, keys: _Key | Iterable[_Key], timeout: Literal[0] = 0) -> Any: ...
++    @overload
++    def bzpopmax(self, keys: _Key | Iterable[_Key], timeout: float) -> Any: ...
++    @overload
++    def bzpopmin(self, keys: _Key | Iterable[_Key], timeout: Literal[0] = 0) -> Any: ...
++    @overload
++    def bzpopmin(self, keys: _Key | Iterable[_Key], timeout: float) -> Any: ...
++    @overload  # type: ignore[override]
++    def zrange(
++        self,
++        name: _Key,
++        start: int,
++        end: int,
++        desc: bool,
++        withscores: Literal[True],
++        score_cast_func: Callable[[_StrType], Any],
++        byscore: bool = False,
++        bylex: bool = False,
++        offset: int | None = None,
++        num: int | None = None,
++    ) -> Any: ...
++    @overload
++    def zrange(
++        self,
++        name: _Key,
++        start: int,
++        end: int,
++        desc: bool,
++        withscores: Literal[True],
++        score_cast_func: Callable[[_StrType], float] = ...,
++        byscore: bool = False,
++        bylex: bool = False,
++        offset: int | None = None,
++        num: int | None = None,
++    ) -> Any: ...
++    @overload
++    def zrange(
++        self,
++        name: _Key,
++        start: int,
++        end: int,
++        *,
++        withscores: Literal[True],
++        score_cast_func: Callable[[_StrType], None],
++        byscore: bool = False,
++        bylex: bool = False,
++        offset: int | None = None,
++        num: int | None = None,
++    ) -> Any: ...
++    @overload
++    def zrange(
++        self,
++        name: _Key,
++        start: int,
++        end: int,
++        *,
++        withscores: Literal[True],
++        score_cast_func: Callable[[_StrType], float] = ...,
++        byscore: bool = False,
++        bylex: bool = False,
++        offset: int | None = None,
++        num: int | None = None,
++    ) -> Any: ...
++    @overload
++    def zrange(
++        self,
++        name: _Key,
++        start: int,
++        end: int,
++        desc: bool = False,
++        withscores: bool = False,
++        score_cast_func: Callable[[_StrType], Any] = ...,
++        byscore: bool = False,
++        bylex: bool = False,
++        offset: int | None = None,
++        num: int | None = None,
++    ) -> Any: ...
++    @overload  # type: ignore[override]
++    def zrevrange(
++        self, name: _Key, start: int, end: int, withscores: Literal[True], score_cast_func: Callable[[_StrType], None]
++    ) -> Any: ...
++    @overload
++    def zrevrange(self, name: _Key, start: int, end: int, withscores: Literal[True]) -> Any: ...
++    @overload
++    def zrevrange(
++        self, name: _Key, start: int, end: int, withscores: bool = False, score_cast_func: Callable[[Any], Any] = ...
++    ) -> Any: ...
++    def zrangestore(
++        self,
++        dest,
++        name,
++        start,
++        end,
++        byscore: bool = False,
++        bylex: bool = False,
++        desc: bool = False,
++        offset: Incomplete | None = None,
++        num: Incomplete | None = None,
++    ) -> Any: ...
++    def zrangebylex(self, name: _Key, min: _Value, max: _Value, start: int | None = None, num: int | None = None) -> Any: ...
++    def zrevrangebylex(self, name: _Key, max: _Value, min: _Value, start: int | None = None, num: int | None = None) -> Any: ...
++    @overload  # type: ignore[override]
++    def zrangebyscore(
++        self,
++        name: _Key,
++        min: _Value,
++        max: _Value,
++        start: int | None = None,
++        num: int | None = None,
++        *,
++        withscores: Literal[True],
++        score_cast_func: Callable[[_StrType], None],
++    ) -> Any: ...
++    @overload
++    def zrangebyscore(
++        self, name: _Key, min: _Value, max: _Value, start: int | None = None, num: int | None = None, *, withscores: Literal[True]
++    ) -> Any: ...
++    @overload
++    def zrangebyscore(
++        self,
++        name: _Key,
++        min: _Value,
++        max: _Value,
++        start: int | None = None,
++        num: int | None = None,
++        withscores: bool = False,
++        score_cast_func: Callable[[_StrType], Any] = ...,
++    ) -> Any: ...
++    @overload
++    def zrevrangebyscore(
++        self,
++        name: _Key,
++        max: _Value,
++        min: _Value,
++        start: int | None = None,
++        num: int | None = None,
++        *,
++        withscores: Literal[True],
++        score_cast_func: Callable[[_StrType], Any],
++    ) -> Any: ...
++    @overload
++    def zrevrangebyscore(
++        self, name: _Key, max: _Value, min: _Value, start: int | None = None, num: int | None = None, *, withscores: Literal[True]
++    ) -> Any: ...
++    @overload
++    def zrevrangebyscore(
++        self,
++        name: _Key,
++        max: _Value,
++        min: _Value,
++        start: int | None = None,
++        num: int | None = None,
++        withscores: bool = False,
++        score_cast_func: Callable[[_StrType], Any] = ...,
++    ) -> Any: ...
++    def zrank(self, name: _Key, value: _Value, withscore: bool = False) -> Any: ...
++    def zrem(self, name: _Key, *values: _Value) -> Any: ...
++    def zremrangebylex(self, name: _Key, min: _Value, max: _Value) -> Any: ...
++    def zremrangebyrank(self, name: _Key, min: int, max: int) -> Any: ...
++    def zremrangebyscore(self, name: _Key, min: _Value, max: _Value) -> Any: ...
++    def zrevrank(self, name: _Key, value: _Value, withscore: bool = False) -> Any: ...
++    def zscore(self, name: _Key, value: _Value) -> Any: ...
++    def zunion(self, keys, aggregate: Incomplete | None = None, withscores: bool = False) -> Any: ...
++    def zunionstore(self, dest: _Key, keys: Iterable[_Key], aggregate: Literal["SUM", "MIN", "MAX"] | None = None) -> Any: ...
++    def zmscore(self, key, members) -> Any: ...
++    # endregion
++    # region management commands
++    def bgrewriteaof(self, **kwargs: _CommandOptions) -> Any: ...
++    def bgsave(self, schedule: bool = True, **kwargs: _CommandOptions) -> Any: ...
++    def role(self) -> Any: ...
++    def client_kill(self, address: str, **kwargs: _CommandOptions) -> Any: ...
++    def client_kill_filter(
++        self,
++        _id: Incomplete | None = None,
++        _type: Incomplete | None = None,
++        addr: Incomplete | None = None,
++        skipme: Incomplete | None = None,
++        laddr: Incomplete | None = None,
++        user: Incomplete | None = None,
++        **kwargs: _CommandOptions,
++    ) -> Any: ...
++    def client_info(self, **kwargs: _CommandOptions) -> Any: ...
++    def client_list(self, _type: str | None = None, client_id: list[str] = [], **kwargs: _CommandOptions) -> Any: ...
++    def client_getname(self, **kwargs: _CommandOptions) -> Any: ...
++    def client_getredir(self, **kwargs: _CommandOptions) -> Any: ...
++    def client_reply(self, reply, **kwargs: _CommandOptions) -> Any: ...
++    def client_id(self, **kwargs: _CommandOptions) -> Any: ...
++    def client_tracking_on(
++        self,
++        clientid: Incomplete | None = None,
++        prefix=[],
++        bcast: bool = False,
++        optin: bool = False,
++        optout: bool = False,
++        noloop: bool = False,
++    ) -> Any: ...
++    def client_tracking_off(
++        self,
++        clientid: Incomplete | None = None,
++        prefix=[],
++        bcast: bool = False,
++        optin: bool = False,
++        optout: bool = False,
++        noloop: bool = False,
++    ) -> Any: ...
++    def client_tracking(
++        self,
++        on: bool = True,
++        clientid: Incomplete | None = None,
++        prefix=[],
++        bcast: bool = False,
++        optin: bool = False,
++        optout: bool = False,
++        noloop: bool = False,
++        **kwargs: _CommandOptions,
++    ) -> Any: ...
++    def client_trackinginfo(self, **kwargs: _CommandOptions) -> Any: ...
++    def client_setname(self, name: str, **kwargs: _CommandOptions) -> Any: ...
++    def client_unblock(self, client_id, error: bool = False, **kwargs: _CommandOptions) -> Any: ...
++    def client_pause(self, timeout, all: bool = True, **kwargs: _CommandOptions) -> Any: ...
++    def client_unpause(self, **kwargs: _CommandOptions) -> Any: ...
++    def command(self, **kwargs: _CommandOptions) -> Any: ...
++    def command_info(self, **kwargs: _CommandOptions) -> Any: ...
++    def command_count(self, **kwargs: _CommandOptions) -> Any: ...
++    def config_get(self, pattern: PatternT = "*", *args: PatternT, **kwargs: _CommandOptions) -> Any: ...
++    def config_set(self, name: KeyT, value: EncodableT, *args: KeyT | EncodableT, **kwargs: _CommandOptions) -> Any: ...
++    def config_resetstat(self, **kwargs: _CommandOptions) -> Any: ...
++    def config_rewrite(self, **kwargs: _CommandOptions) -> Any: ...
++    def dbsize(self, **kwargs: _CommandOptions) -> Any: ...
++    def debug_object(self, key, **kwargs: _CommandOptions) -> Any: ...
++    def debug_segfault(self, **kwargs: _CommandOptions) -> Any: ...
++    def echo(self, value: _Value, **kwargs: _CommandOptions) -> Any: ...
++    def flushall(self, asynchronous: bool = False, **kwargs: _CommandOptions) -> Any: ...
++    def flushdb(self, asynchronous: bool = False, **kwargs: _CommandOptions) -> Any: ...
++    def sync(self) -> Any: ...
++    def psync(self, replicationid, offset) -> Any: ...
++    def swapdb(self, first, second, **kwargs: _CommandOptions) -> Any: ...
++    def select(self, index, **kwargs: _CommandOptions) -> Any: ...
++    def info(self, section: _Key | None = None, *args: _Key, **kwargs: _CommandOptions) -> Any: ...
++    def lastsave(self, **kwargs: _CommandOptions) -> Any: ...
++    def lolwut(self, *version_numbers: _Value, **kwargs: _CommandOptions) -> Any: ...
++    def migrate(
++        self,
++        host,
++        port,
++        keys,
++        destination_db,
++        timeout,
++        copy: bool = False,
++        replace: bool = False,
++        auth: Incomplete | None = None,
++        **kwargs: _CommandOptions,
++    ) -> Any: ...
++    def object(self, infotype, key, **kwargs: _CommandOptions) -> Any: ...
++    def memory_doctor(self, **kwargs: _CommandOptions) -> Any: ...
++    def memory_help(self, **kwargs: _CommandOptions) -> Any: ...
++    def memory_stats(self, **kwargs: _CommandOptions) -> Any: ...
++    def memory_malloc_stats(self, **kwargs: _CommandOptions) -> Any: ...
++    def memory_usage(self, key, samples: Incomplete | None = None, **kwargs: _CommandOptions) -> Any: ...
++    def memory_purge(self, **kwargs: _CommandOptions) -> Any: ...
++    def ping(self, **kwargs: _CommandOptions) -> Any: ...
++    def quit(self, **kwargs: _CommandOptions) -> Any: ...
++    def replicaof(self, *args, **kwargs: _CommandOptions) -> Any: ...
++    def save(self, **kwargs: _CommandOptions) -> Any: ...
++    def shutdown(
++        self,
++        save: bool = False,
++        nosave: bool = False,
++        now: bool = False,
++        force: bool = False,
++        abort: bool = False,
++        **kwargs: _CommandOptions,
++    ) -> Any: ...
++    def slaveof(self, host: Incomplete | None = None, port: Incomplete | None = None, **kwargs: _CommandOptions) -> Any: ...
++    def slowlog_get(self, num: Incomplete | None = None, **kwargs: _CommandOptions) -> Any: ...
++    def slowlog_len(self, **kwargs: _CommandOptions) -> Any: ...
++    def slowlog_reset(self, **kwargs: _CommandOptions) -> Any: ...
++    def time(self, **kwargs: _CommandOptions) -> Any: ...
++    def wait(self, num_replicas, timeout, **kwargs: _CommandOptions) -> Any: ...
++    # endregion
++    # region module commands
++    def module_load(self, path, *args) -> Any: ...
++    def module_unload(self, name) -> Any: ...
++    def module_list(self) -> Any: ...
++    def command_getkeys(self, *args) -> Any: ...
++    # endregion
++    # region pubsub commands
++    def publish(self, channel: _Key, message: _Key, **kwargs: _CommandOptions) -> Any: ...
++    def pubsub_channels(self, pattern: _Key = "*", **kwargs: _CommandOptions) -> Any: ...
++    def pubsub_numpat(self, **kwargs: _CommandOptions) -> Any: ...
++    def pubsub_numsub(self, *args: _Key, **kwargs: _CommandOptions) -> Any: ...
++    # endregion
++    # region script commands
++    def eval(self, script, numkeys, *keys_and_args) -> Any: ...
++    def evalsha(self, sha, numkeys, *keys_and_args) -> Any: ...
++    def script_exists(self, *args) -> Any: ...
++    def script_debug(self, *args) -> Any: ...
++    def script_flush(self, sync_type: Incomplete | None = None) -> Any: ...
++    def script_kill(self) -> Any: ...
++    def script_load(self, script) -> Any: ...
++    def register_script(self, script: str | _StrType) -> Any: ...  # type: ignore[override]
++    # endregion
+diff --git a/valkey/asyncio/cluster.pyi b/valkey/asyncio/cluster.pyi
+new file mode 100644
+index 0000000..257769d
+--- /dev/null
++++ b/valkey/asyncio/cluster.pyi
+@@ -0,0 +1,229 @@
++from _typeshed import Incomplete
++from collections.abc import Awaitable, Callable, Mapping
++from types import TracebackType
++from typing import Any, Generic, TypeVar
++from typing_extensions import Self
++
++from valkey.asyncio.client import ResponseCallbackT
++from valkey.asyncio.connection import AbstractConnection, BaseParser, Connection, Encoder
++from valkey.asyncio.parser import CommandsParser
++from valkey.client import AbstractValkey
++from valkey.cluster import AbstractValkeyCluster, LoadBalancer
++
++# TODO: add AsyncValkeyClusterCommands stubs
++# from valkey.commands import AsyncValkeyClusterCommands
++from valkey.commands.core import _StrType
++from valkey.credentials import CredentialProvider
++from valkey.exceptions import ResponseError
++from valkey.retry import Retry
++from valkey.typing import AnyKeyT, EncodableT, KeyT
++
++TargetNodesT = TypeVar("TargetNodesT", str, ClusterNode, list[ClusterNode], dict[Any, ClusterNode])  # noqa: Y001
++
++# It uses `DefaultParser` in real life, but it is a dynamic base class.
++class ClusterParser(BaseParser):
++    def on_disconnect(self) -> None: ...
++    def on_connect(self, connection: AbstractConnection) -> None: ...
++    async def can_read_destructive(self) -> bool: ...
++    async def read_response(self, disable_decoding: bool = False) -> EncodableT | ResponseError | list[EncodableT] | None: ...
++
++class ValkeyCluster(AbstractValkey, AbstractValkeyCluster, Generic[_StrType]):  # TODO: AsyncValkeyClusterCommands
++    @classmethod
++    def from_url(
++        cls,
++        url: str,
++        *,
++        host: str | None = None,
++        port: str | int = 6379,
++        # Cluster related kwargs
++        startup_nodes: list[ClusterNode] | None = None,
++        require_full_coverage: bool = True,
++        read_from_replicas: bool = False,
++        reinitialize_steps: int = 5,
++        cluster_error_retry_attempts: int = 3,
++        connection_error_retry_attempts: int = 3,
++        max_connections: int = 2147483648,
++        # Client related kwargs
++        db: str | int = 0,
++        path: str | None = None,
++        credential_provider: CredentialProvider | None = None,
++        username: str | None = None,
++        password: str | None = None,
++        client_name: str | None = None,
++        # Encoding related kwargs
++        encoding: str = "utf-8",
++        encoding_errors: str = "strict",
++        decode_responses: bool = False,
++        # Connection related kwargs
++        health_check_interval: float = 0,
++        socket_connect_timeout: float | None = None,
++        socket_keepalive: bool = False,
++        socket_keepalive_options: Mapping[int, int | bytes] | None = None,
++        socket_timeout: float | None = None,
++        retry: Retry | None = None,
++        retry_on_error: list[Exception] | None = None,
++        # SSL related kwargs
++        ssl: bool = False,
++        ssl_ca_certs: str | None = None,
++        ssl_ca_data: str | None = None,
++        ssl_cert_reqs: str = "required",
++        ssl_certfile: str | None = None,
++        ssl_check_hostname: bool = False,
++        ssl_keyfile: str | None = None,
++        address_remap: Callable[[str, int], tuple[str, int]] | None = None,
++    ) -> Self: ...
++
++    retry: Retry | None
++    connection_kwargs: dict[str, Any]
++    nodes_manager: NodesManager
++    encoder: Encoder
++    read_from_replicas: bool
++    reinitialize_steps: int
++    cluster_error_retry_attempts: int
++    reinitialize_counter: int
++    commands_parser: CommandsParser
++    node_flags: set[str]
++    command_flags: dict[str, str]
++    response_callbacks: Incomplete
++    result_callbacks: dict[str, Callable[[Incomplete, Incomplete], Incomplete]]
++
++    def __init__(
++        self,
++        host: str | None = None,
++        port: str | int = 6379,
++        # Cluster related kwargs
++        startup_nodes: list[ClusterNode] | None = None,
++        require_full_coverage: bool = True,
++        read_from_replicas: bool = False,
++        reinitialize_steps: int = 5,
++        cluster_error_retry_attempts: int = 3,
++        connection_error_retry_attempts: int = 3,
++        max_connections: int = 2147483648,
++        # Client related kwargs
++        db: str | int = 0,
++        path: str | None = None,
++        credential_provider: CredentialProvider | None = None,
++        username: str | None = None,
++        password: str | None = None,
++        client_name: str | None = None,
++        # Encoding related kwargs
++        encoding: str = "utf-8",
++        encoding_errors: str = "strict",
++        decode_responses: bool = False,
++        # Connection related kwargs
++        health_check_interval: float = 0,
++        socket_connect_timeout: float | None = None,
++        socket_keepalive: bool = False,
++        socket_keepalive_options: Mapping[int, int | bytes] | None = None,
++        socket_timeout: float | None = None,
++        retry: Retry | None = None,
++        retry_on_error: list[Exception] | None = None,
++        # SSL related kwargs
++        ssl: bool = False,
++        ssl_ca_certs: str | None = None,
++        ssl_ca_data: str | None = None,
++        ssl_cert_reqs: str = "required",
++        ssl_certfile: str | None = None,
++        ssl_check_hostname: bool = False,
++        ssl_keyfile: str | None = None,
++        address_remap: Callable[[str, int], tuple[str, int]] | None = None,
++    ) -> None: ...
++    async def initialize(self) -> Self: ...
++    async def close(self) -> None: ...
++    async def __aenter__(self) -> Self: ...
++    async def __aexit__(
++        self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None
++    ) -> None: ...
++    def __await__(self) -> Awaitable[Self]: ...
++    def __del__(self) -> None: ...
++    async def on_connect(self, connection: Connection) -> None: ...
++    def get_nodes(self) -> list[ClusterNode]: ...
++    def get_primaries(self) -> list[ClusterNode]: ...
++    def get_replicas(self) -> list[ClusterNode]: ...
++    def get_random_node(self) -> ClusterNode: ...
++    def get_default_node(self) -> ClusterNode: ...
++    def set_default_node(self, node: ClusterNode) -> None: ...
++    def get_node(self, host: str | None = None, port: int | None = None, node_name: str | None = None) -> ClusterNode | None: ...
++    def get_node_from_key(self, key: str, replica: bool = False) -> ClusterNode | None: ...
++    def keyslot(self, key: EncodableT) -> int: ...
++    def get_encoder(self) -> Encoder: ...
++    def get_connection_kwargs(self) -> dict[str, Any | None]: ...
++    def set_response_callback(self, command: str, callback: ResponseCallbackT) -> None: ...
++    async def execute_command(self, *args: EncodableT, **kwargs: Any) -> Any: ...
++    def pipeline(self, transaction: Any | None = None, shard_hint: Any | None = None) -> ClusterPipeline[_StrType]: ...
++
++class ClusterNode:
++    host: str
++    port: str | int
++    name: str
++    server_type: str | None
++    max_connections: int
++    connection_class: type[Connection]
++    connection_kwargs: dict[str, Any]
++    response_callbacks: dict[Incomplete, Incomplete]
++    def __init__(
++        self,
++        host: str,
++        port: str | int,
++        server_type: str | None = None,
++        *,
++        max_connections: int = 2147483648,
++        connection_class: type[Connection] = ...,
++        **connection_kwargs: Any,
++    ) -> None: ...
++    def __eq__(self, obj: object) -> bool: ...
++    def __del__(self) -> None: ...
++    async def disconnect(self) -> None: ...
++    def acquire_connection(self) -> Connection: ...
++    async def parse_response(self, connection: Connection, command: str, **kwargs: Any) -> Any: ...
++    async def execute_command(self, *args: Any, **kwargs: Any) -> Any: ...
++    async def execute_pipeline(self, commands: list[PipelineCommand]) -> bool: ...
++
++class NodesManager:
++    startup_nodes: dict[str, ClusterNode]
++    require_full_coverage: bool
++    connection_kwargs: dict[str, Any]
++    default_node: ClusterNode | None
++    nodes_cache: dict[str, ClusterNode]
++    slots_cache: dict[int, list[ClusterNode]]
++    read_load_balancer: LoadBalancer
++    address_remap: Callable[[str, int], tuple[str, int]] | None
++    def __init__(
++        self,
++        startup_nodes: list[ClusterNode],
++        require_full_coverage: bool,
++        connection_kwargs: dict[str, Any],
++        address_remap: Callable[[str, int], tuple[str, int]] | None = None,
++    ) -> None: ...
++    def get_node(self, host: str | None = None, port: int | None = None, node_name: str | None = None) -> ClusterNode | None: ...
++    def set_nodes(self, old: dict[str, ClusterNode], new: dict[str, ClusterNode], remove_old: bool = False) -> None: ...
++    def get_node_from_slot(self, slot: int, read_from_replicas: bool = False) -> ClusterNode: ...
++    def get_nodes_by_server_type(self, server_type: str) -> list[ClusterNode]: ...
++    async def initialize(self) -> None: ...
++    async def close(self, attr: str = "nodes_cache") -> None: ...
++    def remap_host_port(self, host: str, port: int) -> tuple[str, int]: ...
++
++class ClusterPipeline(AbstractValkey, AbstractValkeyCluster, Generic[_StrType]):  # TODO: AsyncValkeyClusterCommands
++    def __init__(self, client: ValkeyCluster[_StrType]) -> None: ...
++    async def initialize(self) -> Self: ...
++    async def __aenter__(self) -> Self: ...
++    async def __aexit__(
++        self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None
++    ) -> None: ...
++    def __await__(self) -> Awaitable[Self]: ...
++    def __enter__(self) -> Self: ...
++    def __exit__(
++        self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None
++    ) -> None: ...
++    def __bool__(self) -> bool: ...
++    def __len__(self) -> int: ...
++    def execute_command(self, *args: KeyT | EncodableT, **kwargs: Any) -> Self: ...
++    async def execute(self, raise_on_error: bool = True, allow_redirections: bool = True) -> list[Any]: ...
++    def mset_nonatomic(self, mapping: Mapping[AnyKeyT, EncodableT]) -> Self: ...
++
++class PipelineCommand:
++    args: Any
++    kwargs: Any
++    position: int
++    result: Exception | None | Any
++    def __init__(self, position: int, *args: Any, **kwargs: Any) -> None: ...
+diff --git a/valkey/asyncio/connection.pyi b/valkey/asyncio/connection.pyi
+new file mode 100644
+index 0000000..b0525ff
+--- /dev/null
++++ b/valkey/asyncio/connection.pyi
+@@ -0,0 +1,363 @@
++import asyncio
++import enum
++import ssl
++from _typeshed import Unused
++from abc import abstractmethod
++from collections.abc import Callable, Iterable, Mapping
++from types import MappingProxyType
++from typing import Any, Final, Generic, Literal, Protocol, TypedDict, TypeVar, overload
++from typing_extensions import Self, TypeAlias
++
++from valkey.asyncio.retry import Retry
++from valkey.credentials import CredentialProvider
++from valkey.exceptions import AuthenticationError, ValkeyError, ResponseError
++from valkey.typing import EncodableT, EncodedT
++
++_SSLVerifyMode: TypeAlias = Literal["none", "optional", "required"]
++
++SYM_STAR: Final[bytes]
++SYM_DOLLAR: Final[bytes]
++SYM_CRLF: Final[bytes]
++SYM_LF: Final[bytes]
++SYM_EMPTY: Final[bytes]
++
++SERVER_CLOSED_CONNECTION_ERROR: Final[str]
++
++class _Sentinel(enum.Enum):
++    sentinel = object()
++
++SENTINEL: Final[object]
++MODULE_LOAD_ERROR: Final[str]
++NO_SUCH_MODULE_ERROR: Final[str]
++MODULE_UNLOAD_NOT_POSSIBLE_ERROR: Final[str]
++MODULE_EXPORTS_DATA_TYPES_ERROR: Final[str]
++NO_AUTH_SET_ERROR: Final[dict[str, type[AuthenticationError]]]
++
++class Encoder:
++    encoding: str
++    encoding_errors: str
++    decode_responses: bool
++    def __init__(self, encoding: str, encoding_errors: str, decode_responses: bool) -> None: ...
++    def encode(self, value: EncodableT) -> EncodedT: ...
++    def decode(self, value: EncodableT, force: bool = False) -> EncodableT: ...
++
++ExceptionMappingT: TypeAlias = Mapping[str, type[Exception] | Mapping[str, type[Exception]]]
++
++class BaseParser:
++    EXCEPTION_CLASSES: ExceptionMappingT
++    def __init__(self, socket_read_size: int) -> None: ...
++    @classmethod
++    def parse_error(cls, response: str) -> ResponseError: ...
++    @abstractmethod
++    def on_disconnect(self) -> None: ...
++    @abstractmethod
++    def on_connect(self, connection: AbstractConnection) -> None: ...
++    @abstractmethod
++    async def can_read_destructive(self) -> bool: ...
++    @abstractmethod
++    async def read_response(self, disable_decoding: bool = False) -> EncodableT | ResponseError | list[EncodableT] | None: ...
++
++class PythonParser(BaseParser):
++    encoder: Encoder | None
++    def __init__(self, socket_read_size: int) -> None: ...
++    def on_connect(self, connection: AbstractConnection) -> None: ...
++    def on_disconnect(self) -> None: ...
++    async def can_read_destructive(self) -> bool: ...
++    async def read_response(self, disable_decoding: bool = False) -> EncodableT | ResponseError | None: ...
++
++class LibvalkeyParser(BaseParser):
++    def __init__(self, socket_read_size: int) -> None: ...
++    def on_connect(self, connection: AbstractConnection) -> None: ...
++    def on_disconnect(self) -> None: ...
++    async def can_read_destructive(self) -> bool: ...
++    async def read_from_socket(self) -> Literal[True]: ...
++    async def read_response(self, disable_decoding: bool = False) -> EncodableT | list[EncodableT]: ...
++
++DefaultParser: type[PythonParser | LibvalkeyParser]
++
++class ConnectCallbackProtocol(Protocol):
++    def __call__(self, connection: Connection): ...
++
++class AsyncConnectCallbackProtocol(Protocol):
++    async def __call__(self, connection: Connection): ...
++
++ConnectCallbackT: TypeAlias = ConnectCallbackProtocol | AsyncConnectCallbackProtocol
++
++class AbstractConnection:
++    pid: int
++    db: str | int
++    client_name: str | None
++    credential_provider: CredentialProvider | None
++    password: str | None
++    username: str | None
++    socket_timeout: float | None
++    socket_connect_timeout: float | None
++    retry_on_timeout: bool
++    retry_on_error: list[type[Exception]]
++    retry: Retry
++    health_check_interval: float
++    next_health_check: float
++    encoder: Encoder
++    valkey_connect_func: ConnectCallbackT | None
++
++    def __init__(
++        self,
++        *,
++        db: str | int = 0,
++        password: str | None = None,
++        socket_timeout: float | None = None,
++        socket_connect_timeout: float | None = None,
++        retry_on_timeout: bool = False,
++        retry_on_error: list[type[ValkeyError]] | _Sentinel = ...,
++        encoding: str = "utf-8",
++        encoding_errors: str = "strict",
++        decode_responses: bool = False,
++        parser_class: type[BaseParser] = ...,
++        socket_read_size: int = 65536,
++        health_check_interval: float = 0,
++        client_name: str | None = None,
++        username: str | None = None,
++        retry: Retry | None = None,
++        valkey_connect_func: ConnectCallbackT | None = None,
++        encoder_class: type[Encoder] = ...,
++        credential_provider: CredentialProvider | None = None,
++    ) -> None: ...
++    @abstractmethod
++    def repr_pieces(self) -> list[tuple[str, Any]]: ...
++    @property
++    def is_connected(self) -> bool: ...
++    def register_connect_callback(self, callback: ConnectCallbackT) -> None: ...
++    def clear_connect_callbacks(self) -> None: ...
++    def set_parser(self, parser_class: type[BaseParser]) -> None: ...
++    async def connect(self) -> None: ...
++    async def on_connect(self) -> None: ...
++    async def disconnect(self, nowait: bool = False) -> None: ...
++    async def check_health(self) -> None: ...
++    async def send_packed_command(self, command: bytes | str | Iterable[bytes], check_health: bool = True) -> None: ...
++    async def send_command(self, *args: Any, **kwargs: Any) -> None: ...
++    async def can_read_destructive(self) -> bool: ...
++    async def read_response(
++        self, disable_decoding: bool = False, timeout: float | None = None, *, disconnect_on_error: bool = True
++    ) -> EncodableT | list[EncodableT] | None: ...
++    def pack_command(self, *args: EncodableT) -> list[bytes]: ...
++    def pack_commands(self, commands: Iterable[Iterable[EncodableT]]) -> list[bytes]: ...
++
++class Connection(AbstractConnection):
++    host: str
++    port: int
++    socket_keepalive: bool
++    socket_keepalive_options: Mapping[int, int | bytes] | None
++    socket_type: int
++
++    def __init__(
++        self,
++        *,
++        host: str = "localhost",
++        port: str | int = 6379,
++        socket_keepalive: bool = False,
++        socket_keepalive_options: Mapping[int, int | bytes] | None = None,
++        socket_type: int = 0,
++        # **kwargs forwarded to AbstractConnection.
++        db: str | int = 0,
++        password: str | None = None,
++        socket_timeout: float | None = None,
++        socket_connect_timeout: float | None = None,
++        retry_on_timeout: bool = False,
++        retry_on_error: list[type[ValkeyError]] | _Sentinel = ...,
++        encoding: str = "utf-8",
++        encoding_errors: str = "strict",
++        decode_responses: bool = False,
++        parser_class: type[BaseParser] = ...,
++        socket_read_size: int = 65536,
++        health_check_interval: float = 0,
++        client_name: str | None = None,
++        username: str | None = None,
++        retry: Retry | None = None,
++        valkey_connect_func: ConnectCallbackT | None = None,
++        encoder_class: type[Encoder] = ...,
++        credential_provider: CredentialProvider | None = None,
++    ) -> None: ...
++    def repr_pieces(self) -> list[tuple[str, Any]]: ...
++
++class SSLConnection(Connection):
++    ssl_context: ValkeySSLContext
++    def __init__(
++        self,
++        ssl_keyfile: str | None = None,
++        ssl_certfile: str | None = None,
++        ssl_cert_reqs: _SSLVerifyMode = "required",
++        ssl_ca_certs: str | None = None,
++        ssl_ca_data: str | None = None,
++        ssl_check_hostname: bool = False,
++        *,
++        # **kwargs forwarded to Connection.
++        host: str = "localhost",
++        port: str | int = 6379,
++        socket_keepalive: bool = False,
++        socket_keepalive_options: Mapping[int, int | bytes] | None = None,
++        socket_type: int = 0,
++        db: str | int = 0,
++        password: str | None = None,
++        socket_timeout: float | None = None,
++        socket_connect_timeout: float | None = None,
++        retry_on_timeout: bool = False,
++        retry_on_error: list[type[ValkeyError]] | _Sentinel = ...,
++        encoding: str = "utf-8",
++        encoding_errors: str = "strict",
++        decode_responses: bool = False,
++        parser_class: type[BaseParser] = ...,
++        socket_read_size: int = 65536,
++        health_check_interval: float = 0,
++        client_name: str | None = None,
++        username: str | None = None,
++        retry: Retry | None = None,
++        valkey_connect_func: ConnectCallbackT | None = None,
++        encoder_class: type[Encoder] = ...,
++        credential_provider: CredentialProvider | None = None,
++    ) -> None: ...
++    @property
++    def keyfile(self) -> str | None: ...
++    @property
++    def certfile(self) -> str | None: ...
++    @property
++    def cert_reqs(self) -> ssl.VerifyMode: ...
++    @property
++    def ca_certs(self) -> str | None: ...
++    @property
++    def ca_data(self) -> str | None: ...
++    @property
++    def check_hostname(self) -> bool: ...
++
++class ValkeySSLContext:
++    keyfile: str | None
++    certfile: str | None
++    cert_reqs: ssl.VerifyMode
++    ca_certs: str | None
++    ca_data: str | None
++    check_hostname: bool
++    context: ssl.SSLContext | None
++    def __init__(
++        self,
++        keyfile: str | None = None,
++        certfile: str | None = None,
++        cert_reqs: _SSLVerifyMode | None = None,
++        ca_certs: str | None = None,
++        ca_data: str | None = None,
++        check_hostname: bool = False,
++    ) -> None: ...
++    def get(self) -> ssl.SSLContext: ...
++
++class UnixDomainSocketConnection(Connection):
++    path: str
++    def __init__(
++        self,
++        *,
++        path: str = "",
++        # **kwargs forwarded to AbstractConnection.
++        db: str | int = 0,
++        password: str | None = None,
++        socket_timeout: float | None = None,
++        socket_connect_timeout: float | None = None,
++        retry_on_timeout: bool = False,
++        retry_on_error: list[type[ValkeyError]] | _Sentinel = ...,
++        encoding: str = "utf-8",
++        encoding_errors: str = "strict",
++        decode_responses: bool = False,
++        parser_class: type[BaseParser] = ...,
++        socket_read_size: int = 65536,
++        health_check_interval: float = 0,
++        client_name: str | None = None,
++        username: str | None = None,
++        retry: Retry | None = None,
++        valkey_connect_func: ConnectCallbackT | None = None,
++        encoder_class: type[Encoder] = ...,
++        credential_provider: CredentialProvider | None = None,
++    ) -> None: ...
++    def repr_pieces(self) -> list[tuple[str, Any]]: ...
++
++FALSE_STRINGS: Final[tuple[str, ...]]
++
++def to_bool(value: object) -> bool | None: ...
++
++URL_QUERY_ARGUMENT_PARSERS: MappingProxyType[str, Callable[[str], Any]]
++
++class ConnectKwargs(TypedDict):
++    username: str
++    password: str
++    connection_class: type[AbstractConnection]
++    host: str
++    port: int
++    db: int
++    path: str
++
++def parse_url(url: str) -> ConnectKwargs: ...
++
++_ConnectionT = TypeVar("_ConnectionT", bound=AbstractConnection)
++
++class ConnectionPool(Generic[_ConnectionT]):
++    # kwargs accepts all arguments from the connection class chosen for
++    # the given URL, except those encoded in the URL itself.
++    @classmethod
++    def from_url(cls, url: str, **kwargs: Any) -> Self: ...
++
++    connection_class: type[_ConnectionT]
++    connection_kwargs: Mapping[str, Any]
++    max_connections: int
++    encoder_class: type[Encoder]
++    pid: int
++
++    @overload
++    def __init__(
++        self: ConnectionPool[_ConnectionT],  # pyright: ignore[reportInvalidTypeVarUse]  #11780
++        connection_class: type[_ConnectionT],
++        max_connections: int | None = None,
++        # **kwargs are passed to the constructed connection instances.
++        **connection_kwargs: Any,
++    ) -> None: ...
++    @overload
++    def __init__(self: ConnectionPool[Connection], *, max_connections: int | None = None, **connection_kwargs) -> None: ...
++    def reset(self) -> None: ...
++    async def get_connection(self, command_name: Unused, *keys: Unused, **options: Unused) -> _ConnectionT: ...
++    def get_encoder(self) -> Encoder: ...
++    def make_connection(self) -> _ConnectionT: ...
++    async def release(self, connection: AbstractConnection) -> None: ...
++    def owns_connection(self, connection: AbstractConnection) -> bool: ...
++    async def disconnect(self, inuse_connections: bool = True) -> None: ...
++    def set_retry(self, retry: Retry) -> None: ...
++
++class BlockingConnectionPool(ConnectionPool[_ConnectionT]):
++    queue_class: type[asyncio.Queue[_ConnectionT | None]]
++    timeout: int | None
++    pool: asyncio.Queue[_ConnectionT | None]
++
++    @overload
++    def __init__(
++        self: BlockingConnectionPool[_ConnectionT],  # pyright: ignore[reportInvalidTypeVarUse]  #11780
++        max_connections: int,
++        timeout: int | None,
++        connection_class: type[_ConnectionT],
++        queue_class: type[asyncio.Queue[_ConnectionT | None]] = ...,
++        # **kwargs are passed to the constructed connection instances.
++        **connection_kwargs: Any,
++    ) -> None: ...
++    @overload
++    def __init__(
++        self: BlockingConnectionPool[_ConnectionT],  # pyright: ignore[reportInvalidTypeVarUse]  #11780
++        max_connections: int = 50,
++        timeout: int | None = 20,
++        *,
++        connection_class: type[_ConnectionT],
++        queue_class: type[asyncio.Queue[_ConnectionT | None]] = ...,
++        # **kwargs are passed to the constructed connection instances.
++        **connection_kwargs: Any,
++    ) -> None: ...
++    @overload
++    def __init__(
++        self: BlockingConnectionPool[Connection],
++        max_connections: int = 50,
++        timeout: int | None = 20,
++        *,
++        queue_class: type[asyncio.Queue[Connection | None]] = ...,
++        # **kwargs are passed to the constructed connection instances.
++        **connection_kwargs: Any,
++    ) -> None: ...
+diff --git a/valkey/asyncio/lock.pyi b/valkey/asyncio/lock.pyi
+new file mode 100644
+index 0000000..018591c
+--- /dev/null
++++ b/valkey/asyncio/lock.pyi
+@@ -0,0 +1,51 @@
++import threading
++from collections.abc import Awaitable
++from types import SimpleNamespace, TracebackType
++from typing import Any, ClassVar
++from typing_extensions import Self
++
++from valkey.asyncio import Valkey
++from valkey.commands.core import AsyncScript
++
++class Lock:
++    lua_release: ClassVar[AsyncScript | None]
++    lua_extend: ClassVar[AsyncScript | None]
++    lua_reacquire: ClassVar[AsyncScript | None]
++    LUA_RELEASE_SCRIPT: ClassVar[str]
++    LUA_EXTEND_SCRIPT: ClassVar[str]
++    LUA_REACQUIRE_SCRIPT: ClassVar[str]
++    valkey: Valkey[Any]
++    name: str | bytes | memoryview
++    timeout: float | None
++    sleep: float
++    blocking: bool
++    blocking_timeout: float | None
++    thread_local: bool
++    local: threading.local | SimpleNamespace
++    def __init__(
++        self,
++        valkey: Valkey[Any],
++        name: str | bytes | memoryview,
++        timeout: float | None = None,
++        sleep: float = 0.1,
++        blocking: bool = True,
++        blocking_timeout: float | None = None,
++        thread_local: bool = True,
++    ) -> None: ...
++    def register_scripts(self) -> None: ...
++    async def __aenter__(self) -> Self: ...
++    async def __aexit__(
++        self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None
++    ) -> None: ...
++    async def acquire(
++        self, blocking: bool | None = None, blocking_timeout: float | None = None, token: str | bytes | None = None
++    ) -> bool: ...
++    async def do_acquire(self, token: str | bytes) -> bool: ...
++    async def locked(self) -> bool: ...
++    async def owned(self) -> bool: ...
++    def release(self) -> Awaitable[None]: ...
++    async def do_release(self, expected_token: bytes) -> None: ...
++    def extend(self, additional_time: float, replace_ttl: bool = False) -> Awaitable[bool]: ...
++    async def do_extend(self, additional_time: float, replace_ttl: bool) -> bool: ...
++    def reacquire(self) -> Awaitable[bool]: ...
++    async def do_reacquire(self) -> bool: ...
+diff --git a/valkey/asyncio/parser.pyi b/valkey/asyncio/parser.pyi
+new file mode 100644
+index 0000000..fe5139a
+--- /dev/null
++++ b/valkey/asyncio/parser.pyi
+@@ -0,0 +1,9 @@
++from _typeshed import Incomplete
++from typing import Any
++
++# TODO: define and use:
++# from valkey.asyncio.cluster import ClusterNode
++
++class CommandsParser:
++    async def initialize(self, node: Incomplete | None = None) -> None: ...  # TODO: ClusterNode
++    async def get_keys(self, *args: Any) -> tuple[str, ...] | None: ...
+diff --git a/valkey/asyncio/retry.pyi b/valkey/asyncio/retry.pyi
+new file mode 100644
+index 0000000..0970df7
+--- /dev/null
++++ b/valkey/asyncio/retry.pyi
+@@ -0,0 +1,12 @@
++from collections.abc import Awaitable, Callable, Iterable
++from typing import TypeVar
++
++from valkey.backoff import AbstractBackoff
++from valkey.exceptions import ValkeyError
++
++_T = TypeVar("_T")
++
++class Retry:
++    def __init__(self, backoff: AbstractBackoff, retries: int, supported_errors: tuple[type[ValkeyError], ...] = ...) -> None: ...
++    def update_supported_errors(self, specified_errors: Iterable[type[ValkeyError]]) -> None: ...
++    async def call_with_retry(self, do: Callable[[], Awaitable[_T]], fail: Callable[[ValkeyError], Awaitable[object]]) -> _T: ...
+diff --git a/valkey/asyncio/sentinel.pyi b/valkey/asyncio/sentinel.pyi
+new file mode 100644
+index 0000000..1fa9e5f
+--- /dev/null
++++ b/valkey/asyncio/sentinel.pyi
+@@ -0,0 +1,162 @@
++from collections.abc import AsyncIterator, Iterable, Mapping
++from typing import Any, Literal, TypedDict, TypeVar, overload
++
++from valkey.asyncio.client import Valkey
++from valkey.asyncio.connection import (
++    BaseParser,
++    ConnectCallbackT,
++    Connection,
++    ConnectionPool,
++    Encoder,
++    SSLConnection,
++    _ConnectionT,
++    _Sentinel,
++)
++from valkey.asyncio.retry import Retry
++from valkey.commands import AsyncSentinelCommands
++from valkey.credentials import CredentialProvider
++from valkey.exceptions import ConnectionError, ValkeyError
++
++_ValkeyT = TypeVar("_ValkeyT", bound=Valkey[Any])
++
++class MasterNotFoundError(ConnectionError): ...
++class SlaveNotFoundError(ConnectionError): ...
++
++class SentinelManagedConnection(Connection):
++    connection_pool: ConnectionPool[Any] | None
++    def __init__(
++        self,
++        *,
++        connection_pool: ConnectionPool[Any] | None,
++        # **kwargs forwarded to Connection.
++        host: str = "localhost",
++        port: str | int = 6379,
++        socket_keepalive: bool = False,
++        socket_keepalive_options: Mapping[int, int | bytes] | None = None,
++        socket_type: int = 0,
++        db: str | int = 0,
++        password: str | None = None,
++        socket_timeout: float | None = None,
++        socket_connect_timeout: float | None = None,
++        retry_on_timeout: bool = False,
++        retry_on_error: list[type[ValkeyError]] | _Sentinel = ...,
++        encoding: str = "utf-8",
++        encoding_errors: str = "strict",
++        decode_responses: bool = False,
++        parser_class: type[BaseParser] = ...,
++        socket_read_size: int = 65536,
++        health_check_interval: float = 0,
++        client_name: str | None = None,
++        username: str | None = None,
++        retry: Retry | None = None,
++        valkey_connect_func: ConnectCallbackT | None = None,
++        encoder_class: type[Encoder] = ...,
++        credential_provider: CredentialProvider | None = None,
++    ) -> None: ...
++    async def connect_to(self, address: tuple[str, int]) -> None: ...
++    async def connect(self) -> None: ...
++
++class SentinelManagedSSLConnection(SentinelManagedConnection, SSLConnection): ...
++
++class SentinelConnectionPool(ConnectionPool[_ConnectionT]):
++    is_master: bool
++    check_connection: bool
++    service_name: str
++    sentinel_manager: Sentinel
++    master_address: tuple[str, int] | None
++    slave_rr_counter: int | None
++
++    def __init__(
++        self,
++        service_name: str,
++        sentinel_manager: Sentinel,
++        *,
++        ssl: bool = False,
++        connection_class: type[SentinelManagedConnection] = ...,
++        is_master: bool = True,
++        check_connection: bool = False,
++        # **kwargs ultimately forwarded to the constructed Connection instances.
++        host: str = "localhost",
++        port: str | int = 6379,
++        socket_keepalive: bool = False,
++        socket_keepalive_options: Mapping[int, int | bytes] | None = None,
++        socket_type: int = 0,
++        db: str | int = 0,
++        password: str | None = None,
++        socket_timeout: float | None = None,
++        socket_connect_timeout: float | None = None,
++        retry_on_timeout: bool = False,
++        retry_on_error: list[type[ValkeyError]] | _Sentinel = ...,
++        encoding: str = "utf-8",
++        encoding_errors: str = "strict",
++        decode_responses: bool = False,
++        parser_class: type[BaseParser] = ...,
++        socket_read_size: int = 65536,
++        health_check_interval: float = 0,
++        client_name: str | None = None,
++        username: str | None = None,
++        retry: Retry | None = None,
++        valkey_connect_func: ConnectCallbackT | None = None,
++        encoder_class: type[Encoder] = ...,
++        credential_provider: CredentialProvider | None = None,
++    ) -> None: ...
++    async def get_master_address(self) -> tuple[str, int]: ...
++    async def rotate_slaves(self) -> AsyncIterator[tuple[str, int]]: ...
++
++_State = TypedDict(
++    "_State", {"ip": str, "port": int, "is_master": bool, "is_sdown": bool, "is_odown": bool, "num-other-sentinels": int}
++)
++
++class Sentinel(AsyncSentinelCommands):
++    sentinel_kwargs: Mapping[str, Any]
++    sentinels: list[Valkey[Any]]
++    min_other_sentinels: int
++    connection_kwargs: Mapping[str, Any]
++    def __init__(
++        self,
++        sentinels: Iterable[tuple[str, int]],
++        min_other_sentinels: int = 0,
++        sentinel_kwargs: Mapping[str, Any] | None = None,
++        **connection_kwargs: Any,
++    ) -> None: ...
++    async def execute_command(self, *args: Any, once: bool = False, **kwargs: Any) -> Literal[True]: ...
++    def check_master_state(self, state: _State, service_name: str) -> bool: ...
++    async def discover_master(self, service_name: str) -> tuple[str, int]: ...
++    def filter_slaves(self, slaves: Iterable[_State]) -> list[tuple[str, int]]: ...
++    async def discover_slaves(self, service_name: str) -> list[tuple[str, int]]: ...
++    @overload
++    def master_for(
++        self,
++        service_name: str,
++        valkey_class: type[_ValkeyT],
++        connection_pool_class: type[SentinelConnectionPool[Any]] = ...,
++        # Forwarded to the connection pool constructor.
++        **kwargs: Any,
++    ) -> _ValkeyT: ...
++    @overload
++    def master_for(
++        self,
++        service_name: str,
++        *,
++        connection_pool_class: type[SentinelConnectionPool[Any]] = ...,
++        # Forwarded to the connection pool constructor.
++        **kwargs: Any,
++    ) -> Valkey[Any]: ...
++    @overload
++    def slave_for(
++        self,
++        service_name: str,
++        valkey_class: type[_ValkeyT],
++        connection_pool_class: type[SentinelConnectionPool[Any]] = ...,
++        # Forwarded to the connection pool constructor.
++        **kwargs: Any,
++    ) -> _ValkeyT: ...
++    @overload
++    def slave_for(
++        self,
++        service_name: str,
++        *,
++        connection_pool_class: type[SentinelConnectionPool[Any]] = ...,
++        # Forwarded to the connection pool constructor.
++        **kwargs: Any,
++    ) -> Valkey[Any]: ...
+diff --git a/valkey/asyncio/utils.pyi b/valkey/asyncio/utils.pyi
+new file mode 100644
+index 0000000..cd3b14d
+--- /dev/null
++++ b/valkey/asyncio/utils.pyi
+@@ -0,0 +1,15 @@
++from types import TracebackType
++from typing import Any, Generic
++
++from valkey.asyncio.client import Pipeline, Valkey
++from valkey.client import _StrType
++
++def from_url(url: str, **kwargs) -> Valkey[Any]: ...
++
++class pipeline(Generic[_StrType]):
++    p: Pipeline[_StrType]
++    def __init__(self, valkey_obj: Valkey[_StrType]) -> None: ...
++    async def __aenter__(self) -> Pipeline[_StrType]: ...
++    async def __aexit__(
++        self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None
++    ) -> None: ...
+diff --git a/valkey/backoff.pyi b/valkey/backoff.pyi
+new file mode 100644
+index 0000000..40230a1
+--- /dev/null
++++ b/valkey/backoff.pyi
+@@ -0,0 +1,31 @@
++from abc import ABC, abstractmethod
++
++class AbstractBackoff(ABC):
++    def reset(self) -> None: ...
++    @abstractmethod
++    def compute(self, failures: int) -> float: ...
++
++class ConstantBackoff(AbstractBackoff):
++    def __init__(self, backoff: int) -> None: ...
++    def compute(self, failures: int) -> float: ...
++
++class NoBackoff(ConstantBackoff):
++    def __init__(self) -> None: ...
++
++class ExponentialBackoff(AbstractBackoff):
++    def __init__(self, cap: float = 0.512, base: float = 0.008) -> None: ...
++    def compute(self, failures: int) -> float: ...
++
++class FullJitterBackoff(AbstractBackoff):
++    def __init__(self, cap: float = 0.512, base: float = 0.008) -> None: ...
++    def compute(self, failures: int) -> float: ...
++
++class EqualJitterBackoff(AbstractBackoff):
++    def __init__(self, cap: float = 0.512, base: float = 0.008) -> None: ...
++    def compute(self, failures: int) -> float: ...
++
++class DecorrelatedJitterBackoff(AbstractBackoff):
++    def __init__(self, cap: float = 0.512, base: float = 0.008) -> None: ...
++    def compute(self, failures: int) -> float: ...
++
++def default_backoff() -> EqualJitterBackoff: ...
+diff --git a/valkey/client.pyi b/valkey/client.pyi
+new file mode 100644
+index 0000000..d55b234
+--- /dev/null
++++ b/valkey/client.pyi
+@@ -0,0 +1,806 @@
++import threading
++from _typeshed import Incomplete, SupportsItems, Unused
++from collections.abc import Callable, Iterable, Iterator, Mapping, Sequence
++from datetime import datetime, timedelta
++from re import Pattern
++from types import TracebackType
++from typing import Any, ClassVar, Literal, TypeVar, overload
++from typing_extensions import Self, TypeAlias
++
++from valkey import ValkeyError
++
++from .commands import CoreCommands, ValkeyModuleCommands, SentinelCommands
++from .connection import ConnectionPool, _ConnectFunc, _ConnectionPoolOptions
++from .credentials import CredentialProvider
++from .lock import Lock
++from .retry import Retry
++from .typing import ChannelT, EncodableT, KeyT, PatternT
++
++_Value: TypeAlias = bytes | float | int | str
++_Key: TypeAlias = str | bytes
++
++# Lib returns str or bytes depending on value of decode_responses
++_StrType = TypeVar("_StrType", bound=str | bytes)
++
++_VT = TypeVar("_VT")
++_T = TypeVar("_T")
++
++# Keyword arguments that are passed to Valkey.parse_response().
++_ParseResponseOptions: TypeAlias = Any
++# Keyword arguments that are passed to Valkey.execute_command().
++_CommandOptions: TypeAlias = _ConnectionPoolOptions | _ParseResponseOptions
++
++SYM_EMPTY: bytes
++EMPTY_RESPONSE: str
++NEVER_DECODE: str
++
++class CaseInsensitiveDict(dict[_StrType, _VT]):
++    def __init__(self, data: SupportsItems[_StrType, _VT]) -> None: ...
++    def update(self, data: SupportsItems[_StrType, _VT]) -> None: ...  # type: ignore[override]
++    @overload
++    def get(self, k: _StrType, default: None = None) -> _VT | None: ...
++    @overload
++    def get(self, k: _StrType, default: _VT | _T) -> _VT | _T: ...
++    # Overrides many other methods too, but without changing signature
++
++def list_or_args(keys, args): ...
++def timestamp_to_datetime(response): ...
++def string_keys_to_dict(key_string, callback): ...
++def parse_debug_object(response): ...
++def parse_object(response, infotype): ...
++def parse_info(response): ...
++
++SENTINEL_STATE_TYPES: dict[str, type[int]]
++
++def parse_sentinel_state(item): ...
++def parse_sentinel_master(response): ...
++def parse_sentinel_masters(response): ...
++def parse_sentinel_slaves_and_sentinels(response): ...
++def parse_sentinel_get_master(response): ...
++def pairs_to_dict(response, decode_keys: bool = False, decode_string_values: bool = False): ...
++def pairs_to_dict_typed(response, type_info): ...
++def zset_score_pairs(response, **options): ...
++def sort_return_tuples(response, **options): ...
++def int_or_none(response): ...
++def float_or_none(response): ...
++def bool_ok(response): ...
++def parse_client_list(response, **options): ...
++def parse_config_get(response, **options): ...
++def parse_scan(response, **options): ...
++def parse_hscan(response, **options): ...
++def parse_zscan(response, **options): ...
++def parse_slowlog_get(response, **options): ...
++
++_LockType = TypeVar("_LockType")
++
++class AbstractValkey:
++    RESPONSE_CALLBACKS: dict[str, Any]
++
++class Valkey(AbstractValkey, ValkeyModuleCommands, CoreCommands[_StrType], SentinelCommands):
++    @overload
++    @classmethod
++    def from_url(
++        cls,
++        url: str,
++        *,
++        host: str | None = ...,
++        port: int | None = ...,
++        db: int | None = ...,
++        password: str | None = ...,
++        socket_timeout: float | None = ...,
++        socket_connect_timeout: float | None = ...,
++        socket_keepalive: bool | None = ...,
++        socket_keepalive_options: Mapping[str, int | str] | None = ...,
++        connection_pool: ConnectionPool | None = ...,
++        unix_socket_path: str | None = ...,
++        encoding: str = ...,
++        encoding_errors: str = ...,
++        charset: str | None = ...,
++        errors: str | None = ...,
++        decode_responses: Literal[True],
++        retry_on_timeout: bool = ...,
++        retry_on_error: list[type[ValkeyError]] | None = ...,
++        ssl: bool = ...,
++        ssl_keyfile: str | None = ...,
++        ssl_certfile: str | None = ...,
++        ssl_cert_reqs: str | int | None = ...,
++        ssl_ca_certs: str | None = ...,
++        ssl_check_hostname: bool = ...,
++        max_connections: int | None = ...,
++        single_connection_client: bool = ...,
++        health_check_interval: float = ...,
++        client_name: str | None = ...,
++        username: str | None = ...,
++        retry: Retry | None = ...,
++    ) -> Valkey[str]: ...
++    @overload
++    @classmethod
++    def from_url(
++        cls,
++        url: str,
++        *,
++        host: str | None = ...,
++        port: int | None = ...,
++        db: int | None = ...,
++        password: str | None = ...,
++        socket_timeout: float | None = ...,
++        socket_connect_timeout: float | None = ...,
++        socket_keepalive: bool | None = ...,
++        socket_keepalive_options: Mapping[str, int | str] | None = ...,
++        connection_pool: ConnectionPool | None = ...,
++        unix_socket_path: str | None = ...,
++        encoding: str = ...,
++        encoding_errors: str = ...,
++        charset: str | None = ...,
++        errors: str | None = ...,
++        decode_responses: Literal[False] = False,
++        retry_on_timeout: bool = ...,
++        retry_on_error: list[type[ValkeyError]] | None = ...,
++        ssl: bool = ...,
++        ssl_keyfile: str | None = ...,
++        ssl_certfile: str | None = ...,
++        ssl_cert_reqs: str | int | None = ...,
++        ssl_ca_certs: str | None = ...,
++        ssl_check_hostname: bool = ...,
++        max_connections: int | None = ...,
++        single_connection_client: bool = ...,
++        health_check_interval: float = ...,
++        client_name: str | None = ...,
++        username: str | None = ...,
++        retry: Retry | None = ...,
++    ) -> Valkey[bytes]: ...
++    connection_pool: Any
++    connection: Any
++    response_callbacks: Any
++    @overload
++    def __init__(
++        self: Valkey[str],
++        host: str,
++        port: int,
++        db: int,
++        password: str | None,
++        socket_timeout: float | None,
++        socket_connect_timeout: float | None,
++        socket_keepalive: bool | None,
++        socket_keepalive_options: Mapping[str, int | str] | None,
++        connection_pool: ConnectionPool | None,
++        unix_socket_path: str | None,
++        encoding: str,
++        encoding_errors: str,
++        charset: str | None,
++        errors: str | None,
++        decode_responses: Literal[True],
++        retry_on_timeout: bool = False,
++        retry_on_error: list[type[ValkeyError]] | None = None,
++        ssl: bool = False,
++        ssl_keyfile: str | None = None,
++        ssl_certfile: str | None = None,
++        ssl_cert_reqs: str | int | None = "required",
++        ssl_ca_certs: str | None = None,
++        ssl_ca_path: Incomplete | None = None,
++        ssl_ca_data: Incomplete | None = None,
++        ssl_check_hostname: bool = False,
++        ssl_password: Incomplete | None = None,
++        ssl_validate_ocsp: bool = False,
++        ssl_validate_ocsp_stapled: bool = False,  # added in 4.1.1
++        ssl_ocsp_context: Incomplete | None = None,  # added in 4.1.1
++        ssl_ocsp_expected_cert: Incomplete | None = None,  # added in 4.1.1
++        max_connections: int | None = None,
++        single_connection_client: bool = False,
++        health_check_interval: float = 0,
++        client_name: str | None = None,
++        lib_name: str | None = None,
++        lib_version: str | None = None,
++        username: str | None = None,
++        retry: Retry | None = None,
++        valkey_connect_func: _ConnectFunc | None = None,
++        credential_provider: CredentialProvider | None = None,
++    ) -> None: ...
++    @overload
++    def __init__(
++        self: Valkey[str],
++        host: str = "localhost",
++        port: int = 6379,
++        db: int = 0,
++        password: str | None = None,
++        socket_timeout: float | None = None,
++        socket_connect_timeout: float | None = None,
++        socket_keepalive: bool | None = None,
++        socket_keepalive_options: Mapping[str, int | str] | None = None,
++        connection_pool: ConnectionPool | None = None,
++        unix_socket_path: str | None = None,
++        encoding: str = "utf-8",
++        encoding_errors: str = "strict",
++        charset: str | None = None,
++        errors: str | None = None,
++        *,
++        decode_responses: Literal[True],
++        retry_on_timeout: bool = False,
++        retry_on_error: list[type[ValkeyError]] | None = None,
++        ssl: bool = False,
++        ssl_keyfile: str | None = None,
++        ssl_certfile: str | None = None,
++        ssl_cert_reqs: str | int | None = "required",
++        ssl_ca_certs: str | None = None,
++        ssl_ca_data: Incomplete | None = None,
++        ssl_check_hostname: bool = False,
++        ssl_password: Incomplete | None = None,
++        ssl_validate_ocsp: bool = False,
++        ssl_validate_ocsp_stapled: bool = False,  # added in 4.1.1
++        ssl_ocsp_context: Incomplete | None = None,  # added in 4.1.1
++        ssl_ocsp_expected_cert: Incomplete | None = None,  # added in 4.1.1
++        max_connections: int | None = None,
++        single_connection_client: bool = False,
++        health_check_interval: float = 0,
++        client_name: str | None = None,
++        lib_name: str | None = None,
++        lib_version: str | None = None,
++        username: str | None = None,
++        retry: Retry | None = None,
++        valkey_connect_func: _ConnectFunc | None = None,
++        credential_provider: CredentialProvider | None = None,
++    ) -> None: ...
++    @overload
++    def __init__(
++        self: Valkey[bytes],
++        host: str = "localhost",
++        port: int = 6379,
++        db: int = 0,
++        password: str | None = None,
++        socket_timeout: float | None = None,
++        socket_connect_timeout: float | None = None,
++        socket_keepalive: bool | None = None,
++        socket_keepalive_options: Mapping[str, int | str] | None = None,
++        connection_pool: ConnectionPool | None = None,
++        unix_socket_path: str | None = None,
++        encoding: str = "utf-8",
++        encoding_errors: str = "strict",
++        charset: str | None = None,
++        errors: str | None = None,
++        decode_responses: Literal[False] = False,
++        retry_on_timeout: bool = False,
++        retry_on_error: list[type[ValkeyError]] | None = None,
++        ssl: bool = False,
++        ssl_keyfile: str | None = None,
++        ssl_certfile: str | None = None,
++        ssl_cert_reqs: str | int | None = "required",
++        ssl_ca_certs: str | None = None,
++        ssl_ca_data: Incomplete | None = None,
++        ssl_check_hostname: bool = False,
++        ssl_password: Incomplete | None = None,
++        ssl_validate_ocsp: bool = False,
++        ssl_validate_ocsp_stapled: bool = False,  # added in 4.1.1
++        ssl_ocsp_context: Incomplete | None = None,  # added in 4.1.1
++        ssl_ocsp_expected_cert: Incomplete | None = None,  # added in 4.1.1
++        max_connections: int | None = None,
++        single_connection_client: bool = False,
++        health_check_interval: float = 0,
++        client_name: str | None = None,
++        lib_name: str | None = None,
++        lib_version: str | None = None,
++        username: str | None = None,
++        retry: Retry | None = None,
++        valkey_connect_func: _ConnectFunc | None = None,
++        credential_provider: CredentialProvider | None = None,
++    ) -> None: ...
++    def get_encoder(self): ...
++    def get_connection_kwargs(self): ...
++    def set_response_callback(self, command, callback): ...
++    def pipeline(self, transaction: bool = True, shard_hint: Any = None) -> Pipeline[_StrType]: ...
++    def transaction(self, func, *watches, **kwargs): ...
++    @overload
++    def lock(
++        self,
++        name: _Key,
++        timeout: float | None = None,
++        sleep: float = 0.1,
++        blocking: bool = True,
++        blocking_timeout: float | None = None,
++        lock_class: None = None,
++        thread_local: bool = True,
++    ) -> Lock: ...
++    @overload
++    def lock(
++        self,
++        name: _Key,
++        timeout: float | None,
++        sleep: float,
++        blocking: bool,
++        blocking_timeout: float | None,
++        lock_class: type[_LockType],
++        thread_local: bool = True,
++    ) -> _LockType: ...
++    @overload
++    def lock(
++        self,
++        name: _Key,
++        timeout: float | None = None,
++        sleep: float = 0.1,
++        blocking: bool = True,
++        blocking_timeout: float | None = None,
++        *,
++        lock_class: type[_LockType],
++        thread_local: bool = True,
++    ) -> _LockType: ...
++    def pubsub(self, *, shard_hint: Any = ..., ignore_subscribe_messages: bool = ...) -> PubSub: ...
++    def execute_command(self, *args, **options: _CommandOptions): ...
++    def parse_response(self, connection, command_name, **options: _ParseResponseOptions): ...
++    def monitor(self) -> Monitor: ...
++    def __enter__(self) -> Valkey[_StrType]: ...
++    def __exit__(
++        self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None
++    ) -> None: ...
++    def __del__(self) -> None: ...
++    def close(self) -> None: ...
++    def client(self) -> Valkey[_StrType]: ...
++
++StrictValkey = Valkey
++
++class PubSub:
++    PUBLISH_MESSAGE_TYPES: ClassVar[tuple[str, str]]
++    UNSUBSCRIBE_MESSAGE_TYPES: ClassVar[tuple[str, str]]
++    HEALTH_CHECK_MESSAGE: ClassVar[str]
++    connection_pool: Any
++    shard_hint: Any
++    ignore_subscribe_messages: Any
++    connection: Any
++    subscribed_event: threading.Event
++    encoder: Any
++    health_check_response_b: bytes
++    health_check_response: list[str] | list[bytes]
++    def __init__(
++        self,
++        connection_pool,
++        shard_hint: Incomplete | None = None,
++        ignore_subscribe_messages: bool = False,
++        encoder: Incomplete | None = None,
++    ) -> None: ...
++    def __enter__(self) -> Self: ...
++    def __exit__(
++        self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None
++    ) -> None: ...
++    def __del__(self): ...
++    channels: Any
++    patterns: Any
++    def reset(self): ...
++    def close(self) -> None: ...
++    def on_connect(self, connection): ...
++    @property
++    def subscribed(self): ...
++    def execute_command(self, *args): ...
++    def clean_health_check_responses(self) -> None: ...
++    def parse_response(self, block: bool = True, timeout: float = 0): ...
++    def is_health_check_response(self, response) -> bool: ...
++    def check_health(self) -> None: ...
++    def psubscribe(self, *args: _Key, **kwargs: Callable[[Any], None]): ...
++    def punsubscribe(self, *args: _Key) -> None: ...
++    def subscribe(self, *args: _Key, **kwargs: Callable[[Any], None]) -> None: ...
++    def unsubscribe(self, *args: _Key) -> None: ...
++    def listen(self): ...
++    def get_message(self, ignore_subscribe_messages: bool = False, timeout: float = 0.0) -> dict[str, Any] | None: ...
++    def handle_message(self, response, ignore_subscribe_messages: bool = False) -> dict[str, Any] | None: ...
++    def run_in_thread(self, sleep_time: float = 0, daemon: bool = False, exception_handler: Incomplete | None = None): ...
++    def ping(self, message: _Value | None = None) -> None: ...
++
++class PubSubWorkerThread(threading.Thread):
++    daemon: Any
++    pubsub: Any
++    sleep_time: Any
++    exception_handler: Any
++    def __init__(self, pubsub, sleep_time, daemon: bool = False, exception_handler: Incomplete | None = None) -> None: ...
++    def run(self) -> None: ...
++    def stop(self) -> None: ...
++
++class Pipeline(Valkey[_StrType]):
++    UNWATCH_COMMANDS: Any
++    connection_pool: Any
++    connection: Any
++    response_callbacks: Any
++    transaction: bool
++    shard_hint: Any
++    watching: bool
++
++    command_stack: Any
++    scripts: Any
++    explicit_transaction: Any
++    def __init__(self, connection_pool, response_callbacks, transaction, shard_hint) -> None: ...
++    def __enter__(self) -> Pipeline[_StrType]: ...
++    def __exit__(
++        self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None
++    ) -> None: ...
++    def __del__(self) -> None: ...
++    def __len__(self) -> int: ...
++    def __bool__(self) -> bool: ...
++    def discard(self) -> None: ...
++    def reset(self) -> None: ...
++    def multi(self) -> None: ...
++    def execute_command(self, *args, **options): ...
++    def immediate_execute_command(self, *args, **options): ...
++    def pipeline_execute_command(self, *args, **options): ...
++    def raise_first_error(self, commands, response): ...
++    def annotate_exception(self, exception, number, command): ...
++    def parse_response(self, connection, command_name, **options): ...
++    def load_scripts(self): ...
++    def execute(self, raise_on_error: bool = True) -> list[Any]: ...
++    def watch(self, *names: _Key) -> bool: ...
++    def unwatch(self) -> bool: ...
++    # in the Valkey implementation, the following methods are inherited from client.
++    def set_response_callback(self, command, callback): ...
++    def pipeline(self, transaction: bool = True, shard_hint: Any = None) -> Pipeline[_StrType]: ...
++    def acl_cat(self, category: str | None = None) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def acl_deluser(self, username: str) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def acl_genpass(self, bits: int | None = None) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def acl_getuser(self, username: str) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def acl_list(self) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def acl_load(self) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def acl_setuser(  # type: ignore[override]
++        self,
++        username: str,
++        enabled: bool = False,
++        nopass: bool = False,
++        passwords: Sequence[str] | None = None,
++        hashed_passwords: Sequence[str] | None = None,
++        categories: Sequence[str] | None = None,
++        commands: Sequence[str] | None = None,
++        keys: Sequence[str] | None = None,
++        channels: Iterable[ChannelT] | None = None,
++        selectors: Iterable[tuple[str, KeyT]] | None = None,
++        reset: bool = False,
++        reset_keys: bool = False,
++        reset_channels: bool = False,
++        reset_passwords: bool = False,
++        **kwargs: _CommandOptions,
++    ) -> Pipeline[_StrType]: ...
++    def acl_users(self) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def acl_whoami(self) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def bgrewriteaof(self) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def bgsave(self, schedule: bool = True) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def client_id(self) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def client_kill(self, address: str) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def client_list(self, _type: str | None = None, client_id: list[str] = []) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def client_getname(self) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def client_setname(self, name: str) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def readwrite(self) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def readonly(self) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def config_get(self, pattern: PatternT = "*", *args: PatternT, **kwargs: _CommandOptions) -> Pipeline[_StrType]: ...
++    def config_set(
++        self, name: KeyT, value: EncodableT, *args: KeyT | EncodableT, **kwargs: _CommandOptions
++    ) -> Pipeline[_StrType]: ...
++    def config_resetstat(self) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def config_rewrite(self) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def dbsize(self) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def debug_object(self, key) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def echo(self, value) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def flushall(self, asynchronous: bool = False) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def flushdb(self, asynchronous: bool = False) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def info(self, section: _Key | None = None, *args: _Key, **kwargs: _CommandOptions) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def lastsave(self) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def object(self, infotype, key) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def ping(self) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def save(self) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def sentinel_get_master_addr_by_name(self, service_name) -> Pipeline[_StrType]: ...
++    def sentinel_master(self, service_name) -> Pipeline[_StrType]: ...
++    def sentinel_masters(self) -> Pipeline[_StrType]: ...
++    def sentinel_monitor(self, name, ip, port, quorum) -> Pipeline[_StrType]: ...
++    def sentinel_remove(self, name) -> Pipeline[_StrType]: ...
++    def sentinel_sentinels(self, service_name) -> Pipeline[_StrType]: ...
++    def sentinel_set(self, name, option, value) -> Pipeline[_StrType]: ...
++    def sentinel_slaves(self, service_name) -> Pipeline[_StrType]: ...
++    def slaveof(self, host=None, port=None) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def slowlog_get(self, num=None) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def slowlog_len(self) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def slowlog_reset(self) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def time(self) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def append(self, key, value) -> Pipeline[_StrType]: ...
++    def bitcount(  # type: ignore[override]
++        self, key: _Key, start: int | None = None, end: int | None = None, mode: str | None = None
++    ) -> Pipeline[_StrType]: ...
++    def bitop(self, operation, dest, *keys) -> Pipeline[_StrType]: ...
++    def bitpos(self, key, bit, start=None, end=None, mode: str | None = None) -> Pipeline[_StrType]: ...
++    def decr(self, name, amount=1) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def delete(self, *names: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def __delitem__(self, _Key) -> None: ...
++    def dump(self, name) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def exists(self, *names: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def __contains__(self, *names: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def expire(  # type: ignore[override]
++        self, name: _Key, time: int | timedelta, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False
++    ) -> Pipeline[_StrType]: ...
++    def expireat(
++        self, name, when, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False
++    ) -> Pipeline[_StrType]: ...
++    def get(self, name: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def __getitem__(self, name) -> Pipeline[_StrType]: ...
++    def getbit(self, name: _Key, offset: int) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def getrange(self, key, start, end) -> Pipeline[_StrType]: ...
++    def getset(self, name, value) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def incr(self, name, amount=1) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def incrby(self, name, amount=1) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def incrbyfloat(self, name, amount=1.0) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def keys(self, pattern: _Key = "*") -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def mget(self, keys: _Key | Iterable[_Key], *args: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def mset(self, mapping: Mapping[_Key, _Value]) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def msetnx(self, mapping: Mapping[_Key, _Value]) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def move(self, name: _Key, db: int) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def persist(self, name: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def pexpire(  # type: ignore[override]
++        self, name: _Key, time: int | timedelta, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False
++    ) -> Pipeline[_StrType]: ...
++    def pexpireat(  # type: ignore[override]
++        self, name: _Key, when: int | datetime, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False
++    ) -> Pipeline[_StrType]: ...
++    def psetex(self, name, time_ms, value) -> Pipeline[_StrType]: ...
++    def pttl(self, name) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def randomkey(self) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def rename(self, src, dst) -> Pipeline[_StrType]: ...
++    def renamenx(self, src, dst) -> Pipeline[_StrType]: ...
++    def restore(
++        self,
++        name,
++        ttl,
++        value,
++        replace: bool = False,
++        absttl: bool = False,
++        idletime: Incomplete | None = None,
++        frequency: Incomplete | None = None,
++    ) -> Pipeline[_StrType]: ...
++    def set(  # type: ignore[override]
++        self,
++        name: _Key,
++        value: _Value,
++        ex: None | int | timedelta = None,
++        px: None | int | timedelta = None,
++        nx: bool = False,
++        xx: bool = False,
++        keepttl: bool = False,
++        get: bool = False,
++        exat: Incomplete | None = None,
++        pxat: Incomplete | None = None,
++    ) -> Pipeline[_StrType]: ...
++    def __setitem__(self, name, value) -> None: ...
++    def setbit(self, name: _Key, offset: int, value: int) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def setex(self, name: _Key, time: int | timedelta, value: _Value) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def setnx(self, name, value) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def setrange(self, name, offset, value) -> Pipeline[_StrType]: ...
++    def strlen(self, name) -> Pipeline[_StrType]: ...
++    def substr(self, name, start, end=-1) -> Pipeline[_StrType]: ...
++    def ttl(self, name: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def type(self, name) -> Pipeline[_StrType]: ...
++    def unlink(self, *names: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def blmove(  # type: ignore[override]
++        self,
++        first_list: _Key,
++        second_list: _Key,
++        timeout: float,
++        src: Literal["LEFT", "RIGHT"] = "LEFT",
++        dest: Literal["LEFT", "RIGHT"] = "RIGHT",
++    ) -> Pipeline[_StrType]: ...
++    def blpop(self, keys: _Value | Iterable[_Value], timeout: float = 0) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def brpop(self, keys: _Value | Iterable[_Value], timeout: float = 0) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def brpoplpush(self, src, dst, timeout=0) -> Pipeline[_StrType]: ...
++    def lindex(self, name: _Key, index: int | str) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def linsert(  # type: ignore[override]
++        self, name: _Key, where: Literal["BEFORE", "AFTER", "before", "after"], refvalue: _Value, value: _Value
++    ) -> Pipeline[_StrType]: ...
++    def llen(self, name: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def lmove(  # type: ignore[override]
++        self,
++        first_list: _Key,
++        second_list: _Key,
++        src: Literal["LEFT", "RIGHT"] = "LEFT",
++        dest: Literal["LEFT", "RIGHT"] = "RIGHT",
++    ) -> Pipeline[_StrType]: ...
++    def lpop(self, name, count: int | None = None) -> Pipeline[_StrType]: ...
++    def lpush(self, name: _Value, *values: _Value) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def lpushx(self, name, value) -> Pipeline[_StrType]: ...
++    def lrange(self, name: _Key, start: int, end: int) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def lrem(self, name: _Key, count: int, value: _Value) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def lset(self, name: _Key, index: int, value: _Value) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def ltrim(self, name: _Key, start: int, end: int) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def rpop(self, name, count: int | None = None) -> Pipeline[_StrType]: ...
++    def rpoplpush(self, src, dst) -> Pipeline[_StrType]: ...
++    def rpush(self, name: _Value, *values: _Value) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def rpushx(self, name, value) -> Pipeline[_StrType]: ...
++    def sort(  # type: ignore[override]
++        self,
++        name: _Key,
++        start: int | None = None,
++        num: int | None = None,
++        by: _Key | None = None,
++        get: _Key | Sequence[_Key] | None = None,
++        desc: bool = False,
++        alpha: bool = False,
++        store: _Key | None = None,
++        groups: bool = False,
++    ) -> Pipeline[_StrType]: ...
++    def scan(  # type: ignore[override]
++        self, cursor: int = 0, match: _Key | None = None, count: int | None = None, _type: str | None = None
++    ) -> Pipeline[_StrType]: ...
++    def scan_iter(self, match: _Key | None = None, count: int | None = None, _type: str | None = None) -> Iterator[Any]: ...  # type: ignore[override]
++    def sscan(self, name: _Key, cursor: int = 0, match: _Key | None = None, count: int | None = None) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def sscan_iter(self, name: _Key, match: _Key | None = None, count: int | None = None) -> Iterator[Any]: ...
++    def hscan(self, name: _Key, cursor: int = 0, match: _Key | None = None, count: int | None = None, no_values: bool | None = None) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def hscan_iter(self, name, match: _Key | None = None, count: int | None = None, no_values: bool | None = None) -> Iterator[Any]: ...
++    def zscan_iter(
++        self, name: _Key, match: _Key | None = None, count: int | None = None, score_cast_func: Callable[[_StrType], Any] = ...
++    ) -> Iterator[Any]: ...
++    def sadd(self, name: _Key, *values: _Value) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def scard(self, name: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def sdiff(self, keys: _Key | Iterable[_Key], *args: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def sdiffstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def sinter(self, keys: _Key | Iterable[_Key], *args: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def sinterstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def sismember(self, name: _Key, value: _Value) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def smembers(self, name: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def smove(self, src: _Key, dst: _Key, value: _Value) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def spop(self, name: _Key, count: int | None = None) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def srandmember(self, name: _Key, number: int | None = None) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def srem(self, name: _Key, *values: _Value) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def sunion(self, keys: _Key | Iterable[_Key], *args: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def sunionstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def xack(self, name, groupname, *ids) -> Pipeline[_StrType]: ...
++    def xadd(
++        self,
++        name,
++        fields,
++        id="*",
++        maxlen=None,
++        approximate: bool = True,
++        nomkstream: bool = False,
++        minid: Incomplete | None = None,
++        limit: int | None = None,
++    ) -> Pipeline[_StrType]: ...
++    def xclaim(
++        self,
++        name,
++        groupname,
++        consumername,
++        min_idle_time,
++        message_ids,
++        idle=None,
++        time=None,
++        retrycount=None,
++        force=False,
++        justid=False,
++    ) -> Pipeline[_StrType]: ...
++    def xdel(self, name, *ids) -> Pipeline[_StrType]: ...
++    def xgroup_create(self, name, groupname, id="$", mkstream=False, entries_read: int | None = None) -> Pipeline[_StrType]: ...
++    def xgroup_delconsumer(self, name, groupname, consumername) -> Pipeline[_StrType]: ...
++    def xgroup_destroy(self, name, groupname) -> Pipeline[_StrType]: ...
++    def xgroup_setid(self, name, groupname, id, entries_read: int | None = None) -> Pipeline[_StrType]: ...
++    def xinfo_consumers(self, name, groupname) -> Pipeline[_StrType]: ...
++    def xinfo_groups(self, name) -> Pipeline[_StrType]: ...
++    def xinfo_stream(self, name, full: bool = False) -> Pipeline[_StrType]: ...
++    def xlen(self, name: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def xpending(self, name, groupname) -> Pipeline[_StrType]: ...
++    def xpending_range(
++        self, name: _Key, groupname, min, max, count: int, consumername: Incomplete | None = None, idle: int | None = None
++    ) -> Pipeline[_StrType]: ...
++    def xrange(self, name, min="-", max="+", count=None) -> Pipeline[_StrType]: ...
++    def xread(self, streams, count=None, block=None) -> Pipeline[_StrType]: ...
++    def xreadgroup(self, groupname, consumername, streams, count=None, block=None, noack=False) -> Pipeline[_StrType]: ...
++    def xrevrange(self, name, max="+", min="-", count=None) -> Pipeline[_StrType]: ...
++    def xtrim(
++        self, name, maxlen: int | None = None, approximate: bool = True, minid: Incomplete | None = None, limit: int | None = None
++    ) -> Pipeline[_StrType]: ...
++    def zadd(  # type: ignore[override]
++        self,
++        name: _Key,
++        mapping: Mapping[_Key, _Value],
++        nx: bool = False,
++        xx: bool = False,
++        ch: bool = False,
++        incr: bool = False,
++        gt: Incomplete | None = False,
++        lt: Incomplete | None = False,
++    ) -> Pipeline[_StrType]: ...
++    def zcard(self, name: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def zcount(self, name: _Key, min: _Value, max: _Value) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def zincrby(self, name: _Key, amount: float, value: _Value) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def zinterstore(  # type: ignore[override]
++        self, dest: _Key, keys: Iterable[_Key], aggregate: Literal["SUM", "MIN", "MAX"] | None = None
++    ) -> Pipeline[_StrType]: ...
++    def zlexcount(self, name: _Key, min: _Value, max: _Value) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def zpopmax(self, name: _Key, count: int | None = None) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def zpopmin(self, name: _Key, count: int | None = None) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def bzpopmax(self, keys: _Key | Iterable[_Key], timeout: float = 0) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def bzpopmin(self, keys: _Key | Iterable[_Key], timeout: float = 0) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def zrange(  # type: ignore[override]
++        self,
++        name: _Key,
++        start: int,
++        end: int,
++        desc: bool = False,
++        withscores: bool = False,
++        score_cast_func: Callable[[_StrType], Any] = ...,
++        byscore: bool = False,
++        bylex: bool = False,
++        offset: int | None = None,
++        num: int | None = None,
++    ) -> Pipeline[_StrType]: ...
++    def zrangebylex(  # type: ignore[override]
++        self, name: _Key, min: _Value, max: _Value, start: int | None = None, num: int | None = None
++    ) -> Pipeline[_StrType]: ...
++    def zrangebyscore(  # type: ignore[override]
++        self,
++        name: _Key,
++        min: _Value,
++        max: _Value,
++        start: int | None = None,
++        num: int | None = None,
++        withscores: bool = False,
++        score_cast_func: Callable[[_StrType], Any] = ...,
++    ) -> Pipeline[_StrType]: ...
++    def zrank(self, name: _Key, value: _Value, withscore: bool = False) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def zrem(self, name: _Key, *values: _Value) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def zremrangebylex(self, name: _Key, min: _Value, max: _Value) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def zremrangebyrank(self, name: _Key, min: _Value, max: _Value) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def zremrangebyscore(self, name: _Key, min: _Value, max: _Value) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def zrevrange(  # type: ignore[override]
++        self, name: _Key, start: int, end: int, withscores: bool = False, score_cast_func: Callable[[_StrType], Any] = ...
++    ) -> Pipeline[_StrType]: ...
++    def zrevrangebyscore(  # type: ignore[override]
++        self,
++        name: _Key,
++        max: _Value,
++        min: _Value,
++        start: int | None = None,
++        num: int | None = None,
++        withscores: bool = False,
++        score_cast_func: Callable[[_StrType], Any] = ...,
++    ) -> Pipeline[_StrType]: ...
++    def zrevrangebylex(  # type: ignore[override]
++        self, name: _Key, max: _Value, min: _Value, start: int | None = None, num: int | None = None
++    ) -> Pipeline[_StrType]: ...
++    def zrevrank(self, name: _Key, value: _Value, withscore: bool = False) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def zscore(self, name: _Key, value: _Value) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def zunionstore(  # type: ignore[override]
++        self, dest: _Key, keys: Iterable[_Key], aggregate: Literal["SUM", "MIN", "MAX"] | None = None
++    ) -> Pipeline[_StrType]: ...
++    def pfadd(self, name: _Key, *values: _Value) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def pfcount(self, name: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def pfmerge(self, dest: _Key, *sources: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def hdel(self, name: _Key, *keys: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def hexists(self, name: _Key, key: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def hget(self, name: _Key, key: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def hgetall(self, name: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def hincrby(self, name: _Key, key: _Key, amount: int = 1) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def hincrbyfloat(self, name: _Key, key: _Key, amount: float = 1.0) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def hkeys(self, name: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def hlen(self, name: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    @overload  # type: ignore[override]
++    def hset(
++        self, name: _Key, key: _Key, value: _Value, mapping: Mapping[_Key, _Value] | None = None, items: Incomplete | None = None
++    ) -> Pipeline[_StrType]: ...
++    @overload
++    def hset(
++        self, name: _Key, key: None, value: None, mapping: Mapping[_Key, _Value], items: Incomplete | None = None
++    ) -> Pipeline[_StrType]: ...
++    @overload
++    def hset(self, name: _Key, *, mapping: Mapping[_Key, _Value], items: Incomplete | None = None) -> Pipeline[_StrType]: ...
++    def hsetnx(self, name: _Key, key: _Key, value: _Value) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def hmset(self, name: _Key, mapping: Mapping[_Key, _Value]) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def hmget(self, name: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def hvals(self, name: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def publish(self, channel: _Key, message: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def eval(self, script, numkeys, *keys_and_args) -> Pipeline[_StrType]: ...
++    def evalsha(self, sha, numkeys, *keys_and_args) -> Pipeline[_StrType]: ...
++    def script_exists(self, *args) -> Pipeline[_StrType]: ...
++    def script_flush(self, sync_type: Incomplete | None = None) -> Pipeline[_StrType]: ...
++    def script_kill(self) -> Pipeline[_StrType]: ...
++    def script_load(self, script) -> Pipeline[_StrType]: ...
++    def pubsub_channels(self, pattern: _Key = "*") -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def pubsub_numsub(self, *args: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def pubsub_numpat(self) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def monitor(self) -> Monitor: ...
++    def cluster(self, cluster_arg: str, *args: Any) -> Pipeline[_StrType]: ...  # type: ignore[override]
++    def client(self) -> Any: ...
++
++class Monitor:
++    command_re: Pattern[str]
++    monitor_re: Pattern[str]
++    def __init__(self, connection_pool) -> None: ...
++    def __enter__(self) -> Self: ...
++    def __exit__(self, *args: Unused) -> None: ...
++    def next_command(self) -> dict[str, Any]: ...
++    def listen(self) -> Iterable[dict[str, Any]]: ...
+diff --git a/valkey/cluster.pyi b/valkey/cluster.pyi
+new file mode 100644
+index 0000000..f6bb7b6
+--- /dev/null
++++ b/valkey/cluster.pyi
+@@ -0,0 +1,265 @@
++from _typeshed import Incomplete, Unused
++from collections.abc import Callable, Iterable, Sequence
++from threading import Lock
++from types import TracebackType
++from typing import Any, ClassVar, Literal, NoReturn, Protocol
++from typing_extensions import Self
++
++from valkey.client import CaseInsensitiveDict, PubSub, Valkey, _ParseResponseOptions
++from valkey.commands import CommandsParser, ValkeyClusterCommands
++from valkey.commands.core import _StrType
++from valkey.connection import BaseParser, Connection, ConnectionPool, Encoder, _ConnectionPoolOptions, _Encodable
++from valkey.exceptions import MovedError, ValkeyError
++from valkey.retry import Retry
++from valkey.typing import EncodableT
++
++def get_node_name(host: str, port: str | int) -> str: ...
++def get_connection(valkey_node: Valkey[Any], *args, **options: _ConnectionPoolOptions) -> Connection: ...
++def parse_scan_result(command: Unused, res, **options): ...
++def parse_pubsub_numsub(command: Unused, res, **options: Unused): ...
++def parse_cluster_slots(resp, **options) -> dict[tuple[int, int], dict[str, Any]]: ...
++def parse_cluster_myshardid(resp: bytes, **options: Unused) -> str: ...
++
++PRIMARY: str
++REPLICA: str
++SLOT_ID: str
++VALKEY_ALLOWED_KEYS: tuple[str, ...]
++KWARGS_DISABLED_KEYS: tuple[str, ...]
++PIPELINE_BLOCKED_COMMANDS: tuple[str, ...]
++
++def cleanup_kwargs(**kwargs: Any) -> dict[str, Any]: ...
++
++# It uses `DefaultParser` in real life, but it is a dynamic base class.
++class ClusterParser(BaseParser): ...
++
++class AbstractValkeyCluster:
++    ValkeyClusterRequestTTL: ClassVar[int]
++    PRIMARIES: ClassVar[str]
++    REPLICAS: ClassVar[str]
++    ALL_NODES: ClassVar[str]
++    RANDOM: ClassVar[str]
++    DEFAULT_NODE: ClassVar[str]
++    NODE_FLAGS: ClassVar[set[str]]
++    COMMAND_FLAGS: ClassVar[dict[str, str]]
++    CLUSTER_COMMANDS_RESPONSE_CALLBACKS: ClassVar[dict[str, Any]]
++    RESULT_CALLBACKS: ClassVar[dict[str, Callable[[Incomplete, Incomplete], Incomplete]]]
++    ERRORS_ALLOW_RETRY: ClassVar[tuple[type[ValkeyError], ...]]
++
++class ValkeyCluster(AbstractValkeyCluster, ValkeyClusterCommands[_StrType]):
++    user_on_connect_func: Callable[[Connection], object] | None
++    encoder: Encoder
++    cluster_error_retry_attempts: int
++    command_flags: dict[str, str]
++    node_flags: set[str]
++    read_from_replicas: bool
++    reinitialize_counter: int
++    reinitialize_steps: int
++    nodes_manager: NodesManager
++    cluster_response_callbacks: CaseInsensitiveDict[str, Callable[..., Incomplete]]
++    result_callbacks: CaseInsensitiveDict[str, Callable[[Incomplete, Incomplete], Incomplete]]
++    commands_parser: CommandsParser
++    def __init__(  # TODO: make @overloads, either `url` or `host:port` can be passed
++        self,
++        host: str | None = None,
++        port: int | None = 6379,
++        startup_nodes: list[ClusterNode] | None = None,
++        cluster_error_retry_attempts: int = 3,
++        retry: Retry | None = None,
++        require_full_coverage: bool = False,
++        reinitialize_steps: int = 5,
++        read_from_replicas: bool = False,
++        dynamic_startup_nodes: bool = True,
++        url: str | None = None,
++        address_remap: Callable[[str, int], tuple[str, int]] | None = None,
++        **kwargs,
++    ) -> None: ...
++    def __enter__(self) -> Self: ...
++    def __exit__(
++        self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None
++    ) -> None: ...
++    def __del__(self) -> None: ...
++    def disconnect_connection_pools(self) -> None: ...
++    @classmethod
++    def from_url(cls, url: str, **kwargs) -> Self: ...
++    def on_connect(self, connection: Connection) -> None: ...
++    def get_valkey_connection(self, node: ClusterNode) -> Valkey[Any]: ...
++    def get_node(
++        self, host: str | None = None, port: str | int | None = None, node_name: str | None = None
++    ) -> ClusterNode | None: ...
++    def get_primaries(self) -> list[ClusterNode]: ...
++    def get_replicas(self) -> list[ClusterNode]: ...
++    def get_random_node(self) -> ClusterNode: ...
++    def get_nodes(self) -> list[ClusterNode]: ...
++    def get_node_from_key(self, key: _Encodable, replica: bool = False) -> ClusterNode | None: ...
++    def get_default_node(self) -> ClusterNode | None: ...
++    def set_default_node(self, node: ClusterNode | None) -> bool: ...
++    def monitor(self, target_node: Incomplete | None = None): ...
++    def pubsub(
++        self, node: Incomplete | None = None, host: Incomplete | None = None, port: Incomplete | None = None, **kwargs
++    ): ...
++    def pipeline(self, transaction: Incomplete | None = None, shard_hint: Incomplete | None = None): ...
++    def lock(
++        self,
++        name: str,
++        timeout: float | None = None,
++        sleep: float = 0.1,
++        blocking: bool = True,
++        blocking_timeout: float | None = None,
++        lock_class: type[Incomplete] | None = None,
++        thread_local: bool = True,
++    ): ...
++    def keyslot(self, key: _Encodable) -> int: ...
++    def determine_slot(self, *args): ...
++    def get_encoder(self) -> Encoder: ...
++    def get_connection_kwargs(self) -> dict[str, Any]: ...
++    def execute_command(self, *args, **kwargs): ...
++    def close(self) -> None: ...
++
++class ClusterNode:
++    host: str
++    port: int
++    name: str
++    server_type: str | None
++    valkey_connection: Valkey[Incomplete] | None
++    def __init__(
++        self, host: str, port: int, server_type: str | None = None, valkey_connection: Valkey[Incomplete] | None = None
++    ) -> None: ...
++    def __eq__(self, obj: object) -> bool: ...
++    def __del__(self) -> None: ...
++
++class LoadBalancer:
++    primary_to_idx: dict[str, int]
++    start_index: int
++    def __init__(self, start_index: int = 0) -> None: ...
++    def get_server_index(self, primary: str, list_size: int) -> int: ...
++    def reset(self) -> None: ...
++
++class NodesManager:
++    nodes_cache: dict[str, ClusterNode]
++    slots_cache: dict[str, list[ClusterNode]]
++    startup_nodes: dict[str, ClusterNode]
++    default_node: ClusterNode | None
++    from_url: bool
++    connection_pool_class: type[ConnectionPool]
++    connection_kwargs: dict[str, Incomplete]  # TODO: could be a TypedDict
++    read_load_balancer: LoadBalancer
++    address_remap: Callable[[str, int], tuple[str, int]] | None
++    def __init__(
++        self,
++        startup_nodes: Iterable[ClusterNode],
++        from_url: bool = False,
++        require_full_coverage: bool = False,
++        lock: Lock | None = None,
++        dynamic_startup_nodes: bool = True,
++        connection_pool_class: type[ConnectionPool] = ...,
++        address_remap: Callable[[str, int], tuple[str, int]] | None = None,
++        **kwargs,  # TODO: same type as connection_kwargs
++    ) -> None: ...
++    def get_node(
++        self, host: str | None = None, port: int | str | None = None, node_name: str | None = None
++    ) -> ClusterNode | None: ...
++    def update_moved_exception(self, exception: MovedError) -> None: ...
++    def get_node_from_slot(self, slot: str, read_from_replicas: bool = False, server_type: str | None = None) -> ClusterNode: ...
++    def get_nodes_by_server_type(self, server_type: str) -> list[ClusterNode]: ...
++    def populate_startup_nodes(self, nodes: Iterable[ClusterNode]) -> None: ...
++    def check_slots_coverage(self, slots_cache: dict[str, list[ClusterNode]]) -> bool: ...
++    def create_valkey_connections(self, nodes: Iterable[ClusterNode]) -> None: ...
++    def create_valkey_node(self, host: str, port: int | str, **kwargs: Any) -> Valkey[Incomplete]: ...
++    def initialize(self) -> None: ...
++    def close(self) -> None: ...
++    def reset(self) -> None: ...
++    def remap_host_port(self, host: str, port: int) -> tuple[str, int]: ...
++
++class ClusterPubSub(PubSub):
++    node: ClusterNode | None
++    cluster: ValkeyCluster[Any]
++    def __init__(
++        self,
++        valkey_cluster: ValkeyCluster[Any],
++        node: ClusterNode | None = None,
++        host: str | None = None,
++        port: int | None = None,
++        **kwargs,
++    ) -> None: ...
++    def set_pubsub_node(
++        self, cluster: ValkeyCluster[Any], node: ClusterNode | None = None, host: str | None = None, port: int | None = None
++    ) -> None: ...
++    def get_pubsub_node(self) -> ClusterNode | None: ...
++    def execute_command(self, *args, **kwargs) -> None: ...
++    def get_valkey_connection(self) -> Valkey[Any] | None: ...
++
++class ClusterPipeline(ValkeyCluster[_StrType]):
++    command_stack: list[Incomplete]
++    nodes_manager: Incomplete
++    refresh_table_asap: bool
++    result_callbacks: Incomplete
++    startup_nodes: Incomplete
++    read_from_replicas: bool
++    command_flags: Incomplete
++    cluster_response_callbacks: Incomplete
++    cluster_error_retry_attempts: int
++    reinitialize_counter: int
++    reinitialize_steps: int
++    encoder: Encoder
++    commands_parser: Incomplete
++    def __init__(
++        self,
++        nodes_manager,
++        commands_parser,
++        result_callbacks: Incomplete | None = None,
++        cluster_response_callbacks: Incomplete | None = None,
++        startup_nodes: Incomplete | None = None,
++        read_from_replicas: bool = False,
++        cluster_error_retry_attempts: int = 3,
++        reinitialize_steps: int = 5,
++        lock: Lock | None = None,
++        **kwargs,
++    ) -> None: ...
++    def __len__(self) -> int: ...
++    def __bool__(self) -> Literal[True]: ...
++    def execute_command(self, *args, **kwargs): ...
++    def pipeline_execute_command(self, *args, **options): ...
++    def raise_first_error(self, stack) -> None: ...
++    def annotate_exception(self, exception, number, command) -> None: ...
++    def execute(self, raise_on_error: bool = True): ...
++    scripts: set[Any]  # is only set in `reset()`
++    watching: bool  # is only set in `reset()`
++    explicit_transaction: bool  # is only set in `reset()`
++    def reset(self) -> None: ...
++    def send_cluster_commands(self, stack, raise_on_error: bool = True, allow_redirections: bool = True): ...
++    def eval(self) -> None: ...
++    def multi(self) -> None: ...
++    def immediate_execute_command(self, *args, **options) -> None: ...
++    def load_scripts(self) -> None: ...
++    def watch(self, *names) -> None: ...
++    def unwatch(self) -> None: ...
++    def script_load_for_pipeline(self, *args, **kwargs) -> None: ...
++    def delete(self, *names): ...
++
++def block_pipeline_command(name: str) -> Callable[..., NoReturn]: ...
++
++class PipelineCommand:
++    args: Sequence[EncodableT]
++    options: _ParseResponseOptions
++    position: int | None
++    result: Any | Exception | None
++    node: Incomplete | None
++    asking: bool
++    def __init__(
++        self, args: Sequence[EncodableT], options: _ParseResponseOptions | None = None, position: int | None = None
++    ) -> None: ...
++
++class _ParseResponseCallback(Protocol):
++    def __call__(self, connection: Connection, command: EncodableT, /, **kwargs) -> Any: ...
++
++class NodeCommands:
++    parse_response: _ParseResponseCallback
++    connection_pool: ConnectionPool
++    connection: Connection
++    commands: list[PipelineCommand]
++    def __init__(
++        self, parse_response: _ParseResponseCallback, connection_pool: ConnectionPool, connection: Connection
++    ) -> None: ...
++    def append(self, c: PipelineCommand) -> None: ...
++    def write(self) -> None: ...
++    def read(self) -> None: ...
+diff --git a/valkey/commands/__init__.pyi b/valkey/commands/__init__.pyi
+new file mode 100644
+index 0000000..1abccc4
+--- /dev/null
++++ b/valkey/commands/__init__.pyi
+@@ -0,0 +1,17 @@
++from .cluster import ValkeyClusterCommands as ValkeyClusterCommands
++from .core import AsyncCoreCommands as AsyncCoreCommands, CoreCommands as CoreCommands
++from .helpers import list_or_args as list_or_args
++from .parser import CommandsParser as CommandsParser
++from .valkeymodules import ValkeyModuleCommands as ValkeyModuleCommands
++from .sentinel import AsyncSentinelCommands as AsyncSentinelCommands, SentinelCommands as SentinelCommands
++
++__all__ = [
++    "ValkeyClusterCommands",
++    "CommandsParser",
++    "AsyncCoreCommands",
++    "CoreCommands",
++    "list_or_args",
++    "ValkeyModuleCommands",
++    "AsyncSentinelCommands",
++    "SentinelCommands",
++]
+diff --git a/valkey/commands/bf/__init__.pyi b/valkey/commands/bf/__init__.pyi
+new file mode 100644
+index 0000000..d5ef70e
+--- /dev/null
++++ b/valkey/commands/bf/__init__.pyi
+@@ -0,0 +1,58 @@
++from typing import Any
++
++from .commands import *
++from .info import BFInfo as BFInfo, CFInfo as CFInfo, CMSInfo as CMSInfo, TDigestInfo as TDigestInfo, TopKInfo as TopKInfo
++
++class AbstractBloom:
++    @staticmethod
++    def append_items(params, items) -> None: ...
++    @staticmethod
++    def append_error(params, error) -> None: ...
++    @staticmethod
++    def append_capacity(params, capacity) -> None: ...
++    @staticmethod
++    def append_expansion(params, expansion) -> None: ...
++    @staticmethod
++    def append_no_scale(params, noScale) -> None: ...
++    @staticmethod
++    def append_weights(params, weights) -> None: ...
++    @staticmethod
++    def append_no_create(params, noCreate) -> None: ...
++    @staticmethod
++    def append_items_and_increments(params, items, increments) -> None: ...
++    @staticmethod
++    def append_values_and_weights(params, items, weights) -> None: ...
++    @staticmethod
++    def append_max_iterations(params, max_iterations) -> None: ...
++    @staticmethod
++    def append_bucket_size(params, bucket_size) -> None: ...
++
++class CMSBloom(CMSCommands, AbstractBloom):
++    client: Any
++    commandmixin: Any
++    execute_command: Any
++    def __init__(self, client, **kwargs) -> None: ...
++
++class TOPKBloom(TOPKCommands, AbstractBloom):
++    client: Any
++    commandmixin: Any
++    execute_command: Any
++    def __init__(self, client, **kwargs) -> None: ...
++
++class CFBloom(CFCommands, AbstractBloom):
++    client: Any
++    commandmixin: Any
++    execute_command: Any
++    def __init__(self, client, **kwargs) -> None: ...
++
++class TDigestBloom(TDigestCommands, AbstractBloom):
++    client: Any
++    commandmixin: Any
++    execute_command: Any
++    def __init__(self, client, **kwargs) -> None: ...
++
++class BFBloom(BFCommands, AbstractBloom):
++    client: Any
++    commandmixin: Any
++    execute_command: Any
++    def __init__(self, client, **kwargs) -> None: ...
+diff --git a/valkey/commands/bf/commands.pyi b/valkey/commands/bf/commands.pyi
+new file mode 100644
+index 0000000..99a296f
+--- /dev/null
++++ b/valkey/commands/bf/commands.pyi
+@@ -0,0 +1,112 @@
++from _typeshed import Incomplete
++
++BF_RESERVE: str
++BF_ADD: str
++BF_MADD: str
++BF_INSERT: str
++BF_EXISTS: str
++BF_MEXISTS: str
++BF_SCANDUMP: str
++BF_LOADCHUNK: str
++BF_INFO: str
++CF_RESERVE: str
++CF_ADD: str
++CF_ADDNX: str
++CF_INSERT: str
++CF_INSERTNX: str
++CF_EXISTS: str
++CF_DEL: str
++CF_COUNT: str
++CF_SCANDUMP: str
++CF_LOADCHUNK: str
++CF_INFO: str
++CMS_INITBYDIM: str
++CMS_INITBYPROB: str
++CMS_INCRBY: str
++CMS_QUERY: str
++CMS_MERGE: str
++CMS_INFO: str
++TOPK_RESERVE: str
++TOPK_ADD: str
++TOPK_INCRBY: str
++TOPK_QUERY: str
++TOPK_COUNT: str
++TOPK_LIST: str
++TOPK_INFO: str
++TDIGEST_CREATE: str
++TDIGEST_RESET: str
++TDIGEST_ADD: str
++TDIGEST_MERGE: str
++TDIGEST_CDF: str
++TDIGEST_QUANTILE: str
++TDIGEST_MIN: str
++TDIGEST_MAX: str
++TDIGEST_INFO: str
++
++class BFCommands:
++    def create(self, key, errorRate, capacity, expansion: Incomplete | None = None, noScale: Incomplete | None = None): ...
++    def add(self, key, item): ...
++    def madd(self, key, *items): ...
++    def insert(
++        self,
++        key,
++        items,
++        capacity: Incomplete | None = None,
++        error: Incomplete | None = None,
++        noCreate: Incomplete | None = None,
++        expansion: Incomplete | None = None,
++        noScale: Incomplete | None = None,
++    ): ...
++    def exists(self, key, item): ...
++    def mexists(self, key, *items): ...
++    def scandump(self, key, iter): ...
++    def loadchunk(self, key, iter, data): ...
++    def info(self, key): ...
++
++class CFCommands:
++    def create(
++        self,
++        key,
++        capacity,
++        expansion: Incomplete | None = None,
++        bucket_size: Incomplete | None = None,
++        max_iterations: Incomplete | None = None,
++    ): ...
++    def add(self, key, item): ...
++    def addnx(self, key, item): ...
++    def insert(self, key, items, capacity: Incomplete | None = None, nocreate: Incomplete | None = None): ...
++    def insertnx(self, key, items, capacity: Incomplete | None = None, nocreate: Incomplete | None = None): ...
++    def exists(self, key, item): ...
++    def delete(self, key, item): ...
++    def count(self, key, item): ...
++    def scandump(self, key, iter): ...
++    def loadchunk(self, key, iter, data): ...
++    def info(self, key): ...
++
++class TOPKCommands:
++    def reserve(self, key, k, width, depth, decay): ...
++    def add(self, key, *items): ...
++    def incrby(self, key, items, increments): ...
++    def query(self, key, *items): ...
++    def count(self, key, *items): ...
++    def list(self, key, withcount: bool = False): ...
++    def info(self, key): ...
++
++class TDigestCommands:
++    def create(self, key, compression: int = 100): ...
++    def reset(self, key): ...
++    def add(self, key, values): ...
++    def merge(self, destination_key, num_keys, *keys, compression: int | None = None, override: bool = False): ...
++    def min(self, key): ...
++    def max(self, key): ...
++    def quantile(self, key, quantile, *quantiles): ...
++    def cdf(self, key, value, *values): ...
++    def info(self, key): ...
++
++class CMSCommands:
++    def initbydim(self, key, width, depth): ...
++    def initbyprob(self, key, error, probability): ...
++    def incrby(self, key, items, increments): ...
++    def query(self, key, *items): ...
++    def merge(self, destKey, numKeys, srcKeys, weights=[]): ...
++    def info(self, key): ...
+diff --git a/valkey/commands/bf/info.pyi b/valkey/commands/bf/info.pyi
+new file mode 100644
+index 0000000..54d1cf0
+--- /dev/null
++++ b/valkey/commands/bf/info.pyi
+@@ -0,0 +1,43 @@
++from typing import Any
++
++class BFInfo:
++    capacity: Any
++    size: Any
++    filterNum: Any
++    insertedNum: Any
++    expansionRate: Any
++    def __init__(self, args) -> None: ...
++
++class CFInfo:
++    size: Any
++    bucketNum: Any
++    filterNum: Any
++    insertedNum: Any
++    deletedNum: Any
++    bucketSize: Any
++    expansionRate: Any
++    maxIteration: Any
++    def __init__(self, args) -> None: ...
++
++class CMSInfo:
++    width: Any
++    depth: Any
++    count: Any
++    def __init__(self, args) -> None: ...
++
++class TopKInfo:
++    k: Any
++    width: Any
++    depth: Any
++    decay: Any
++    def __init__(self, args) -> None: ...
++
++class TDigestInfo:
++    compression: Any
++    capacity: Any
++    mergedNodes: Any
++    unmergedNodes: Any
++    mergedWeight: Any
++    unmergedWeight: Any
++    totalCompressions: Any
++    def __init__(self, args) -> None: ...
+diff --git a/valkey/commands/cluster.pyi b/valkey/commands/cluster.pyi
+new file mode 100644
+index 0000000..2654a73
+--- /dev/null
++++ b/valkey/commands/cluster.pyi
+@@ -0,0 +1,60 @@
++from _typeshed import Incomplete
++from typing import NoReturn
++
++from .core import ACLCommands, DataAccessCommands, ManagementCommands, PubSubCommands, _StrType
++
++class ClusterMultiKeyCommands:
++    def mget_nonatomic(self, keys, *args): ...
++    def mset_nonatomic(self, mapping): ...
++    def exists(self, *keys): ...
++    def delete(self, *keys): ...
++    def touch(self, *keys): ...
++    def unlink(self, *keys): ...
++
++class ClusterManagementCommands(ManagementCommands):
++    def slaveof(self, *args, **kwargs) -> None: ...
++    def replicaof(self, *args, **kwargs) -> None: ...
++    def swapdb(self, *args, **kwargs) -> None: ...
++
++class ClusterDataAccessCommands(DataAccessCommands[_StrType]):
++    def stralgo(
++        self,
++        algo,
++        value1,
++        value2,
++        specific_argument: str = "strings",
++        len: bool = False,
++        idx: bool = False,
++        minmatchlen: Incomplete | None = None,
++        withmatchlen: bool = False,
++        **kwargs,
++    ): ...
++
++class ValkeyClusterCommands(
++    ClusterMultiKeyCommands, ClusterManagementCommands, ACLCommands[_StrType], PubSubCommands, ClusterDataAccessCommands[_StrType]
++):
++    def cluster_addslots(self, target_node, *slots): ...
++    def cluster_countkeysinslot(self, slot_id): ...
++    def cluster_count_failure_report(self, node_id): ...
++    def cluster_delslots(self, *slots): ...
++    def cluster_failover(self, target_node, option: Incomplete | None = None): ...
++    def cluster_info(self, target_nodes: Incomplete | None = None): ...
++    def cluster_keyslot(self, key): ...
++    def cluster_meet(self, host, port, target_nodes: Incomplete | None = None): ...
++    def cluster_nodes(self): ...
++    def cluster_replicate(self, target_nodes, node_id): ...
++    def cluster_reset(self, soft: bool = True, target_nodes: Incomplete | None = None): ...
++    def cluster_save_config(self, target_nodes: Incomplete | None = None): ...
++    def cluster_get_keys_in_slot(self, slot, num_keys): ...
++    def cluster_set_config_epoch(self, epoch, target_nodes: Incomplete | None = None): ...
++    def cluster_setslot(self, target_node, node_id, slot_id, state): ...
++    def cluster_setslot_stable(self, slot_id): ...
++    def cluster_replicas(self, node_id, target_nodes: Incomplete | None = None): ...
++    def cluster_slots(self, target_nodes: Incomplete | None = None): ...
++    def cluster_myshardid(self, target_nodes: Incomplete | None = None): ...
++    def cluster_links(self, target_node): ...
++    def cluster_flushslots(self, target_nodes: Incomplete | None = None) -> NoReturn: ...
++    def cluster_bumpepoch(self, target_nodes: Incomplete | None = None) -> NoReturn: ...
++    read_from_replicas: bool
++    def readonly(self, target_nodes: Incomplete | None = None): ...
++    def readwrite(self, target_nodes: Incomplete | None = None): ...
+diff --git a/valkey/commands/core.pyi b/valkey/commands/core.pyi
+new file mode 100644
+index 0000000..d69b671
+--- /dev/null
++++ b/valkey/commands/core.pyi
+@@ -0,0 +1,1771 @@
++import builtins
++from _typeshed import Incomplete, SupportsItems
++from collections.abc import AsyncIterator, Awaitable, Callable, Iterable, Iterator, Mapping, Sequence
++from datetime import datetime, timedelta
++from typing import Any, Generic, Literal, TypeVar, overload
++
++from ..asyncio.client import Valkey as AsyncValkey
++from ..client import _CommandOptions, _Key, _Value
++from ..typing import ChannelT, EncodableT, KeyT, PatternT, ScriptTextT, StreamIdT
++
++_ScoreCastFuncReturn = TypeVar("_ScoreCastFuncReturn")
++_StrType = TypeVar("_StrType", bound=str | bytes)
++
++class ACLCommands(Generic[_StrType]):
++    def acl_cat(self, category: str | None = None, **kwargs: _CommandOptions) -> list[str]: ...
++    def acl_deluser(self, *username: str, **kwargs: _CommandOptions) -> int: ...
++    def acl_genpass(self, bits: int | None = None, **kwargs: _CommandOptions) -> str: ...
++    def acl_getuser(self, username: str, **kwargs: _CommandOptions) -> Any | None: ...
++    def acl_help(self, **kwargs: _CommandOptions): ...
++    def acl_list(self, **kwargs: _CommandOptions) -> list[str]: ...
++    def acl_log(self, count: int | None = None, **kwargs: _CommandOptions): ...
++    def acl_log_reset(self, **kwargs: _CommandOptions): ...
++    def acl_load(self, **kwargs: _CommandOptions) -> bool: ...
++    def acl_save(self, **kwargs: _CommandOptions): ...
++    def acl_setuser(
++        self,
++        username: str,
++        enabled: bool = False,
++        nopass: bool = False,
++        passwords: Sequence[str] | None = None,
++        hashed_passwords: Sequence[str] | None = None,
++        categories: Sequence[str] | None = None,
++        commands: Sequence[str] | None = None,
++        keys: Sequence[str] | None = None,
++        channels: Iterable[ChannelT] | None = None,
++        selectors: Iterable[tuple[str, KeyT]] | None = None,
++        reset: bool = False,
++        reset_keys: bool = False,
++        reset_channels: bool = False,
++        reset_passwords: bool = False,
++        **kwargs: _CommandOptions,
++    ) -> bool: ...
++    def acl_users(self, **kwargs: _CommandOptions) -> list[str]: ...
++    def acl_whoami(self, **kwargs: _CommandOptions) -> str: ...
++
++class AsyncACLCommands(Generic[_StrType]):
++    async def acl_cat(self, category: str | None = None, **kwargs: _CommandOptions) -> list[str]: ...
++    async def acl_deluser(self, *username: str, **kwargs: _CommandOptions) -> int: ...
++    async def acl_genpass(self, bits: int | None = None, **kwargs: _CommandOptions) -> str: ...
++    async def acl_getuser(self, username: str, **kwargs: _CommandOptions) -> Any | None: ...
++    async def acl_help(self, **kwargs: _CommandOptions): ...
++    async def acl_list(self, **kwargs: _CommandOptions) -> list[str]: ...
++    async def acl_log(self, count: int | None = None, **kwargs: _CommandOptions): ...
++    async def acl_log_reset(self, **kwargs: _CommandOptions): ...
++    async def acl_load(self, **kwargs: _CommandOptions) -> bool: ...
++    async def acl_save(self, **kwargs: _CommandOptions): ...
++    async def acl_setuser(
++        self,
++        username: str,
++        enabled: bool = False,
++        nopass: bool = False,
++        passwords: Sequence[str] | None = None,
++        hashed_passwords: Sequence[str] | None = None,
++        categories: Sequence[str] | None = None,
++        commands: Sequence[str] | None = None,
++        keys: Sequence[str] | None = None,
++        channels: Iterable[ChannelT] | None = None,
++        selectors: Iterable[tuple[str, KeyT]] | None = None,
++        reset: bool = False,
++        reset_keys: bool = False,
++        reset_channels: bool = False,
++        reset_passwords: bool = False,
++        **kwargs: _CommandOptions,
++    ) -> bool: ...
++    async def acl_users(self, **kwargs: _CommandOptions) -> list[str]: ...
++    async def acl_whoami(self, **kwargs: _CommandOptions) -> str: ...
++
++class ManagementCommands:
++    def bgrewriteaof(self, **kwargs: _CommandOptions): ...
++    def bgsave(self, schedule: bool = True, **kwargs: _CommandOptions): ...
++    def role(self): ...
++    def client_kill(self, address: str, **kwargs: _CommandOptions) -> bool: ...
++    def client_kill_filter(
++        self,
++        _id: Incomplete | None = None,
++        _type: Incomplete | None = None,
++        addr: Incomplete | None = None,
++        skipme: Incomplete | None = None,
++        laddr: Incomplete | None = None,
++        user: Incomplete | None = None,
++        **kwargs: _CommandOptions,
++    ): ...
++    def client_info(self, **kwargs: _CommandOptions): ...
++    def client_list(
++        self, _type: str | None = None, client_id: list[str] = [], **kwargs: _CommandOptions
++    ) -> list[dict[str, str]]: ...
++    def client_getname(self, **kwargs: _CommandOptions) -> str | None: ...
++    def client_getredir(self, **kwargs: _CommandOptions): ...
++    def client_reply(self, reply, **kwargs: _CommandOptions): ...
++    def client_id(self, **kwargs: _CommandOptions) -> int: ...
++    def client_tracking_on(
++        self,
++        clientid: Incomplete | None = None,
++        prefix=[],
++        bcast: bool = False,
++        optin: bool = False,
++        optout: bool = False,
++        noloop: bool = False,
++    ): ...
++    def client_tracking_off(
++        self,
++        clientid: Incomplete | None = None,
++        prefix=[],
++        bcast: bool = False,
++        optin: bool = False,
++        optout: bool = False,
++        noloop: bool = False,
++    ): ...
++    def client_tracking(
++        self,
++        on: bool = True,
++        clientid: Incomplete | None = None,
++        prefix=[],
++        bcast: bool = False,
++        optin: bool = False,
++        optout: bool = False,
++        noloop: bool = False,
++        **kwargs: _CommandOptions,
++    ): ...
++    def client_trackinginfo(self, **kwargs: _CommandOptions): ...
++    def client_setname(self, name: str, **kwargs: _CommandOptions) -> bool: ...
++    def client_setinfo(self, attr: str, value: str, **kwargs: _CommandOptions) -> bool: ...
++    def client_unblock(self, client_id, error: bool = False, **kwargs: _CommandOptions): ...
++    def client_pause(self, timeout, all: bool = True, **kwargs: _CommandOptions): ...
++    def client_unpause(self, **kwargs: _CommandOptions): ...
++    def client_no_evict(self, mode: str): ...
++    def client_no_touch(self, mode: str): ...
++    def command(self, **kwargs: _CommandOptions): ...
++    def command_info(self, **kwargs: _CommandOptions): ...
++    def command_count(self, **kwargs: _CommandOptions): ...
++    def config_get(self, pattern: PatternT = "*", *args: PatternT, **kwargs: _CommandOptions): ...
++    def config_set(self, name: KeyT, value: EncodableT, *args: KeyT | EncodableT, **kwargs: _CommandOptions): ...
++    def config_resetstat(self, **kwargs: _CommandOptions): ...
++    def config_rewrite(self, **kwargs: _CommandOptions): ...
++    def dbsize(self, **kwargs: _CommandOptions) -> int: ...
++    def debug_object(self, key, **kwargs: _CommandOptions): ...
++    def debug_segfault(self, **kwargs: _CommandOptions): ...
++    def echo(self, value: _Value, **kwargs: _CommandOptions) -> bytes: ...
++    def flushall(self, asynchronous: bool = False, **kwargs: _CommandOptions) -> bool: ...
++    def flushdb(self, asynchronous: bool = False, **kwargs: _CommandOptions) -> bool: ...
++    def sync(self): ...
++    def psync(self, replicationid, offset): ...
++    def swapdb(self, first, second, **kwargs: _CommandOptions): ...
++    def select(self, index, **kwargs: _CommandOptions): ...
++    def info(self, section: _Key | None = None, *args: _Key, **kwargs: _CommandOptions) -> Mapping[str, Any]: ...
++    def lastsave(self, **kwargs: _CommandOptions): ...
++    def latency_doctor(self): ...
++    def latency_graph(self): ...
++    def lolwut(self, *version_numbers: _Value, **kwargs: _CommandOptions) -> bytes: ...
++    def reset(self) -> None: ...
++    def migrate(
++        self,
++        host,
++        port,
++        keys,
++        destination_db,
++        timeout,
++        copy: bool = False,
++        replace: bool = False,
++        auth: Incomplete | None = None,
++        **kwargs: _CommandOptions,
++    ): ...
++    def object(self, infotype, key, **kwargs: _CommandOptions): ...
++    def memory_doctor(self, **kwargs: _CommandOptions): ...
++    def memory_help(self, **kwargs: _CommandOptions): ...
++    def memory_stats(self, **kwargs: _CommandOptions) -> dict[str, Any]: ...
++    def memory_malloc_stats(self, **kwargs: _CommandOptions): ...
++    def memory_usage(self, key, samples: Incomplete | None = None, **kwargs: _CommandOptions): ...
++    def memory_purge(self, **kwargs: _CommandOptions): ...
++    def latency_histogram(self, *args): ...
++    def latency_history(self, event: str): ...
++    def latency_latest(self): ...
++    def latency_reset(self, *events: str) -> int: ...
++    def ping(self, **kwargs: _CommandOptions) -> bool: ...
++    def quit(self, **kwargs: _CommandOptions): ...
++    def replicaof(self, *args, **kwargs: _CommandOptions): ...
++    def save(self, **kwargs: _CommandOptions) -> bool: ...
++    def shutdown(
++        self,
++        save: bool = False,
++        nosave: bool = False,
++        now: bool = False,
++        force: bool = False,
++        abort: bool = False,
++        **kwargs: _CommandOptions,
++    ) -> None: ...
++    def slaveof(self, host: Incomplete | None = None, port: Incomplete | None = None, **kwargs: _CommandOptions): ...
++    def slowlog_get(self, num: Incomplete | None = None, **kwargs: _CommandOptions): ...
++    def slowlog_len(self, **kwargs: _CommandOptions): ...
++    def slowlog_reset(self, **kwargs: _CommandOptions): ...
++    def time(self, **kwargs: _CommandOptions): ...
++    def wait(self, num_replicas, timeout, **kwargs: _CommandOptions): ...
++
++class AsyncManagementCommands:
++    async def bgrewriteaof(self, **kwargs: _CommandOptions): ...
++    async def bgsave(self, schedule: bool = True, **kwargs: _CommandOptions): ...
++    async def role(self): ...
++    async def client_kill(self, address: str, **kwargs: _CommandOptions) -> bool: ...
++    async def client_kill_filter(
++        self,
++        _id: Incomplete | None = None,
++        _type: Incomplete | None = None,
++        addr: Incomplete | None = None,
++        skipme: Incomplete | None = None,
++        laddr: Incomplete | None = None,
++        user: Incomplete | None = None,
++        **kwargs: _CommandOptions,
++    ): ...
++    async def client_info(self, **kwargs: _CommandOptions): ...
++    async def client_list(
++        self, _type: str | None = None, client_id: list[str] = [], **kwargs: _CommandOptions
++    ) -> list[dict[str, str]]: ...
++    async def client_getname(self, **kwargs: _CommandOptions) -> str | None: ...
++    async def client_getredir(self, **kwargs: _CommandOptions): ...
++    async def client_reply(self, reply, **kwargs: _CommandOptions): ...
++    async def client_id(self, **kwargs: _CommandOptions) -> int: ...
++    async def client_tracking_on(
++        self,
++        clientid: Incomplete | None = None,
++        prefix=[],
++        bcast: bool = False,
++        optin: bool = False,
++        optout: bool = False,
++        noloop: bool = False,
++    ): ...
++    async def client_tracking_off(
++        self,
++        clientid: Incomplete | None = None,
++        prefix=[],
++        bcast: bool = False,
++        optin: bool = False,
++        optout: bool = False,
++        noloop: bool = False,
++    ): ...
++    async def client_tracking(
++        self,
++        on: bool = True,
++        clientid: Incomplete | None = None,
++        prefix=[],
++        bcast: bool = False,
++        optin: bool = False,
++        optout: bool = False,
++        noloop: bool = False,
++        **kwargs: _CommandOptions,
++    ): ...
++    async def client_trackinginfo(self, **kwargs: _CommandOptions): ...
++    async def client_setname(self, name: str, **kwargs: _CommandOptions) -> bool: ...
++    async def client_setinfo(self, attr: str, value: str, **kwargs: _CommandOptions) -> bool: ...
++    async def client_unblock(self, client_id, error: bool = False, **kwargs: _CommandOptions): ...
++    async def client_no_evict(self, mode: str): ...
++    async def client_no_touch(self, mode: str): ...
++    async def client_pause(self, timeout, all: bool = True, **kwargs: _CommandOptions): ...
++    async def client_unpause(self, **kwargs: _CommandOptions): ...
++    async def command(self, **kwargs: _CommandOptions): ...
++    async def command_info(self, **kwargs: _CommandOptions): ...
++    async def command_count(self, **kwargs: _CommandOptions): ...
++    async def config_get(self, pattern: PatternT = "*", *args: PatternT, **kwargs: _CommandOptions): ...
++    async def config_set(self, name: KeyT, value: EncodableT, *args: KeyT | EncodableT, **kwargs: _CommandOptions): ...
++    async def config_resetstat(self, **kwargs: _CommandOptions): ...
++    async def config_rewrite(self, **kwargs: _CommandOptions): ...
++    async def dbsize(self, **kwargs: _CommandOptions) -> int: ...
++    async def debug_object(self, key, **kwargs: _CommandOptions): ...
++    async def debug_segfault(self, **kwargs: _CommandOptions): ...
++    async def echo(self, value: _Value, **kwargs: _CommandOptions) -> bytes: ...
++    async def flushall(self, asynchronous: bool = False, **kwargs: _CommandOptions) -> bool: ...
++    async def flushdb(self, asynchronous: bool = False, **kwargs: _CommandOptions) -> bool: ...
++    async def sync(self): ...
++    async def psync(self, replicationid, offset): ...
++    async def swapdb(self, first, second, **kwargs: _CommandOptions): ...
++    async def select(self, index, **kwargs: _CommandOptions): ...
++    async def info(self, section: _Key | None = None, *args: _Key, **kwargs: _CommandOptions) -> Mapping[str, Any]: ...
++    async def lastsave(self, **kwargs: _CommandOptions): ...
++    async def latency_doctor(self): ...
++    async def latency_graph(self): ...
++    async def lolwut(self, *version_numbers: _Value, **kwargs: _CommandOptions) -> bytes: ...
++    async def reset(self) -> None: ...
++    async def migrate(
++        self,
++        host,
++        port,
++        keys,
++        destination_db,
++        timeout,
++        copy: bool = False,
++        replace: bool = False,
++        auth: Incomplete | None = None,
++        **kwargs: _CommandOptions,
++    ): ...
++    async def object(self, infotype, key, **kwargs: _CommandOptions): ...
++    async def memory_doctor(self, **kwargs: _CommandOptions): ...
++    async def memory_help(self, **kwargs: _CommandOptions): ...
++    async def memory_stats(self, **kwargs: _CommandOptions) -> dict[str, Any]: ...
++    async def memory_malloc_stats(self, **kwargs: _CommandOptions): ...
++    async def memory_usage(self, key, samples: Incomplete | None = None, **kwargs: _CommandOptions): ...
++    async def memory_purge(self, **kwargs: _CommandOptions): ...
++    async def latency_histogram(self, *args): ...
++    async def latency_history(self, event: str): ...
++    async def latency_latest(self): ...
++    async def latency_reset(self, *events: str) -> int: ...
++    async def ping(self, **kwargs: _CommandOptions) -> bool: ...
++    async def quit(self, **kwargs: _CommandOptions): ...
++    async def replicaof(self, *args, **kwargs: _CommandOptions): ...
++    async def save(self, **kwargs: _CommandOptions) -> bool: ...
++    async def shutdown(
++        self,
++        save: bool = False,
++        nosave: bool = False,
++        now: bool = False,
++        force: bool = False,
++        abort: bool = False,
++        **kwargs: _CommandOptions,
++    ) -> None: ...
++    async def slaveof(self, host: Incomplete | None = None, port: Incomplete | None = None, **kwargs: _CommandOptions): ...
++    async def slowlog_get(self, num: Incomplete | None = None, **kwargs: _CommandOptions): ...
++    async def slowlog_len(self, **kwargs: _CommandOptions): ...
++    async def slowlog_reset(self, **kwargs: _CommandOptions): ...
++    async def time(self, **kwargs: _CommandOptions): ...
++    async def wait(self, num_replicas, timeout, **kwargs: _CommandOptions): ...
++
++class BasicKeyCommands(Generic[_StrType]):
++    def append(self, key, value): ...
++    def bitcount(self, key: _Key, start: int | None = None, end: int | None = None, mode: str | None = None) -> int: ...
++    def bitfield(self, key, default_overflow: Incomplete | None = None): ...
++    def bitfield_ro(self, key, encoding: str, offset: int, items: list[tuple[str, int]] | None = None): ...
++    def bitop(self, operation, dest, *keys): ...
++    def bitpos(self, key: _Key, bit: int, start: int | None = None, end: int | None = None, mode: str | None = None): ...
++    def copy(self, source, destination, destination_db: Incomplete | None = None, replace: bool = False): ...
++    def decr(self, name, amount: int = 1) -> int: ...
++    def decrby(self, name, amount: int = 1) -> int: ...
++    def delete(self, *names: _Key) -> int: ...
++    def __delitem__(self, name: _Key) -> None: ...
++    def dump(self, name: _Key) -> _StrType | None: ...
++    def exists(self, *names: _Key) -> int: ...
++    __contains__ = exists
++    def expire(
++        self, name: _Key, time: int | timedelta, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False
++    ) -> bool: ...
++    def expireat(self, name, when, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False): ...
++    def get(self, name: _Key) -> _StrType | None: ...
++    def getdel(self, name: _Key) -> _StrType | None: ...
++    def getex(
++        self,
++        name,
++        ex: Incomplete | None = None,
++        px: Incomplete | None = None,
++        exat: Incomplete | None = None,
++        pxat: Incomplete | None = None,
++        persist: bool = False,
++    ): ...
++    def __getitem__(self, name: str): ...
++    def getbit(self, name: _Key, offset: int) -> int: ...
++    def getrange(self, key, start, end): ...
++    def getset(self, name, value) -> _StrType | None: ...
++    def incr(self, name: _Key, amount: int = 1) -> int: ...
++    def incrby(self, name: _Key, amount: int = 1) -> int: ...
++    def incrbyfloat(self, name: _Key, amount: float = 1.0) -> float: ...
++    def keys(self, pattern: _Key = "*", **kwargs: _CommandOptions) -> list[_StrType]: ...
++    def lmove(
++        self,
++        first_list: _Key,
++        second_list: _Key,
++        src: Literal["LEFT", "RIGHT"] = "LEFT",
++        dest: Literal["LEFT", "RIGHT"] = "RIGHT",
++    ) -> _Value: ...
++    def blmove(
++        self,
++        first_list: _Key,
++        second_list: _Key,
++        timeout: float,
++        src: Literal["LEFT", "RIGHT"] = "LEFT",
++        dest: Literal["LEFT", "RIGHT"] = "RIGHT",
++    ) -> _Value | None: ...
++    def mget(self, keys: _Key | Iterable[_Key], *args: _Key) -> list[_StrType | None]: ...
++    def mset(self, mapping: Mapping[_Key, _Value]) -> Literal[True]: ...
++    def msetnx(self, mapping: Mapping[_Key, _Value]) -> bool: ...
++    def move(self, name: _Key, db: int) -> bool: ...
++    def persist(self, name: _Key) -> bool: ...
++    def pexpire(
++        self, name: _Key, time: int | timedelta, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False
++    ) -> Literal[1, 0]: ...
++    def pexpireat(
++        self, name: _Key, when: int | datetime, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False
++    ) -> Literal[1, 0]: ...
++    def psetex(self, name, time_ms, value): ...
++    def pttl(self, name: _Key) -> int: ...
++    def hrandfield(self, key, count: Incomplete | None = None, withvalues: bool = False): ...
++    def randomkey(self, **kwargs: _CommandOptions): ...
++    def rename(self, src, dst): ...
++    def renamenx(self, src, dst): ...
++    def restore(
++        self,
++        name,
++        ttl,
++        value,
++        replace: bool = False,
++        absttl: bool = False,
++        idletime: Incomplete | None = None,
++        frequency: Incomplete | None = None,
++    ): ...
++    def set(
++        self,
++        name: _Key,
++        value: _Value,
++        ex: None | float | timedelta = None,
++        px: None | float | timedelta = None,
++        nx: bool = False,
++        xx: bool = False,
++        keepttl: bool = False,
++        get: bool = False,
++        exat: Incomplete | None = None,
++        pxat: Incomplete | None = None,
++    ) -> bool | None: ...
++    def __setitem__(self, name, value) -> None: ...
++    def setbit(self, name: _Key, offset: int, value: int) -> int: ...
++    def setex(self, name: _Key, time: int | timedelta, value: _Value) -> bool: ...
++    def setnx(self, name: _Key, value: _Value) -> bool: ...
++    def setrange(self, name, offset, value): ...
++    def stralgo(
++        self,
++        algo,
++        value1,
++        value2,
++        specific_argument: str = "strings",
++        len: bool = False,
++        idx: bool = False,
++        minmatchlen: Incomplete | None = None,
++        withmatchlen: bool = False,
++        **kwargs: _CommandOptions,
++    ): ...
++    def strlen(self, name): ...
++    def substr(self, name, start, end: int = -1): ...
++    def touch(self, *args): ...
++    def ttl(self, name: _Key) -> int: ...
++    def type(self, name): ...
++    def watch(self, *names): ...
++    def unwatch(self): ...
++    def unlink(self, *names: _Key) -> int: ...
++
++class AsyncBasicKeyCommands(Generic[_StrType]):
++    async def append(self, key, value): ...
++    async def bitcount(self, key: _Key, start: int | None = None, end: int | None = None, mode: str | None = None) -> int: ...
++    def bitfield(self, key, default_overflow: Incomplete | None = None): ...
++    async def bitfield_ro(self, key, encoding: str, offset: int, items: list[tuple[str, int]] | None = None): ...
++    async def bitop(self, operation, dest, *keys): ...
++    async def bitpos(self, key: _Key, bit: int, start: int | None = None, end: int | None = None, mode: str | None = None): ...
++    async def copy(self, source, destination, destination_db: Incomplete | None = None, replace: bool = False): ...
++    async def decr(self, name, amount: int = 1) -> int: ...
++    async def decrby(self, name, amount: int = 1) -> int: ...
++    async def delete(self, *names: _Key) -> int: ...
++    async def dump(self, name: _Key) -> _StrType | None: ...
++    async def exists(self, *names: _Key) -> int: ...
++    async def expire(
++        self, name: _Key, time: int | timedelta, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False
++    ) -> bool: ...
++    async def expireat(self, name, when, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False): ...
++    async def get(self, name: _Key) -> _StrType | None: ...
++    async def getdel(self, name: _Key) -> _StrType | None: ...
++    async def getex(
++        self,
++        name,
++        ex: Incomplete | None = None,
++        px: Incomplete | None = None,
++        exat: Incomplete | None = None,
++        pxat: Incomplete | None = None,
++        persist: bool = False,
++    ): ...
++    async def getbit(self, name: _Key, offset: int) -> int: ...
++    async def getrange(self, key, start, end): ...
++    async def getset(self, name, value) -> _StrType | None: ...
++    async def incr(self, name: _Key, amount: int = 1) -> int: ...
++    async def incrby(self, name: _Key, amount: int = 1) -> int: ...
++    async def incrbyfloat(self, name: _Key, amount: float = 1.0) -> float: ...
++    async def keys(self, pattern: _Key = "*", **kwargs: _CommandOptions) -> list[_StrType]: ...
++    async def lmove(
++        self,
++        first_list: _Key,
++        second_list: _Key,
++        src: Literal["LEFT", "RIGHT"] = "LEFT",
++        dest: Literal["LEFT", "RIGHT"] = "RIGHT",
++    ) -> _Value: ...
++    async def blmove(
++        self,
++        first_list: _Key,
++        second_list: _Key,
++        timeout: float,
++        src: Literal["LEFT", "RIGHT"] = "LEFT",
++        dest: Literal["LEFT", "RIGHT"] = "RIGHT",
++    ) -> _Value | None: ...
++    async def mget(self, keys: _Key | Iterable[_Key], *args: _Key) -> list[_StrType | None]: ...
++    async def mset(self, mapping: Mapping[_Key, _Value]) -> Literal[True]: ...
++    async def msetnx(self, mapping: Mapping[_Key, _Value]) -> bool: ...
++    async def move(self, name: _Key, db: int) -> bool: ...
++    async def persist(self, name: _Key) -> bool: ...
++    async def pexpire(
++        self, name: _Key, time: int | timedelta, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False
++    ) -> Literal[1, 0]: ...
++    async def pexpireat(
++        self, name: _Key, when: int | datetime, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False
++    ) -> Literal[1, 0]: ...
++    async def psetex(self, name, time_ms, value): ...
++    async def pttl(self, name: _Key) -> int: ...
++    async def hrandfield(self, key, count: Incomplete | None = None, withvalues: bool = False): ...
++    async def randomkey(self, **kwargs: _CommandOptions): ...
++    async def rename(self, src, dst): ...
++    async def renamenx(self, src, dst): ...
++    async def restore(
++        self,
++        name,
++        ttl,
++        value,
++        replace: bool = False,
++        absttl: bool = False,
++        idletime: Incomplete | None = None,
++        frequency: Incomplete | None = None,
++    ): ...
++    async def set(
++        self,
++        name: _Key,
++        value: _Value,
++        ex: None | float | timedelta = None,
++        px: None | float | timedelta = None,
++        nx: bool = False,
++        xx: bool = False,
++        keepttl: bool = False,
++        get: bool = False,
++        exat: Incomplete | None = None,
++        pxat: Incomplete | None = None,
++    ) -> bool | None: ...
++    async def setbit(self, name: _Key, offset: int, value: int) -> int: ...
++    async def setex(self, name: _Key, time: int | timedelta, value: _Value) -> bool: ...
++    async def setnx(self, name: _Key, value: _Value) -> bool: ...
++    async def setrange(self, name, offset, value): ...
++    async def stralgo(
++        self,
++        algo,
++        value1,
++        value2,
++        specific_argument: str = "strings",
++        len: bool = False,
++        idx: bool = False,
++        minmatchlen: Incomplete | None = None,
++        withmatchlen: bool = False,
++        **kwargs: _CommandOptions,
++    ): ...
++    async def strlen(self, name): ...
++    async def substr(self, name, start, end: int = -1): ...
++    async def touch(self, *args): ...
++    async def ttl(self, name: _Key) -> int: ...
++    async def type(self, name): ...
++    async def watch(self, *names): ...
++    async def unwatch(self): ...
++    async def unlink(self, *names: _Key) -> int: ...
++    def __getitem__(self, name: str): ...
++    def __setitem__(self, name, value) -> None: ...
++    def __delitem__(self, name: _Key) -> None: ...
++    def __contains__(self, name: _Key) -> bool: ...
++
++class ListCommands(Generic[_StrType]):
++    @overload
++    def blpop(self, keys: _Value | Iterable[_Value], timeout: Literal[0] | None = 0) -> tuple[_StrType, _StrType]: ...
++    @overload
++    def blpop(self, keys: _Value | Iterable[_Value], timeout: float) -> tuple[_StrType, _StrType] | None: ...
++    @overload
++    def brpop(self, keys: _Value | Iterable[_Value], timeout: Literal[0] | None = 0) -> tuple[_StrType, _StrType]: ...
++    @overload
++    def brpop(self, keys: _Value | Iterable[_Value], timeout: float) -> tuple[_StrType, _StrType] | None: ...
++    def brpoplpush(self, src, dst, timeout: int | None = 0): ...
++    def lindex(self, name: _Key, index: int | str) -> _StrType | None: ...
++    def linsert(
++        self, name: _Key, where: Literal["BEFORE", "AFTER", "before", "after"], refvalue: _Value, value: _Value
++    ) -> int: ...
++    def llen(self, name: _Key) -> int: ...
++    def lpop(self, name, count: int | None = None): ...
++    def lpush(self, name: _Value, *values: _Value) -> int: ...
++    def lpushx(self, name, value): ...
++    def lrange(self, name: _Key, start: int, end: int) -> list[_StrType]: ...
++    def lrem(self, name: _Key, count: int, value: _Value) -> int: ...
++    def lset(self, name: _Key, index: int, value: _Value) -> bool: ...
++    def ltrim(self, name: _Key, start: int, end: int) -> bool: ...
++    def rpop(self, name, count: int | None = None): ...
++    def rpoplpush(self, src, dst): ...
++    def rpush(self, name: _Value, *values: _Value) -> int: ...
++    def rpushx(self, name, value): ...
++    def lpos(
++        self, name, value, rank: Incomplete | None = None, count: Incomplete | None = None, maxlen: Incomplete | None = None
++    ): ...
++    @overload
++    def sort(
++        self,
++        name: _Key,
++        start: int | None = None,
++        num: int | None = None,
++        by: _Key | None = None,
++        get: _Key | Sequence[_Key] | None = None,
++        desc: bool = False,
++        alpha: bool = False,
++        store: None = None,
++        groups: bool = False,
++    ) -> list[_StrType]: ...
++    @overload
++    def sort(
++        self,
++        name: _Key,
++        start: int | None = None,
++        num: int | None = None,
++        by: _Key | None = None,
++        get: _Key | Sequence[_Key] | None = None,
++        desc: bool = False,
++        alpha: bool = False,
++        *,
++        store: _Key,
++        groups: bool = False,
++    ) -> int: ...
++    @overload
++    def sort(
++        self,
++        name: _Key,
++        start: int | None,
++        num: int | None,
++        by: _Key | None,
++        get: _Key | Sequence[_Key] | None,
++        desc: bool,
++        alpha: bool,
++        store: _Key,
++        groups: bool = False,
++    ) -> int: ...
++
++class AsyncListCommands(Generic[_StrType]):
++    @overload
++    async def blpop(self, keys: _Value | Iterable[_Value], timeout: Literal[0] | None = 0) -> tuple[_StrType, _StrType]: ...
++    @overload
++    async def blpop(self, keys: _Value | Iterable[_Value], timeout: float) -> tuple[_StrType, _StrType] | None: ...
++    @overload
++    async def brpop(self, keys: _Value | Iterable[_Value], timeout: Literal[0] | None = 0) -> tuple[_StrType, _StrType]: ...
++    @overload
++    async def brpop(self, keys: _Value | Iterable[_Value], timeout: float) -> tuple[_StrType, _StrType] | None: ...
++    async def brpoplpush(self, src, dst, timeout: int | None = 0): ...
++    async def lindex(self, name: _Key, index: int | str) -> _StrType | None: ...
++    async def linsert(
++        self, name: _Key, where: Literal["BEFORE", "AFTER", "before", "after"], refvalue: _Value, value: _Value
++    ) -> int: ...
++    async def llen(self, name: _Key) -> int: ...
++    async def lpop(self, name, count: int | None = None): ...
++    async def lpush(self, name: _Value, *values: _Value) -> int: ...
++    async def lpushx(self, name, value): ...
++    async def lrange(self, name: _Key, start: int, end: int) -> list[_StrType]: ...
++    async def lrem(self, name: _Key, count: int, value: _Value) -> int: ...
++    async def lset(self, name: _Key, index: int, value: _Value) -> bool: ...
++    async def ltrim(self, name: _Key, start: int, end: int) -> bool: ...
++    async def rpop(self, name, count: int | None = None): ...
++    async def rpoplpush(self, src, dst): ...
++    async def rpush(self, name: _Value, *values: _Value) -> int: ...
++    async def rpushx(self, name, value): ...
++    async def lpos(
++        self, name, value, rank: Incomplete | None = None, count: Incomplete | None = None, maxlen: Incomplete | None = None
++    ): ...
++    @overload
++    async def sort(
++        self,
++        name: _Key,
++        start: int | None = None,
++        num: int | None = None,
++        by: _Key | None = None,
++        get: _Key | Sequence[_Key] | None = None,
++        desc: bool = False,
++        alpha: bool = False,
++        store: None = None,
++        groups: bool = False,
++    ) -> list[_StrType]: ...
++    @overload
++    async def sort(
++        self,
++        name: _Key,
++        start: int | None = None,
++        num: int | None = None,
++        by: _Key | None = None,
++        get: _Key | Sequence[_Key] | None = None,
++        desc: bool = False,
++        alpha: bool = False,
++        *,
++        store: _Key,
++        groups: bool = False,
++    ) -> int: ...
++    @overload
++    async def sort(
++        self,
++        name: _Key,
++        start: int | None,
++        num: int | None,
++        by: _Key | None,
++        get: _Key | Sequence[_Key] | None,
++        desc: bool,
++        alpha: bool,
++        store: _Key,
++        groups: bool = False,
++    ) -> int: ...
++
++class ScanCommands(Generic[_StrType]):
++    def scan(
++        self,
++        cursor: int = 0,
++        match: _Key | None = None,
++        count: int | None = None,
++        _type: str | None = None,
++        **kwargs: _CommandOptions,
++    ) -> tuple[int, list[_StrType]]: ...
++    def scan_iter(
++        self, match: _Key | None = None, count: int | None = None, _type: str | None = None, **kwargs: _CommandOptions
++    ) -> Iterator[_StrType]: ...
++    def sscan(
++        self, name: _Key, cursor: int = 0, match: _Key | None = None, count: int | None = None
++    ) -> tuple[int, list[_StrType]]: ...
++    def sscan_iter(self, name: _Key, match: _Key | None = None, count: int | None = None) -> Iterator[_StrType]: ...
++    def hscan(
++        self, name: _Key, cursor: int = 0, match: _Key | None = None, count: int | None = None,
++        no_values: bool | None = None,
++    ) -> tuple[int, dict[_StrType, _StrType]]: ...
++    def hscan_iter(
++        self, name: _Key, match: _Key | None = None, count: int | None = None,
++        no_values: bool | None = None,
++    ) -> Iterator[tuple[_StrType, _StrType]]: ...
++    @overload
++    def zscan(
++        self, name: _Key, cursor: int = 0, match: _Key | None = None, count: int | None = None
++    ) -> tuple[int, list[tuple[_StrType, float]]]: ...
++    @overload
++    def zscan(
++        self,
++        name: _Key,
++        cursor: int = 0,
++        match: _Key | None = None,
++        count: int | None = None,
++        *,
++        score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn],
++    ) -> tuple[int, list[tuple[_StrType, _ScoreCastFuncReturn]]]: ...
++    @overload
++    def zscan(
++        self,
++        name: _Key,
++        cursor: int,
++        match: _Key | None,
++        count: int | None,
++        score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn],
++    ) -> tuple[int, list[tuple[_StrType, _ScoreCastFuncReturn]]]: ...
++    @overload
++    def zscan_iter(self, name: _Key, match: _Key | None = None, count: int | None = None) -> Iterator[tuple[_StrType, float]]: ...
++    @overload
++    def zscan_iter(
++        self,
++        name: _Key,
++        match: _Key | None = None,
++        count: int | None = None,
++        *,
++        score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn],
++    ) -> Iterator[tuple[_StrType, _ScoreCastFuncReturn]]: ...
++    @overload
++    def zscan_iter(
++        self, name: _Key, match: _Key | None, count: int | None, score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn]
++    ) -> Iterator[tuple[_StrType, _ScoreCastFuncReturn]]: ...
++
++class AsyncScanCommands(Generic[_StrType]):
++    async def scan(
++        self,
++        cursor: int = 0,
++        match: _Key | None = None,
++        count: int | None = None,
++        _type: str | None = None,
++        **kwargs: _CommandOptions,
++    ) -> tuple[int, list[_StrType]]: ...
++    def scan_iter(
++        self, match: _Key | None = None, count: int | None = None, _type: str | None = None, **kwargs: _CommandOptions
++    ) -> AsyncIterator[_StrType]: ...
++    async def sscan(
++        self, name: _Key, cursor: int = 0, match: _Key | None = None, count: int | None = None
++    ) -> tuple[int, list[_StrType]]: ...
++    def sscan_iter(self, name: _Key, match: _Key | None = None, count: int | None = None) -> AsyncIterator[_StrType]: ...
++    async def hscan(
++        self, name: _Key, cursor: int = 0, match: _Key | None = None, count: int | None = None,
++        no_values: bool | None = None,
++    ) -> tuple[int, dict[_StrType, _StrType]]: ...
++    def hscan_iter(
++        self, name: _Key, match: _Key | None = None, count: int | None = None,
++        no_values: bool | None = None,
++    ) -> AsyncIterator[tuple[_StrType, _StrType]]: ...
++    @overload
++    async def zscan(
++        self, name: _Key, cursor: int = 0, match: _Key | None = None, count: int | None = None
++    ) -> tuple[int, list[tuple[_StrType, float]]]: ...
++    @overload
++    async def zscan(
++        self,
++        name: _Key,
++        cursor: int = 0,
++        match: _Key | None = None,
++        count: int | None = None,
++        *,
++        score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn],
++    ) -> tuple[int, list[tuple[_StrType, _ScoreCastFuncReturn]]]: ...
++    @overload
++    async def zscan(
++        self,
++        name: _Key,
++        cursor: int,
++        match: _Key | None,
++        count: int | None,
++        score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn],
++    ) -> tuple[int, list[tuple[_StrType, _ScoreCastFuncReturn]]]: ...
++    @overload
++    def zscan_iter(
++        self, name: _Key, match: _Key | None = None, count: int | None = None
++    ) -> AsyncIterator[tuple[_StrType, float]]: ...
++    @overload
++    def zscan_iter(
++        self,
++        name: _Key,
++        match: _Key | None = None,
++        count: int | None = None,
++        *,
++        score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn],
++    ) -> AsyncIterator[tuple[_StrType, _ScoreCastFuncReturn]]: ...
++    @overload
++    def zscan_iter(
++        self, name: _Key, match: _Key | None, count: int | None, score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn]
++    ) -> AsyncIterator[tuple[_StrType, _ScoreCastFuncReturn]]: ...
++
++class SetCommands(Generic[_StrType]):
++    def sadd(self, name: _Key, *values: _Value) -> int: ...
++    def scard(self, name: _Key) -> int: ...
++    def sdiff(self, keys: _Key | Iterable[_Key], *args: _Key) -> builtins.set[_Value]: ...
++    def sdiffstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> int: ...
++    def sinter(self, keys: _Key | Iterable[_Key], *args: _Key) -> builtins.set[_Value]: ...
++    def sinterstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> int: ...
++    def sismember(self, name: _Key, value: _Value) -> bool: ...
++    def smembers(self, name: _Key) -> builtins.list[_StrType]: ...
++    def smismember(self, name, values, *args): ...
++    def smove(self, src: _Key, dst: _Key, value: _Value) -> bool: ...
++    @overload
++    def spop(self, name: _Key, count: None = None) -> _Value | None: ...
++    @overload
++    def spop(self, name: _Key, count: int) -> list[_Value]: ...
++    @overload
++    def srandmember(self, name: _Key, number: None = None) -> _Value | None: ...
++    @overload
++    def srandmember(self, name: _Key, number: int) -> list[_Value]: ...
++    def srem(self, name: _Key, *values: _Value) -> int: ...
++    def sunion(self, keys: _Key | Iterable[_Key], *args: _Key) -> builtins.set[_Value]: ...
++    def sunionstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> int: ...
++
++class AsyncSetCommands(Generic[_StrType]):
++    async def sadd(self, name: _Key, *values: _Value) -> int: ...
++    async def scard(self, name: _Key) -> int: ...
++    async def sdiff(self, keys: _Key | Iterable[_Key], *args: _Key) -> builtins.set[_Value]: ...
++    async def sdiffstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> int: ...
++    async def sinter(self, keys: _Key | Iterable[_Key], *args: _Key) -> builtins.set[_Value]: ...
++    async def sinterstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> int: ...
++    async def sismember(self, name: _Key, value: _Value) -> bool: ...
++    async def smembers(self, name: _Key) -> builtins.list[_StrType]: ...
++    async def smismember(self, name, values, *args): ...
++    async def smove(self, src: _Key, dst: _Key, value: _Value) -> bool: ...
++    @overload
++    async def spop(self, name: _Key, count: None = None) -> _Value | None: ...
++    @overload
++    async def spop(self, name: _Key, count: int) -> list[_Value]: ...
++    @overload
++    async def srandmember(self, name: _Key, number: None = None) -> _Value | None: ...
++    @overload
++    async def srandmember(self, name: _Key, number: int) -> list[_Value]: ...
++    async def srem(self, name: _Key, *values: _Value) -> int: ...
++    async def sunion(self, keys: _Key | Iterable[_Key], *args: _Key) -> builtins.set[_Value]: ...
++    async def sunionstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> int: ...
++
++class StreamCommands:
++    def xack(self, name, groupname, *ids): ...
++    def xadd(
++        self,
++        name: KeyT,
++        # Only accepts dict objects, but for variance reasons we use a looser annotation
++        fields: SupportsItems[bytes | memoryview | str | float, Any],
++        id: str | int | bytes | memoryview = "*",
++        maxlen=None,
++        approximate: bool = True,
++        nomkstream: bool = False,
++        minid: Incomplete | None = None,
++        limit: Incomplete | None = None,
++    ): ...
++    def xautoclaim(
++        self,
++        name,
++        groupname,
++        consumername,
++        min_idle_time,
++        start_id: StreamIdT = "0-0",
++        count: Incomplete | None = None,
++        justid: bool = False,
++    ): ...
++    def xclaim(
++        self,
++        name,
++        groupname,
++        consumername,
++        min_idle_time,
++        message_ids,
++        idle=None,
++        time=None,
++        retrycount=None,
++        force=False,
++        justid=False,
++    ): ...
++    def xdel(self, name: KeyT, *ids: str | int | bytes | memoryview): ...
++    def xgroup_create(self, name, groupname, id: str | int = "$", mkstream: bool = False, entries_read: int | None = None): ...
++    def xgroup_delconsumer(self, name, groupname, consumername): ...
++    def xgroup_destroy(self, name, groupname): ...
++    def xgroup_createconsumer(self, name, groupname, consumername): ...
++    def xgroup_setid(self, name, groupname, id, entries_read: int | None = None): ...
++    def xinfo_consumers(self, name, groupname): ...
++    def xinfo_groups(self, name): ...
++    def xinfo_stream(self, name, full: bool = False): ...
++    def xlen(self, name: _Key) -> int: ...
++    def xpending(self, name, groupname): ...
++    def xpending_range(
++        self, name: _Key, groupname, min, max, count: int, consumername: Incomplete | None = None, idle: int | None = None
++    ): ...
++    def xrange(self, name, min: str = "-", max: str = "+", count: Incomplete | None = None): ...
++    def xread(self, streams, count: Incomplete | None = None, block: Incomplete | None = None): ...
++    def xreadgroup(
++        self,
++        groupname,
++        consumername,
++        streams,
++        count: Incomplete | None = None,
++        block: Incomplete | None = None,
++        noack: bool = False,
++    ): ...
++    def xrevrange(self, name, max: str = "+", min: str = "-", count: Incomplete | None = None): ...
++    def xtrim(
++        self, name, maxlen: int | None = None, approximate: bool = True, minid: Incomplete | None = None, limit: int | None = None
++    ): ...
++
++class AsyncStreamCommands:
++    async def xack(self, name, groupname, *ids): ...
++    async def xadd(
++        self,
++        name: KeyT,
++        # Only accepts dict objects, but for variance reasons we use a looser annotation
++        fields: SupportsItems[bytes | memoryview | str | float, Any],
++        id: str | int | bytes | memoryview = "*",
++        maxlen=None,
++        approximate: bool = True,
++        nomkstream: bool = False,
++        minid: Incomplete | None = None,
++        limit: Incomplete | None = None,
++    ): ...
++    async def xautoclaim(
++        self,
++        name,
++        groupname,
++        consumername,
++        min_idle_time,
++        start_id: StreamIdT = "0-0",
++        count: Incomplete | None = None,
++        justid: bool = False,
++    ): ...
++    async def xclaim(
++        self,
++        name,
++        groupname,
++        consumername,
++        min_idle_time,
++        message_ids,
++        idle=None,
++        time=None,
++        retrycount=None,
++        force=False,
++        justid=False,
++    ): ...
++    async def xdel(self, name: KeyT, *ids: str | int | bytes | memoryview): ...
++    async def xgroup_create(self, name, groupname, id: str | int = "$", mkstream: bool = False, entries_read: int | None = None): ...
++    async def xgroup_delconsumer(self, name, groupname, consumername): ...
++    async def xgroup_destroy(self, name, groupname): ...
++    async def xgroup_createconsumer(self, name, groupname, consumername): ...
++    async def xgroup_setid(self, name, groupname, id, entries_read: int | None = None): ...
++    async def xinfo_consumers(self, name, groupname): ...
++    async def xinfo_groups(self, name): ...
++    async def xinfo_stream(self, name, full: bool = False): ...
++    async def xlen(self, name: _Key) -> int: ...
++    async def xpending(self, name, groupname): ...
++    async def xpending_range(
++        self, name: _Key, groupname, min, max, count: int, consumername: Incomplete | None = None, idle: int | None = None
++    ): ...
++    async def xrange(self, name, min: str = "-", max: str = "+", count: Incomplete | None = None): ...
++    async def xread(self, streams, count: Incomplete | None = None, block: Incomplete | None = None): ...
++    async def xreadgroup(
++        self,
++        groupname,
++        consumername,
++        streams,
++        count: Incomplete | None = None,
++        block: Incomplete | None = None,
++        noack: bool = False,
++    ): ...
++    async def xrevrange(self, name, max: str = "+", min: str = "-", count: Incomplete | None = None): ...
++    async def xtrim(
++        self, name, maxlen: int | None = None, approximate: bool = True, minid: Incomplete | None = None, limit: int | None = None
++    ): ...
++
++class SortedSetCommands(Generic[_StrType]):
++    def zadd(
++        self,
++        name: _Key,
++        mapping: Mapping[_Key, _Value],
++        nx: bool = False,
++        xx: bool = False,
++        ch: bool = False,
++        incr: bool = False,
++        gt: Incomplete | None = False,
++        lt: Incomplete | None = False,
++    ) -> int: ...
++    def zcard(self, name: _Key) -> int: ...
++    def zcount(self, name: _Key, min: _Value, max: _Value) -> int: ...
++    def zdiff(self, keys, withscores: bool = False): ...
++    def zdiffstore(self, dest, keys): ...
++    def zincrby(self, name: _Key, amount: float, value: _Value) -> float: ...
++    def zinter(self, keys, aggregate: Incomplete | None = None, withscores: bool = False): ...
++    def zinterstore(self, dest: _Key, keys: Iterable[_Key], aggregate: Literal["SUM", "MIN", "MAX"] | None = None) -> int: ...
++    def zlexcount(self, name: _Key, min: _Value, max: _Value) -> int: ...
++    def zpopmax(self, name: _Key, count: int | None = None) -> list[tuple[_StrType, float]]: ...
++    def zpopmin(self, name: _Key, count: int | None = None) -> list[tuple[_StrType, float]]: ...
++    def zrandmember(self, key, count: Incomplete | None = None, withscores: bool = False): ...
++    @overload
++    def bzpopmax(self, keys: _Key | Iterable[_Key], timeout: Literal[0] = 0) -> tuple[_StrType, _StrType, float]: ...
++    @overload
++    def bzpopmax(self, keys: _Key | Iterable[_Key], timeout: float) -> tuple[_StrType, _StrType, float] | None: ...
++    @overload
++    def bzpopmin(self, keys: _Key | Iterable[_Key], timeout: Literal[0] = 0) -> tuple[_StrType, _StrType, float]: ...
++    @overload
++    def bzpopmin(self, keys: _Key | Iterable[_Key], timeout: float) -> tuple[_StrType, _StrType, float] | None: ...
++    @overload
++    def zrange(  # type: ignore[overload-overlap]
++        self,
++        name: _Key,
++        start: int,
++        end: int,
++        desc: bool,
++        withscores: Literal[True],
++        score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn],
++        byscore: bool = False,
++        bylex: bool = False,
++        offset: int | None = None,
++        num: int | None = None,
++    ) -> list[tuple[_StrType, _ScoreCastFuncReturn]]: ...
++    @overload
++    def zrange(  # type: ignore[overload-overlap]
++        self,
++        name: _Key,
++        start: int,
++        end: int,
++        desc: bool,
++        withscores: Literal[True],
++        score_cast_func: Callable[[_StrType], float] = ...,
++        byscore: bool = False,
++        bylex: bool = False,
++        offset: int | None = None,
++        num: int | None = None,
++    ) -> list[tuple[_StrType, float]]: ...
++    @overload
++    def zrange(  # type: ignore[overload-overlap]
++        self,
++        name: _Key,
++        start: int,
++        end: int,
++        *,
++        withscores: Literal[True],
++        score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn],
++        byscore: bool = False,
++        bylex: bool = False,
++        offset: int | None = None,
++        num: int | None = None,
++    ) -> list[tuple[_StrType, _ScoreCastFuncReturn]]: ...
++    @overload
++    def zrange(  # type: ignore[overload-overlap]
++        self,
++        name: _Key,
++        start: int,
++        end: int,
++        *,
++        withscores: Literal[True],
++        score_cast_func: Callable[[_StrType], float] = ...,
++        byscore: bool = False,
++        bylex: bool = False,
++        offset: int | None = None,
++        num: int | None = None,
++    ) -> list[tuple[_StrType, float]]: ...
++    @overload
++    def zrange(
++        self,
++        name: _Key,
++        start: int,
++        end: int,
++        desc: bool = False,
++        withscores: bool = False,
++        score_cast_func: Callable[[_StrType], Any] = ...,
++        byscore: bool = False,
++        bylex: bool = False,
++        offset: int | None = None,
++        num: int | None = None,
++    ) -> list[_StrType]: ...
++    @overload
++    def zrevrange(
++        self,
++        name: _Key,
++        start: int,
++        end: int,
++        withscores: Literal[True],
++        score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn],
++    ) -> list[tuple[_StrType, _ScoreCastFuncReturn]]: ...
++    @overload
++    def zrevrange(  # type: ignore[overload-overlap]
++        self, name: _Key, start: int, end: int, withscores: Literal[True]
++    ) -> list[tuple[_StrType, float]]: ...
++    @overload
++    def zrevrange(
++        self, name: _Key, start: int, end: int, withscores: bool = False, score_cast_func: Callable[[Any], Any] = ...
++    ) -> list[_StrType]: ...
++    def zrangestore(
++        self,
++        dest,
++        name,
++        start,
++        end,
++        byscore: bool = False,
++        bylex: bool = False,
++        desc: bool = False,
++        offset: Incomplete | None = None,
++        num: Incomplete | None = None,
++    ): ...
++    def zrangebylex(
++        self, name: _Key, min: _Value, max: _Value, start: int | None = None, num: int | None = None
++    ) -> list[_StrType]: ...
++    def zrevrangebylex(
++        self, name: _Key, max: _Value, min: _Value, start: int | None = None, num: int | None = None
++    ) -> list[_StrType]: ...
++    @overload
++    def zrangebyscore(  # type: ignore[overload-overlap]
++        self,
++        name: _Key,
++        min: _Value,
++        max: _Value,
++        start: int | None = None,
++        num: int | None = None,
++        *,
++        withscores: Literal[True],
++        score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn],
++    ) -> list[tuple[_StrType, _ScoreCastFuncReturn]]: ...
++    @overload
++    def zrangebyscore(  # type: ignore[overload-overlap]
++        self, name: _Key, min: _Value, max: _Value, start: int | None = None, num: int | None = None, *, withscores: Literal[True]
++    ) -> list[tuple[_StrType, float]]: ...
++    @overload
++    def zrangebyscore(
++        self,
++        name: _Key,
++        min: _Value,
++        max: _Value,
++        start: int | None = None,
++        num: int | None = None,
++        withscores: bool = False,
++        score_cast_func: Callable[[_StrType], Any] = ...,
++    ) -> list[_StrType]: ...
++    @overload
++    def zrevrangebyscore(  # type: ignore[overload-overlap]
++        self,
++        name: _Key,
++        max: _Value,
++        min: _Value,
++        start: int | None = None,
++        num: int | None = None,
++        *,
++        withscores: Literal[True],
++        score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn],
++    ) -> list[tuple[_StrType, _ScoreCastFuncReturn]]: ...
++    @overload
++    def zrevrangebyscore(  # type: ignore[overload-overlap]
++        self, name: _Key, max: _Value, min: _Value, start: int | None = None, num: int | None = None, *, withscores: Literal[True]
++    ) -> list[tuple[_StrType, float]]: ...
++    @overload
++    def zrevrangebyscore(
++        self,
++        name: _Key,
++        max: _Value,
++        min: _Value,
++        start: int | None = None,
++        num: int | None = None,
++        withscores: bool = False,
++        score_cast_func: Callable[[_StrType], Any] = ...,
++    ) -> list[_StrType]: ...
++    def zrank(self, name: _Key, value: _Value, withscore: bool = False) -> int | None: ...
++    def zrem(self, name: _Key, *values: _Value) -> int: ...
++    def zremrangebylex(self, name: _Key, min: _Value, max: _Value) -> int: ...
++    def zremrangebyrank(self, name: _Key, min: int, max: int) -> int: ...
++    def zremrangebyscore(self, name: _Key, min: _Value, max: _Value) -> int: ...
++    def zrevrank(self, name: _Key, value: _Value, withscore: bool = False) -> int | None: ...
++    def zscore(self, name: _Key, value: _Value) -> float | None: ...
++    def zunion(self, keys, aggregate: Incomplete | None = None, withscores: bool = False): ...
++    def zunionstore(self, dest: _Key, keys: Iterable[_Key], aggregate: Literal["SUM", "MIN", "MAX"] | None = None) -> int: ...
++    def zmscore(self, key, members): ...
++
++class AsyncSortedSetCommands(Generic[_StrType]):
++    async def zadd(
++        self,
++        name: _Key,
++        mapping: Mapping[_Key, _Value],
++        nx: bool = False,
++        xx: bool = False,
++        ch: bool = False,
++        incr: bool = False,
++        gt: Incomplete | None = False,
++        lt: Incomplete | None = False,
++    ) -> int: ...
++    async def zcard(self, name: _Key) -> int: ...
++    async def zcount(self, name: _Key, min: _Value, max: _Value) -> int: ...
++    async def zdiff(self, keys, withscores: bool = False): ...
++    async def zdiffstore(self, dest, keys): ...
++    async def zincrby(self, name: _Key, amount: float, value: _Value) -> float: ...
++    async def zinter(self, keys, aggregate: Incomplete | None = None, withscores: bool = False): ...
++    async def zinterstore(
++        self, dest: _Key, keys: Iterable[_Key], aggregate: Literal["SUM", "MIN", "MAX"] | None = None
++    ) -> int: ...
++    async def zlexcount(self, name: _Key, min: _Value, max: _Value) -> int: ...
++    async def zpopmax(self, name: _Key, count: int | None = None) -> list[tuple[_StrType, float]]: ...
++    async def zpopmin(self, name: _Key, count: int | None = None) -> list[tuple[_StrType, float]]: ...
++    async def zrandmember(self, key, count: Incomplete | None = None, withscores: bool = False): ...
++    @overload
++    async def bzpopmax(self, keys: _Key | Iterable[_Key], timeout: Literal[0] = 0) -> tuple[_StrType, _StrType, float]: ...
++    @overload
++    async def bzpopmax(self, keys: _Key | Iterable[_Key], timeout: float) -> tuple[_StrType, _StrType, float] | None: ...
++    @overload
++    async def bzpopmin(self, keys: _Key | Iterable[_Key], timeout: Literal[0] = 0) -> tuple[_StrType, _StrType, float]: ...
++    @overload
++    async def bzpopmin(self, keys: _Key | Iterable[_Key], timeout: float) -> tuple[_StrType, _StrType, float] | None: ...
++    @overload
++    async def zrange(  # type: ignore[overload-overlap]
++        self,
++        name: _Key,
++        start: int,
++        end: int,
++        desc: bool,
++        withscores: Literal[True],
++        score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn],
++        byscore: bool = False,
++        bylex: bool = False,
++        offset: int | None = None,
++        num: int | None = None,
++    ) -> list[tuple[_StrType, _ScoreCastFuncReturn]]: ...
++    @overload
++    async def zrange(  # type: ignore[overload-overlap]
++        self,
++        name: _Key,
++        start: int,
++        end: int,
++        desc: bool,
++        withscores: Literal[True],
++        score_cast_func: Callable[[_StrType], float] = ...,
++        byscore: bool = False,
++        bylex: bool = False,
++        offset: int | None = None,
++        num: int | None = None,
++    ) -> list[tuple[_StrType, float]]: ...
++    @overload
++    async def zrange(  # type: ignore[overload-overlap]
++        self,
++        name: _Key,
++        start: int,
++        end: int,
++        desc: bool = False,
++        *,
++        withscores: Literal[True],
++        score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn],
++        byscore: bool = False,
++        bylex: bool = False,
++        offset: int | None = None,
++        num: int | None = None,
++    ) -> list[tuple[_StrType, _ScoreCastFuncReturn]]: ...
++    @overload
++    async def zrange(  # type: ignore[overload-overlap]
++        self,
++        name: _Key,
++        start: int,
++        end: int,
++        desc: bool = False,
++        *,
++        withscores: Literal[True],
++        score_cast_func: Callable[[_StrType], float] = ...,
++        byscore: bool = False,
++        bylex: bool = False,
++        offset: int | None = None,
++        num: int | None = None,
++    ) -> list[tuple[_StrType, float]]: ...
++    @overload
++    async def zrange(
++        self,
++        name: _Key,
++        start: int,
++        end: int,
++        desc: bool = False,
++        withscores: bool = False,
++        score_cast_func: Callable[[_StrType], Any] = ...,
++        byscore: bool = False,
++        bylex: bool = False,
++        offset: int | None = None,
++        num: int | None = None,
++    ) -> list[_StrType]: ...
++    @overload
++    async def zrevrange(
++        self,
++        name: _Key,
++        start: int,
++        end: int,
++        withscores: Literal[True],
++        score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn],
++    ) -> list[tuple[_StrType, _ScoreCastFuncReturn]]: ...
++    @overload
++    async def zrevrange(  # type: ignore[overload-overlap]
++        self, name: _Key, start: int, end: int, withscores: Literal[True]
++    ) -> list[tuple[_StrType, float]]: ...
++    @overload
++    async def zrevrange(
++        self, name: _Key, start: int, end: int, withscores: bool = False, score_cast_func: Callable[[Any], Any] = ...
++    ) -> list[_StrType]: ...
++    async def zrangestore(
++        self,
++        dest,
++        name,
++        start,
++        end,
++        byscore: bool = False,
++        bylex: bool = False,
++        desc: bool = False,
++        offset: Incomplete | None = None,
++        num: Incomplete | None = None,
++    ): ...
++    async def zrangebylex(
++        self, name: _Key, min: _Value, max: _Value, start: int | None = None, num: int | None = None
++    ) -> list[_StrType]: ...
++    async def zrevrangebylex(
++        self, name: _Key, max: _Value, min: _Value, start: int | None = None, num: int | None = None
++    ) -> list[_StrType]: ...
++    @overload
++    async def zrangebyscore(  # type: ignore[overload-overlap]
++        self,
++        name: _Key,
++        min: _Value,
++        max: _Value,
++        start: int | None = None,
++        num: int | None = None,
++        *,
++        withscores: Literal[True],
++        score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn],
++    ) -> list[tuple[_StrType, _ScoreCastFuncReturn]]: ...
++    @overload
++    async def zrangebyscore(  # type: ignore[overload-overlap]
++        self, name: _Key, min: _Value, max: _Value, start: int | None = None, num: int | None = None, *, withscores: Literal[True]
++    ) -> list[tuple[_StrType, float]]: ...
++    @overload
++    async def zrangebyscore(
++        self,
++        name: _Key,
++        min: _Value,
++        max: _Value,
++        start: int | None = None,
++        num: int | None = None,
++        withscores: bool = False,
++        score_cast_func: Callable[[_StrType], Any] = ...,
++    ) -> list[_StrType]: ...
++    @overload
++    async def zrevrangebyscore(  # type: ignore[overload-overlap]
++        self,
++        name: _Key,
++        max: _Value,
++        min: _Value,
++        start: int | None = None,
++        num: int | None = None,
++        *,
++        withscores: Literal[True],
++        score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn],
++    ) -> list[tuple[_StrType, _ScoreCastFuncReturn]]: ...
++    @overload
++    async def zrevrangebyscore(  # type: ignore[overload-overlap]
++        self, name: _Key, max: _Value, min: _Value, start: int | None = None, num: int | None = None, *, withscores: Literal[True]
++    ) -> list[tuple[_StrType, float]]: ...
++    @overload
++    async def zrevrangebyscore(
++        self,
++        name: _Key,
++        max: _Value,
++        min: _Value,
++        start: int | None = None,
++        num: int | None = None,
++        withscores: bool = False,
++        score_cast_func: Callable[[_StrType], Any] = ...,
++    ) -> list[_StrType]: ...
++    async def zrank(self, name: _Key, value: _Value, withscore: bool = False) -> int | None: ...
++    async def zrem(self, name: _Key, *values: _Value) -> int: ...
++    async def zremrangebylex(self, name: _Key, min: _Value, max: _Value) -> int: ...
++    async def zremrangebyrank(self, name: _Key, min: int, max: int) -> int: ...
++    async def zremrangebyscore(self, name: _Key, min: _Value, max: _Value) -> int: ...
++    async def zrevrank(self, name: _Key, value: _Value, withscore: bool = False) -> int | None: ...
++    async def zscore(self, name: _Key, value: _Value) -> float | None: ...
++    async def zunion(self, keys, aggregate: Incomplete | None = None, withscores: bool = False): ...
++    async def zunionstore(
++        self, dest: _Key, keys: Iterable[_Key], aggregate: Literal["SUM", "MIN", "MAX"] | None = None
++    ) -> int: ...
++    async def zmscore(self, key, members): ...
++
++class HyperlogCommands:
++    def pfadd(self, name: _Key, *values: _Value) -> int: ...
++    def pfcount(self, name: _Key) -> int: ...
++    def pfmerge(self, dest: _Key, *sources: _Key) -> bool: ...
++
++class AsyncHyperlogCommands:
++    async def pfadd(self, name: _Key, *values: _Value) -> int: ...
++    async def pfcount(self, name: _Key) -> int: ...
++    async def pfmerge(self, dest: _Key, *sources: _Key) -> bool: ...
++
++class HashCommands(Generic[_StrType]):
++    def hdel(self, name: _Key, *keys: _Key) -> int: ...
++    def hexists(self, name: _Key, key: _Key) -> bool: ...
++    def hget(self, name: _Key, key: _Key) -> _StrType | None: ...
++    def hgetall(self, name: _Key) -> dict[_StrType, _StrType]: ...
++    def hincrby(self, name: _Key, key: _Key, amount: int = 1) -> int: ...
++    def hincrbyfloat(self, name: _Key, key: _Key, amount: float = 1.0) -> float: ...
++    def hkeys(self, name: _Key) -> list[_StrType]: ...
++    def hlen(self, name: _Key) -> int: ...
++    @overload
++    def hset(
++        self, name: _Key, key: _Key, value: _Value, mapping: Mapping[_Key, _Value] | None = None, items: Incomplete | None = None
++    ) -> int: ...
++    @overload
++    def hset(
++        self, name: _Key, key: None, value: None, mapping: Mapping[_Key, _Value], items: Incomplete | None = None
++    ) -> int: ...
++    @overload
++    def hset(self, name: _Key, *, mapping: Mapping[_Key, _Value], items: Incomplete | None = None) -> int: ...
++    def hsetnx(self, name: _Key, key: _Key, value: _Value) -> int: ...
++    def hmset(self, name: _Key, mapping: Mapping[_Key, _Value]) -> bool: ...
++    def hmget(self, name: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> list[_StrType | None]: ...
++    def hvals(self, name: _Key) -> list[_StrType]: ...
++    def hstrlen(self, name, key): ...
++
++class AsyncHashCommands(Generic[_StrType]):
++    async def hdel(self, name: _Key, *keys: _Key) -> int: ...
++    async def hexists(self, name: _Key, key: _Key) -> bool: ...
++    async def hget(self, name: _Key, key: _Key) -> _StrType | None: ...
++    async def hgetall(self, name: _Key) -> dict[_StrType, _StrType]: ...
++    async def hincrby(self, name: _Key, key: _Key, amount: int = 1) -> int: ...
++    async def hincrbyfloat(self, name: _Key, key: _Key, amount: float = 1.0) -> float: ...
++    async def hkeys(self, name: _Key) -> list[_StrType]: ...
++    async def hlen(self, name: _Key) -> int: ...
++    @overload
++    async def hset(
++        self, name: _Key, key: _Key, value: _Value, mapping: Mapping[_Key, _Value] | None = None, items: Incomplete | None = None
++    ) -> int: ...
++    @overload
++    async def hset(
++        self, name: _Key, key: None, value: None, mapping: Mapping[_Key, _Value], items: Incomplete | None = None
++    ) -> int: ...
++    @overload
++    async def hset(self, name: _Key, *, mapping: Mapping[_Key, _Value], items: Incomplete | None = None) -> int: ...
++    async def hsetnx(self, name: _Key, key: _Key, value: _Value) -> int: ...
++    async def hmset(self, name: _Key, mapping: Mapping[_Key, _Value]) -> bool: ...
++    async def hmget(self, name: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> list[_StrType | None]: ...
++    async def hvals(self, name: _Key) -> list[_StrType]: ...
++    async def hstrlen(self, name, key): ...
++
++class AsyncScript:
++    def __init__(self, registered_client: AsyncValkey[Any], script: ScriptTextT) -> None: ...
++    async def __call__(
++        self, keys: Sequence[KeyT] | None = None, args: Iterable[EncodableT] | None = None, client: AsyncValkey[Any] | None = None
++    ): ...
++
++class PubSubCommands:
++    def publish(self, channel: _Key, message: _Key, **kwargs: _CommandOptions) -> int: ...
++    def pubsub_channels(self, pattern: _Key = "*", **kwargs: _CommandOptions) -> list[str]: ...
++    def pubsub_numpat(self, **kwargs: _CommandOptions) -> int: ...
++    def pubsub_numsub(self, *args: _Key, **kwargs: _CommandOptions) -> list[tuple[str, int]]: ...
++
++class AsyncPubSubCommands:
++    async def publish(self, channel: _Key, message: _Key, **kwargs: _CommandOptions) -> int: ...
++    async def pubsub_channels(self, pattern: _Key = "*", **kwargs: _CommandOptions) -> list[str]: ...
++    async def pubsub_numpat(self, **kwargs: _CommandOptions) -> int: ...
++    async def pubsub_numsub(self, *args: _Key, **kwargs: _CommandOptions) -> list[tuple[str, int]]: ...
++
++class ScriptCommands(Generic[_StrType]):
++    def eval(self, script, numkeys, *keys_and_args): ...
++    def evalsha(self, sha, numkeys, *keys_and_args): ...
++    def script_exists(self, *args): ...
++    def script_debug(self, *args): ...
++    def script_flush(self, sync_type: Incomplete | None = None): ...
++    def script_kill(self): ...
++    def script_load(self, script): ...
++    def register_script(self, script: str | _StrType) -> Script: ...
++
++class AsyncScriptCommands(Generic[_StrType]):
++    async def eval(self, script, numkeys, *keys_and_args): ...
++    async def evalsha(self, sha, numkeys, *keys_and_args): ...
++    async def script_exists(self, *args): ...
++    async def script_debug(self, *args): ...
++    async def script_flush(self, sync_type: Incomplete | None = None): ...
++    async def script_kill(self): ...
++    async def script_load(self, script): ...
++    def register_script(self, script: ScriptTextT) -> AsyncScript: ...
++
++class GeoCommands:
++    def geoadd(self, name, values, nx: bool = False, xx: bool = False, ch: bool = False): ...
++    def geodist(self, name, place1, place2, unit: Incomplete | None = None): ...
++    def geohash(self, name, *values): ...
++    def geopos(self, name, *values): ...
++    def georadius(
++        self,
++        name,
++        longitude,
++        latitude,
++        radius,
++        unit: Incomplete | None = None,
++        withdist: bool = False,
++        withcoord: bool = False,
++        withhash: bool = False,
++        count: Incomplete | None = None,
++        sort: Incomplete | None = None,
++        store: Incomplete | None = None,
++        store_dist: Incomplete | None = None,
++        any: bool = False,
++    ): ...
++    def georadiusbymember(
++        self,
++        name,
++        member,
++        radius,
++        unit: Incomplete | None = None,
++        withdist: bool = False,
++        withcoord: bool = False,
++        withhash: bool = False,
++        count: Incomplete | None = None,
++        sort: Incomplete | None = None,
++        store: Incomplete | None = None,
++        store_dist: Incomplete | None = None,
++        any: bool = False,
++    ): ...
++    def geosearch(
++        self,
++        name,
++        member: Incomplete | None = None,
++        longitude: Incomplete | None = None,
++        latitude: Incomplete | None = None,
++        unit: str = "m",
++        radius: Incomplete | None = None,
++        width: Incomplete | None = None,
++        height: Incomplete | None = None,
++        sort: Incomplete | None = None,
++        count: Incomplete | None = None,
++        any: bool = False,
++        withcoord: bool = False,
++        withdist: bool = False,
++        withhash: bool = False,
++    ): ...
++    def geosearchstore(
++        self,
++        dest,
++        name,
++        member: Incomplete | None = None,
++        longitude: Incomplete | None = None,
++        latitude: Incomplete | None = None,
++        unit: str = "m",
++        radius: Incomplete | None = None,
++        width: Incomplete | None = None,
++        height: Incomplete | None = None,
++        sort: Incomplete | None = None,
++        count: Incomplete | None = None,
++        any: bool = False,
++        storedist: bool = False,
++    ): ...
++
++class AsyncGeoCommands:
++    async def geoadd(self, name, values, nx: bool = False, xx: bool = False, ch: bool = False): ...
++    async def geodist(self, name, place1, place2, unit: Incomplete | None = None): ...
++    async def geohash(self, name, *values): ...
++    async def geopos(self, name, *values): ...
++    async def georadius(
++        self,
++        name,
++        longitude,
++        latitude,
++        radius,
++        unit: Incomplete | None = None,
++        withdist: bool = False,
++        withcoord: bool = False,
++        withhash: bool = False,
++        count: Incomplete | None = None,
++        sort: Incomplete | None = None,
++        store: Incomplete | None = None,
++        store_dist: Incomplete | None = None,
++        any: bool = False,
++    ): ...
++    async def georadiusbymember(
++        self,
++        name,
++        member,
++        radius,
++        unit: Incomplete | None = None,
++        withdist: bool = False,
++        withcoord: bool = False,
++        withhash: bool = False,
++        count: Incomplete | None = None,
++        sort: Incomplete | None = None,
++        store: Incomplete | None = None,
++        store_dist: Incomplete | None = None,
++        any: bool = False,
++    ): ...
++    async def geosearch(
++        self,
++        name,
++        member: Incomplete | None = None,
++        longitude: Incomplete | None = None,
++        latitude: Incomplete | None = None,
++        unit: str = "m",
++        radius: Incomplete | None = None,
++        width: Incomplete | None = None,
++        height: Incomplete | None = None,
++        sort: Incomplete | None = None,
++        count: Incomplete | None = None,
++        any: bool = False,
++        withcoord: bool = False,
++        withdist: bool = False,
++        withhash: bool = False,
++    ): ...
++    async def geosearchstore(
++        self,
++        dest,
++        name,
++        member: Incomplete | None = None,
++        longitude: Incomplete | None = None,
++        latitude: Incomplete | None = None,
++        unit: str = "m",
++        radius: Incomplete | None = None,
++        width: Incomplete | None = None,
++        height: Incomplete | None = None,
++        sort: Incomplete | None = None,
++        count: Incomplete | None = None,
++        any: bool = False,
++        storedist: bool = False,
++    ): ...
++
++class ModuleCommands:
++    def module_load(self, path, *args): ...
++    def module_loadex(self, path: str, options: list[str] | None = None, args: list[str] | None = None): ...
++    def module_unload(self, name): ...
++    def module_list(self): ...
++    def command_info(self): ...
++    def command_count(self): ...
++    def command_list(self, module: str | None = None, category: str | None = None, pattern: str | None = None): ...
++    def command_getkeysandflags(self, *args: str): ...
++    def command_getkeys(self, *args): ...
++    def command(self): ...
++
++class Script:
++    def __init__(self, registered_client, script) -> None: ...
++    def __call__(self, keys=[], args=[], client: Incomplete | None = None): ...
++
++class BitFieldOperation:
++    def __init__(self, client, key, default_overflow: Incomplete | None = None): ...
++    def reset(self) -> None: ...
++    def overflow(self, overflow): ...
++    def incrby(self, fmt, offset, increment, overflow: Incomplete | None = None): ...
++    def get(self, fmt, offset): ...
++    def set(self, fmt, offset, value): ...
++    @property
++    def command(self): ...
++    def execute(self): ...
++
++class AsyncModuleCommands(ModuleCommands):
++    async def module_loadex(self, path: str, options: list[str] | None = None, args: list[str] | None = None): ...
++    async def command_info(self) -> None: ...
++    async def command_list(self, module: str | None = None, category: str | None = None, pattern: str | None = None): ...
++    async def command_getkeysandflags(self, *args: str): ...
++
++class ClusterCommands:
++    def cluster(self, cluster_arg: str, *args, **kwargs: _CommandOptions): ...
++    def readwrite(self, **kwargs: _CommandOptions) -> bool: ...
++    def readonly(self, **kwargs: _CommandOptions) -> bool: ...
++
++class AsyncClusterCommands:
++    async def cluster(self, cluster_arg: str, *args, **kwargs: _CommandOptions): ...
++    async def readwrite(self, **kwargs: _CommandOptions) -> bool: ...
++    async def readonly(self, **kwargs: _CommandOptions) -> bool: ...
++
++class FunctionCommands:
++    def function_load(self, code: str, replace: bool | None = False) -> Awaitable[str] | str: ...
++    def function_delete(self, library: str) -> Awaitable[str] | str: ...
++    def function_flush(self, mode: str = "SYNC") -> Awaitable[str] | str: ...
++    def function_list(self, library: str | None = "*", withcode: bool | None = False) -> Awaitable[list[Any]] | list[Any]: ...
++    def fcall(self, function, numkeys: int, *keys_and_args: list[Any] | None) -> Awaitable[str] | str: ...
++    def fcall_ro(self, function, numkeys: int, *keys_and_args: list[Any] | None) -> Awaitable[str] | str: ...
++    def function_dump(self) -> Awaitable[str] | str: ...
++    def function_restore(self, payload: str, policy: str | None = "APPEND") -> Awaitable[str] | str: ...
++    def function_kill(self) -> Awaitable[str] | str: ...
++    def function_stats(self) -> Awaitable[list[Any]] | list[Any]: ...
++
++class AsyncFunctionCommands:
++    async def function_load(self, code: str, replace: bool | None = False) -> Awaitable[str] | str: ...
++    async def function_delete(self, library: str) -> Awaitable[str] | str: ...
++    async def function_flush(self, mode: str = "SYNC") -> Awaitable[str] | str: ...
++    async def function_list(
++        self, library: str | None = "*", withcode: bool | None = False
++    ) -> Awaitable[list[Any]] | list[Any]: ...
++    async def fcall(self, function, numkeys: int, *keys_and_args: list[Any] | None) -> Awaitable[str] | str: ...
++    async def fcall_ro(self, function, numkeys: int, *keys_and_args: list[Any] | None) -> Awaitable[str] | str: ...
++    async def function_dump(self) -> Awaitable[str] | str: ...
++    async def function_restore(self, payload: str, policy: str | None = "APPEND") -> Awaitable[str] | str: ...
++    async def function_kill(self) -> Awaitable[str] | str: ...
++    async def function_stats(self) -> Awaitable[list[Any]] | list[Any]: ...
++
++class DataAccessCommands(
++    BasicKeyCommands[_StrType],
++    HyperlogCommands,
++    HashCommands[_StrType],
++    GeoCommands,
++    ListCommands[_StrType],
++    ScanCommands[_StrType],
++    SetCommands[_StrType],
++    StreamCommands,
++    SortedSetCommands[_StrType],
++): ...
++class AsyncDataAccessCommands(
++    AsyncBasicKeyCommands[_StrType],
++    AsyncHyperlogCommands,
++    AsyncHashCommands[_StrType],
++    AsyncGeoCommands,
++    AsyncListCommands[_StrType],
++    AsyncScanCommands[_StrType],
++    AsyncSetCommands[_StrType],
++    AsyncStreamCommands,
++    AsyncSortedSetCommands[_StrType],
++): ...
++class CoreCommands(
++    ACLCommands[_StrType],
++    ClusterCommands,
++    DataAccessCommands[_StrType],
++    ManagementCommands,
++    ModuleCommands,
++    PubSubCommands,
++    ScriptCommands[_StrType],
++): ...
++class AsyncCoreCommands(
++    AsyncACLCommands[_StrType],
++    AsyncClusterCommands,
++    AsyncDataAccessCommands[_StrType],
++    AsyncManagementCommands,
++    AsyncModuleCommands,
++    AsyncPubSubCommands,
++    AsyncScriptCommands[_StrType],
++    AsyncFunctionCommands,
++): ...
+diff --git a/valkey/commands/graph/__init__.pyi b/valkey/commands/graph/__init__.pyi
+new file mode 100644
+index 0000000..222db4e
+--- /dev/null
++++ b/valkey/commands/graph/__init__.pyi
+@@ -0,0 +1,45 @@
++from typing import Any
++
++from .commands import GraphCommands as GraphCommands
++from .edge import Edge as Edge
++from .node import Node as Node
++from .path import Path as Path
++
++class Graph(GraphCommands):
++    NAME: Any
++    client: Any
++    execute_command: Any
++    nodes: Any
++    edges: Any
++    version: int
++    def __init__(self, client, name=...) -> None: ...
++    @property
++    def name(self): ...
++    def get_label(self, idx): ...
++    def get_relation(self, idx): ...
++    def get_property(self, idx): ...
++    def add_node(self, node) -> None: ...
++    def add_edge(self, edge) -> None: ...
++    def call_procedure(self, procedure, *args, read_only: bool = False, **kwargs): ...
++    def labels(self): ...
++    def relationship_types(self): ...
++    def property_keys(self): ...
++
++
++class AsyncGraph(GraphCommands):
++    NAME: Any
++    client: Any
++    execute_command: Any
++    nodes: Any
++    edges: Any
++    version: int
++    def __init__(self, client, name=...) -> None: ...
++    async def get_label(self, idx): ...
++    async def get_relation(self, idx): ...
++    async def get_property(self, idx): ...
++    async def add_node(self, node) -> None: ...
++    async def add_edge(self, edge) -> None: ...
++    async def call_procedure(self, procedure, *args, read_only: bool = False, **kwargs): ...
++    async def labels(self): ...
++    async def relationship_types(self): ...
++    async def property_keys(self): ...
+diff --git a/valkey/commands/graph/commands.pyi b/valkey/commands/graph/commands.pyi
+new file mode 100644
+index 0000000..b57418d
+--- /dev/null
++++ b/valkey/commands/graph/commands.pyi
+@@ -0,0 +1,25 @@
++from _typeshed import Incomplete
++from typing import Any
++
++class GraphCommands:
++    def commit(self): ...
++    version: Any
++    def query(
++        self,
++        q,
++        params: Incomplete | None = None,
++        timeout: Incomplete | None = None,
++        read_only: bool = False,
++        profile: bool = False,
++    ): ...
++    def merge(self, pattern): ...
++    def delete(self): ...
++    nodes: Any
++    edges: Any
++    def flush(self) -> None: ...
++    def explain(self, query, params: Incomplete | None = None): ...
++    def bulk(self, **kwargs) -> None: ...
++    def profile(self, query): ...
++    def slowlog(self): ...
++    def config(self, name, value: Incomplete | None = None, set: bool = False): ...
++    def list_keys(self): ...
+diff --git a/valkey/commands/graph/edge.pyi b/valkey/commands/graph/edge.pyi
+new file mode 100644
+index 0000000..3bd36b6
+--- /dev/null
++++ b/valkey/commands/graph/edge.pyi
+@@ -0,0 +1,14 @@
++from _typeshed import Incomplete
++from typing import Any
++
++class Edge:
++    id: Any
++    relation: Any
++    properties: Any
++    src_node: Any
++    dest_node: Any
++    def __init__(
++        self, src_node, relation, dest_node, edge_id: Incomplete | None = None, properties: Incomplete | None = None
++    ) -> None: ...
++    def to_string(self): ...
++    def __eq__(self, rhs): ...
+diff --git a/valkey/commands/graph/exceptions.pyi b/valkey/commands/graph/exceptions.pyi
+new file mode 100644
+index 0000000..6069e05
+--- /dev/null
++++ b/valkey/commands/graph/exceptions.pyi
+@@ -0,0 +1,5 @@
++from typing import Any
++
++class VersionMismatchException(Exception):
++    version: Any
++    def __init__(self, version) -> None: ...
+diff --git a/valkey/commands/graph/execution_plan.py b/valkey/commands/graph/execution_plan.py
+index 179a80c..0f07427 100644
+--- a/valkey/commands/graph/execution_plan.py
++++ b/valkey/commands/graph/execution_plan.py
+@@ -166,10 +166,10 @@ class ExecutionPlan:
+             args.pop(0)
+             if len(args) > 0 and "Records produced" in args[-1]:
+                 records_produced = int(
+-                    re.search("Records produced: (\\d+)", args[-1]).group(1)
++                    re.search("Records produced: (\\d+)", args[-1]).group(1)  # type: ignore[union-attr]
+                 )
+                 execution_time = float(
+-                    re.search("Execution time: (\\d+.\\d+) ms", args[-1]).group(1)
++                    re.search("Execution time: (\\d+.\\d+) ms", args[-1]).group(1)  # type: ignore[union-attr]
+                 )
+                 profile_stats = ProfileStats(records_produced, execution_time)
+                 args.pop(-1)
+@@ -194,7 +194,7 @@ class ExecutionPlan:
+                 # if the operation is child of the current operation
+                 # add it as child and set as current operation
+                 child = _create_operation(current_op.split("|"))
+-                current.append_child(child)
++                current.append_child(child)  # type: ignore[union-attr]
+                 stack.append(current)
+                 current = child
+                 level += 1
+diff --git a/valkey/commands/graph/node.pyi b/valkey/commands/graph/node.pyi
+new file mode 100644
+index 0000000..e7a6553
+--- /dev/null
++++ b/valkey/commands/graph/node.pyi
+@@ -0,0 +1,18 @@
++from _typeshed import Incomplete
++from typing import Any
++
++class Node:
++    id: Any
++    alias: Any
++    label: Any
++    labels: Any
++    properties: Any
++    def __init__(
++        self,
++        node_id: Incomplete | None = None,
++        alias: Incomplete | None = None,
++        label: str | list[str] | None = None,
++        properties: Incomplete | None = None,
++    ) -> None: ...
++    def to_string(self): ...
++    def __eq__(self, rhs): ...
+diff --git a/valkey/commands/graph/path.pyi b/valkey/commands/graph/path.pyi
+new file mode 100644
+index 0000000..69106f8
+--- /dev/null
++++ b/valkey/commands/graph/path.pyi
+@@ -0,0 +1,18 @@
++from typing import Any
++
++class Path:
++    append_type: Any
++    def __init__(self, nodes, edges) -> None: ...
++    @classmethod
++    def new_empty_path(cls): ...
++    def nodes(self): ...
++    def edges(self): ...
++    def get_node(self, index): ...
++    def get_relationship(self, index): ...
++    def first_node(self): ...
++    def last_node(self): ...
++    def edge_count(self): ...
++    def nodes_count(self): ...
++    def add_node(self, node): ...
++    def add_edge(self, edge): ...
++    def __eq__(self, other): ...
+diff --git a/valkey/commands/graph/query_result.pyi b/valkey/commands/graph/query_result.pyi
+new file mode 100644
+index 0000000..d9f8b51
+--- /dev/null
++++ b/valkey/commands/graph/query_result.pyi
+@@ -0,0 +1,74 @@
++from typing import Any, ClassVar, Literal
++
++LABELS_ADDED: str
++NODES_CREATED: str
++NODES_DELETED: str
++RELATIONSHIPS_DELETED: str
++PROPERTIES_SET: str
++RELATIONSHIPS_CREATED: str
++INDICES_CREATED: str
++INDICES_DELETED: str
++CACHED_EXECUTION: str
++INTERNAL_EXECUTION_TIME: str
++STATS: Any
++
++class ResultSetColumnTypes:
++    COLUMN_UNKNOWN: ClassVar[Literal[0]]
++    COLUMN_SCALAR: ClassVar[Literal[1]]
++    COLUMN_NODE: ClassVar[Literal[2]]
++    COLUMN_RELATION: ClassVar[Literal[3]]
++
++class ResultSetScalarTypes:
++    VALUE_UNKNOWN: ClassVar[Literal[0]]
++    VALUE_NULL: ClassVar[Literal[1]]
++    VALUE_STRING: ClassVar[Literal[2]]
++    VALUE_INTEGER: ClassVar[Literal[3]]
++    VALUE_BOOLEAN: ClassVar[Literal[4]]
++    VALUE_DOUBLE: ClassVar[Literal[5]]
++    VALUE_ARRAY: ClassVar[Literal[6]]
++    VALUE_EDGE: ClassVar[Literal[7]]
++    VALUE_NODE: ClassVar[Literal[8]]
++    VALUE_PATH: ClassVar[Literal[9]]
++    VALUE_MAP: ClassVar[Literal[10]]
++    VALUE_POINT: ClassVar[Literal[11]]
++
++class QueryResult:
++    graph: Any
++    header: Any
++    result_set: Any
++    def __init__(self, graph, response, profile: bool = False) -> None: ...
++    def parse_results(self, raw_result_set) -> None: ...
++    statistics: Any
++    def parse_statistics(self, raw_statistics) -> None: ...
++    def parse_header(self, raw_result_set): ...
++    def parse_records(self, raw_result_set): ...
++    def parse_entity_properties(self, props): ...
++    def parse_string(self, cell): ...
++    def parse_node(self, cell): ...
++    def parse_edge(self, cell): ...
++    def parse_path(self, cell): ...
++    def parse_map(self, cell): ...
++    def parse_point(self, cell): ...
++    def parse_scalar(self, cell): ...
++    def parse_profile(self, response) -> None: ...
++    def is_empty(self): ...
++    @property
++    def labels_added(self): ...
++    @property
++    def nodes_created(self): ...
++    @property
++    def nodes_deleted(self): ...
++    @property
++    def properties_set(self): ...
++    @property
++    def relationships_created(self): ...
++    @property
++    def relationships_deleted(self): ...
++    @property
++    def indices_created(self): ...
++    @property
++    def indices_deleted(self): ...
++    @property
++    def cached_execution(self): ...
++    @property
++    def run_time_ms(self): ...
+diff --git a/valkey/commands/helpers.pyi b/valkey/commands/helpers.pyi
+new file mode 100644
+index 0000000..b4e5ac7
+--- /dev/null
++++ b/valkey/commands/helpers.pyi
+@@ -0,0 +1,10 @@
++def list_or_args(keys, args): ...
++def nativestr(x): ...
++def delist(x): ...
++def parse_to_list(response): ...
++def parse_list_to_dict(response): ...
++def parse_to_dict(response): ...
++def random_string(length: int = 10) -> str: ...
++def quote_string(v): ...
++def decode_dict_keys(obj): ...
++def stringify_param_value(value): ...
+diff --git a/valkey/commands/json/__init__.pyi b/valkey/commands/json/__init__.pyi
+new file mode 100644
+index 0000000..f9e8825
+--- /dev/null
++++ b/valkey/commands/json/__init__.pyi
+@@ -0,0 +1,15 @@
++from _typeshed import Incomplete
++from typing import Any
++
++from ...client import Pipeline as ClientPipeline
++from .commands import JSONCommands
++
++class JSON(JSONCommands):
++    MODULE_CALLBACKS: dict[str, Any]
++    client: Any
++    execute_command: Any
++    MODULE_VERSION: Incomplete | None
++    def __init__(self, client, version: Incomplete | None = None, decoder=..., encoder=...) -> None: ...
++    def pipeline(self, transaction: bool = True, shard_hint: Incomplete | None = None) -> Pipeline: ...
++
++class Pipeline(JSONCommands, ClientPipeline[Incomplete]): ...  # type: ignore[misc]
+diff --git a/valkey/commands/json/commands.pyi b/valkey/commands/json/commands.pyi
+new file mode 100644
+index 0000000..38d4d4c
+--- /dev/null
++++ b/valkey/commands/json/commands.pyi
+@@ -0,0 +1,32 @@
++from _typeshed import Incomplete
++
++class JSONCommands:
++    def arrappend(self, name: str, path: str | None = ".", *args) -> list[int | None]: ...
++    def arrindex(
++        self, name: str, path: str, scalar: int, start: int | None = None, stop: int | None = None
++    ) -> list[int | None]: ...
++    def arrinsert(self, name: str, path: str, index: int, *args) -> list[int | None]: ...
++    def arrlen(self, name: str, path: str | None = ".") -> list[int | None]: ...
++    def arrpop(self, name: str, path: str | None = ".", index: int | None = -1) -> list[str | None]: ...
++    def arrtrim(self, name: str, path: str, start: int, stop: int) -> list[int | None]: ...
++    def type(self, name: str, path: str | None = ".") -> list[str]: ...
++    def resp(self, name: str, path: str | None = ".") -> list[Incomplete]: ...
++    def objkeys(self, name, path="."): ...
++    def objlen(self, name, path="."): ...
++    def numincrby(self, name, path, number): ...
++    def nummultby(self, name, path, number): ...
++    def clear(self, name, path="."): ...
++    def delete(self, key, path="."): ...
++    forget = delete
++    def get(self, name, *args, no_escape: bool = False): ...
++    def mget(self, keys, path): ...
++    def set(self, name, path, obj, nx: bool = False, xx: bool = False, decode_keys: bool = False): ...
++    def set_file(self, name, path, file_name, nx: bool = False, xx: bool = False, decode_keys: bool = False): ...
++    def set_path(self, json_path, root_folder, nx: bool = False, xx: bool = False, decode_keys: bool = False): ...
++    def strlen(self, name, path: Incomplete | None = None): ...
++    def toggle(self, name, path="."): ...
++    def strappend(self, name, value, path="."): ...
++    def debug(self, subcommand, key: Incomplete | None = None, path="."): ...
++    def jsonget(self, *args, **kwargs): ...
++    def jsonmget(self, *args, **kwargs): ...
++    def jsonset(self, *args, **kwargs): ...
+diff --git a/valkey/commands/json/decoders.pyi b/valkey/commands/json/decoders.pyi
+new file mode 100644
+index 0000000..ccea243
+--- /dev/null
++++ b/valkey/commands/json/decoders.pyi
+@@ -0,0 +1,4 @@
++def bulk_of_jsons(d): ...
++def decode_dict_keys(obj): ...
++def unstring(obj): ...
++def decode_list(b): ...
+diff --git a/valkey/commands/json/path.pyi b/valkey/commands/json/path.pyi
+new file mode 100644
+index 0000000..bbc35c4
+--- /dev/null
++++ b/valkey/commands/json/path.pyi
+@@ -0,0 +1,5 @@
++class Path:
++    strPath: str
++    @staticmethod
++    def root_path() -> str: ...
++    def __init__(self, path: str) -> None: ...
+diff --git a/valkey/commands/parser.pyi b/valkey/commands/parser.pyi
+new file mode 100644
+index 0000000..f17afa2
+--- /dev/null
++++ b/valkey/commands/parser.pyi
+@@ -0,0 +1,8 @@
++from valkey.client import AbstractValkey
++from valkey.typing import EncodableT
++
++class CommandsParser:
++    commands: dict[str, str]
++    def __init__(self, valkey_connection: AbstractValkey) -> None: ...
++    def initialize(self, r: AbstractValkey) -> None: ...
++    def get_keys(self, valkey_conn: AbstractValkey, *args: EncodableT) -> list[EncodableT] | None: ...
+diff --git a/valkey/commands/redismodules.pyi b/valkey/commands/redismodules.pyi
+new file mode 100644
+index 0000000..129b2a1
+--- /dev/null
++++ b/valkey/commands/redismodules.pyi
+@@ -0,0 +1,14 @@
++from .json import JSON
++from .search import Search
++from .timeseries import TimeSeries
++
++class ValkeyModuleCommands:
++    def json(self, encoder=..., decoder=...) -> JSON: ...
++    def ft(self, index_name: str = "idx") -> Search: ...
++    def ts(self) -> TimeSeries: ...
++    def bf(self): ...
++    def cf(self): ...
++    def cms(self): ...
++    def topk(self): ...
++    def tdigest(self): ...
++    def graph(self, index_name: str = "idx"): ...
+diff --git a/valkey/commands/search/__init__.pyi b/valkey/commands/search/__init__.pyi
+new file mode 100644
+index 0000000..4776dfc
+--- /dev/null
++++ b/valkey/commands/search/__init__.pyi
+@@ -0,0 +1,40 @@
++from _typeshed import Incomplete
++
++from .commands import SearchCommands
++
++class Search(SearchCommands):
++    class BatchIndexer:
++        def __init__(self, client, chunk_size: int = 1000) -> None: ...
++        def add_document(
++            self,
++            doc_id,
++            nosave: bool = False,
++            score: float = 1.0,
++            payload: Incomplete | None = None,
++            replace: bool = False,
++            partial: bool = False,
++            no_create: bool = False,
++            **fields,
++        ): ...
++        def add_document_hash(self, doc_id, score: float = 1.0, replace: bool = False): ...
++        def commit(self): ...
++
++    def __init__(self, client, index_name: str = "idx") -> None: ...
++
++class AsyncSearch(SearchCommands):
++    class BatchIndexer:
++        def __init__(self, client, chunk_size: int = 1000) -> None: ...
++        async def add_document(
++            self,
++            doc_id,
++            nosave: bool = False,
++            score: float = 1.0,
++            payload: Incomplete | None = None,
++            replace: bool = False,
++            partial: bool = False,
++            no_create: bool = False,
++            **fields,
++        ): ...
++        async def commit(self): ...
++
++    def __init__(self, client, index_name: str = "idx") -> None: ...
+diff --git a/valkey/commands/search/aggregation.py b/valkey/commands/search/aggregation.py
+index 4517238..6c52321 100644
+--- a/valkey/commands/search/aggregation.py
++++ b/valkey/commands/search/aggregation.py
+@@ -22,7 +22,7 @@ class Reducer:
+     See the `valkeyearch.reducers` module for the actual reducers.
+     """
+ 
+-    NAME = None
++    NAME: Union[str, None] = None
+ 
+     def __init__(self, *args: List[str]) -> None:
+         self._args = args
+diff --git a/valkey/commands/search/aggregation.pyi b/valkey/commands/search/aggregation.pyi
+new file mode 100644
+index 0000000..f520033
+--- /dev/null
++++ b/valkey/commands/search/aggregation.pyi
+@@ -0,0 +1,53 @@
++from typing import Any, ClassVar, Literal, Union
++
++FIELDNAME: Any
++
++class Limit:
++    offset: Any
++    count: Any
++    def __init__(self, offset: int = 0, count: int = 0) -> None: ...
++    def build_args(self): ...
++
++class Reducer:
++    NAME: ClassVar[Union[str, None]]
++    def __init__(self, *args) -> None: ...
++    def alias(self, alias): ...
++    @property
++    def args(self): ...
++
++class SortDirection:
++    DIRSTRING: ClassVar[str | None]
++    field: Any
++    def __init__(self, field) -> None: ...
++
++class Asc(SortDirection):
++    DIRSTRING: ClassVar[Literal["ASC"]]
++
++class Desc(SortDirection):
++    DIRSTRING: ClassVar[Literal["DESC"]]
++
++class AggregateRequest:
++    def __init__(self, query: str = "*") -> None: ...
++    def load(self, *fields): ...
++    def group_by(self, fields, *reducers): ...
++    def apply(self, **kwexpr): ...
++    def limit(self, offset, num): ...
++    def sort_by(self, *fields, **kwargs): ...
++    def filter(self, expressions): ...
++    def with_schema(self): ...
++    def verbatim(self): ...
++    def cursor(self, count: int = 0, max_idle: float = 0.0): ...
++    def build_args(self): ...
++
++class Cursor:
++    cid: Any
++    max_idle: int
++    count: int
++    def __init__(self, cid) -> None: ...
++    def build_args(self): ...
++
++class AggregateResult:
++    rows: Any
++    cursor: Any
++    schema: Any
++    def __init__(self, rows, cursor, schema) -> None: ...
+diff --git a/valkey/commands/search/commands.pyi b/valkey/commands/search/commands.pyi
+new file mode 100644
+index 0000000..f8a2baf
+--- /dev/null
++++ b/valkey/commands/search/commands.pyi
+@@ -0,0 +1,111 @@
++from _typeshed import Incomplete
++from collections.abc import Mapping
++from typing import Any, Literal
++from typing_extensions import TypeAlias
++
++from .aggregation import AggregateRequest, AggregateResult, Cursor
++from .query import Query
++from .result import Result
++
++_QueryParams: TypeAlias = Mapping[str, str | float]
++
++NUMERIC: Literal["NUMERIC"]
++
++CREATE_CMD: Literal["FT.CREATE"]
++ALTER_CMD: Literal["FT.ALTER"]
++SEARCH_CMD: Literal["FT.SEARCH"]
++ADD_CMD: Literal["FT.ADD"]
++ADDHASH_CMD: Literal["FT.ADDHASH"]
++DROP_CMD: Literal["FT.DROP"]
++EXPLAIN_CMD: Literal["FT.EXPLAIN"]
++EXPLAINCLI_CMD: Literal["FT.EXPLAINCLI"]
++DEL_CMD: Literal["FT.DEL"]
++AGGREGATE_CMD: Literal["FT.AGGREGATE"]
++PROFILE_CMD: Literal["FT.PROFILE"]
++CURSOR_CMD: Literal["FT.CURSOR"]
++SPELLCHECK_CMD: Literal["FT.SPELLCHECK"]
++DICT_ADD_CMD: Literal["FT.DICTADD"]
++DICT_DEL_CMD: Literal["FT.DICTDEL"]
++DICT_DUMP_CMD: Literal["FT.DICTDUMP"]
++GET_CMD: Literal["FT.GET"]
++MGET_CMD: Literal["FT.MGET"]
++CONFIG_CMD: Literal["FT.CONFIG"]
++TAGVALS_CMD: Literal["FT.TAGVALS"]
++ALIAS_ADD_CMD: Literal["FT.ALIASADD"]
++ALIAS_UPDATE_CMD: Literal["FT.ALIASUPDATE"]
++ALIAS_DEL_CMD: Literal["FT.ALIASDEL"]
++INFO_CMD: Literal["FT.INFO"]
++SUGADD_COMMAND: Literal["FT.SUGADD"]
++SUGDEL_COMMAND: Literal["FT.SUGDEL"]
++SUGLEN_COMMAND: Literal["FT.SUGLEN"]
++SUGGET_COMMAND: Literal["FT.SUGGET"]
++SYNUPDATE_CMD: Literal["FT.SYNUPDATE"]
++SYNDUMP_CMD: Literal["FT.SYNDUMP"]
++
++NOOFFSETS: Literal["NOOFFSETS"]
++NOFIELDS: Literal["NOFIELDS"]
++STOPWORDS: Literal["STOPWORDS"]
++WITHSCORES: Literal["WITHSCORES"]
++FUZZY: Literal["FUZZY"]
++WITHPAYLOADS: Literal["WITHPAYLOADS"]
++
++class SearchCommands:
++    def batch_indexer(self, chunk_size: int = 100): ...
++    def create_index(
++        self,
++        fields,
++        no_term_offsets: bool = False,
++        no_field_flags: bool = False,
++        stopwords: Incomplete | None = None,
++        definition: Incomplete | None = None,
++        max_text_fields: bool = False,  # added in 4.1.1
++        temporary: Incomplete | None = None,  # added in 4.1.1
++        no_highlight: bool = False,  # added in 4.1.1
++        no_term_frequencies: bool = False,  # added in 4.1.1
++        skip_initial_scan: bool = False,  # added in 4.1.1
++    ): ...
++    def alter_schema_add(self, fields): ...
++    def dropindex(self, delete_documents: bool = False): ...
++    def add_document(
++        self,
++        doc_id,
++        nosave: bool = False,
++        score: float = 1.0,
++        payload: Incomplete | None = None,
++        replace: bool = False,
++        partial: bool = False,
++        language: Incomplete | None = None,
++        no_create: bool = False,
++        **fields,
++    ): ...
++    def add_document_hash(self, doc_id, score: float = 1.0, language: Incomplete | None = None, replace: bool = False): ...
++    def delete_document(self, doc_id, conn: Incomplete | None = None, delete_actual_document: bool = False): ...
++    def load_document(self, id): ...
++    def get(self, *ids): ...
++    def info(self): ...
++    def get_params_args(self, query_params: _QueryParams) -> list[Any]: ...
++    def search(self, query: str | Query, query_params: _QueryParams | None = None) -> Result: ...
++    def explain(self, query: str | Query, query_params: _QueryParams | None = None): ...
++    def explain_cli(self, query): ...
++    def aggregate(self, query: AggregateRequest | Cursor, query_params: _QueryParams | None = None) -> AggregateResult: ...
++    def profile(
++        self, query: str | Query | AggregateRequest, limited: bool = False, query_params: Mapping[str, str | float] | None = None
++    ) -> tuple[Incomplete, Incomplete]: ...
++    def spellcheck(
++        self, query, distance: Incomplete | None = None, include: Incomplete | None = None, exclude: Incomplete | None = None
++    ): ...
++    def dict_add(self, name, *terms): ...
++    def dict_del(self, name, *terms): ...
++    def dict_dump(self, name): ...
++    def config_set(self, option: str, value: str) -> bool: ...
++    def config_get(self, option: str) -> dict[str, str]: ...
++    def tagvals(self, tagfield): ...
++    def aliasadd(self, alias): ...
++    def aliasupdate(self, alias): ...
++    def aliasdel(self, alias): ...
++    def sugadd(self, key, *suggestions, **kwargs): ...
++    def suglen(self, key): ...
++    def sugdel(self, key, string): ...
++    def sugget(self, key, prefix, fuzzy: bool = False, num: int = 10, with_scores: bool = False, with_payloads: bool = False): ...
++    def synupdate(self, groupid, skipinitial: bool = False, *terms): ...
++    def syndump(self): ...
+diff --git a/valkey/commands/search/field.py b/valkey/commands/search/field.py
+index 72907ae..f9b25b6 100644
+--- a/valkey/commands/search/field.py
++++ b/valkey/commands/search/field.py
+@@ -1,4 +1,4 @@
+-from typing import List
++from typing import List, Union
+ 
+ from valkey import DataError
+ 
+@@ -18,10 +18,10 @@ class Field:
+     def __init__(
+         self,
+         name: str,
+-        args: List[str] = None,
++        args: Union[List[str], None] = None,
+         sortable: bool = False,
+         no_index: bool = False,
+-        as_name: str = None,
++        as_name: Union[str, None] = None,
+     ):
+         if args is None:
+             args = []
+@@ -63,11 +63,11 @@ class TextField(Field):
+         name: str,
+         weight: float = 1.0,
+         no_stem: bool = False,
+-        phonetic_matcher: str = None,
++        phonetic_matcher: Union[str, None] = None,
+         withsuffixtrie: bool = False,
+         **kwargs,
+     ):
+-        Field.__init__(self, name, args=[Field.TEXT, Field.WEIGHT, weight], **kwargs)
++        Field.__init__(self, name, args=[Field.TEXT, Field.WEIGHT, weight], **kwargs)  # type: ignore[list-item]
+ 
+         if no_stem:
+             Field.append_arg(self, self.NOSTEM)
+@@ -148,7 +148,7 @@ class VectorField(Field):
+     See https://oss.valkey.com/valkeyearch/Vectors/#vector_fields.
+     """
+ 
+-    def __init__(self, name: str, algorithm: str, attributes: dict, **kwargs):
++    def __init__(self, name: str, algorithm: str, attributes: dict, **kwargs):  # type: ignore[type-arg]
+         """
+         Create Vector Field. Notice that Vector cannot have sortable or no_index tag,
+         although it's also a Field.
+@@ -180,5 +180,5 @@ class VectorField(Field):
+             attr_li.extend([key, value])
+ 
+         Field.__init__(
+-            self, name, args=[Field.VECTOR, algorithm, len(attr_li), *attr_li], **kwargs
++            self, name, args=[Field.VECTOR, algorithm, len(attr_li), *attr_li], **kwargs  # type: ignore[list-item]
+         )
+diff --git a/valkey/commands/search/query.pyi b/valkey/commands/search/query.pyi
+new file mode 100644
+index 0000000..eb1846b
+--- /dev/null
++++ b/valkey/commands/search/query.pyi
+@@ -0,0 +1,52 @@
++from _typeshed import Incomplete
++from typing import Any
++
++class Query:
++    def __init__(self, query_string) -> None: ...
++    def query_string(self): ...
++    def limit_ids(self, *ids): ...
++    def return_fields(self, *fields): ...
++    def return_field(self, field, as_field: Incomplete | None = None): ...
++    def summarize(
++        self,
++        fields: Incomplete | None = None,
++        context_len: Incomplete | None = None,
++        num_frags: Incomplete | None = None,
++        sep: Incomplete | None = None,
++    ): ...
++    def highlight(self, fields: Incomplete | None = None, tags: Incomplete | None = None): ...
++    def language(self, language): ...
++    def slop(self, slop): ...
++    def in_order(self): ...
++    def scorer(self, scorer): ...
++    def get_args(self): ...
++    def paging(self, offset, num): ...
++    def verbatim(self): ...
++    def no_content(self): ...
++    def no_stopwords(self): ...
++    def with_payloads(self): ...
++    def with_scores(self): ...
++    def limit_fields(self, *fields): ...
++    def add_filter(self, flt): ...
++    def sort_by(self, field, asc: bool = True): ...
++    def expander(self, expander): ...
++
++class Filter:
++    args: Any
++    def __init__(self, keyword, field, *args) -> None: ...
++
++class NumericFilter(Filter):
++    INF: str
++    NEG_INF: str
++    def __init__(self, field, minval, maxval, minExclusive: bool = False, maxExclusive: bool = False) -> None: ...
++
++class GeoFilter(Filter):
++    METERS: str
++    KILOMETERS: str
++    FEET: str
++    MILES: str
++    def __init__(self, field, lon, lat, radius, unit="km") -> None: ...
++
++class SortbyField:
++    args: Any
++    def __init__(self, field, asc: bool = True) -> None: ...
+diff --git a/valkey/commands/search/querystring.py b/valkey/commands/search/querystring.py
+index 3ff1320..bd57649 100644
+--- a/valkey/commands/search/querystring.py
++++ b/valkey/commands/search/querystring.py
+@@ -182,7 +182,7 @@ class Node:
+ 
+         self.params = []
+ 
+-        kvparams = {}
++        kvparams = {}  # type: ignore[var-annotated]
+         for k, v in kwparams.items():
+             curvals = kvparams.setdefault(k, [])
+             if isinstance(v, (str, int, float)):
+diff --git a/valkey/commands/search/reducers.py b/valkey/commands/search/reducers.py
+index 694558d..00f6507 100644
+--- a/valkey/commands/search/reducers.py
++++ b/valkey/commands/search/reducers.py
+@@ -151,7 +151,7 @@ class first_value(Reducer):
+             and isinstance(byfields[0], type)
+             and issubclass(byfields[0], SortDirection)
+         ):
+-            byfields = [byfields[0](field)]
++            byfields = [byfields[0](field)]  # type: ignore[assignment]
+ 
+         for f in byfields:
+             fieldstrs += [f.field, f.DIRSTRING]
+diff --git a/valkey/commands/search/result.pyi b/valkey/commands/search/result.pyi
+new file mode 100644
+index 0000000..046c317
+--- /dev/null
++++ b/valkey/commands/search/result.pyi
+@@ -0,0 +1,7 @@
++from typing import Any
++
++class Result:
++    total: Any
++    duration: Any
++    docs: Any
++    def __init__(self, res, hascontent, duration: int = 0, has_payload: bool = False, with_scores: bool = False) -> None: ...
+diff --git a/valkey/commands/sentinel.pyi b/valkey/commands/sentinel.pyi
+new file mode 100644
+index 0000000..b526a45
+--- /dev/null
++++ b/valkey/commands/sentinel.pyi
+@@ -0,0 +1,17 @@
++class SentinelCommands:
++    def sentinel(self, *args): ...
++    def sentinel_get_master_addr_by_name(self, service_name): ...
++    def sentinel_master(self, service_name): ...
++    def sentinel_masters(self): ...
++    def sentinel_monitor(self, name, ip, port, quorum): ...
++    def sentinel_remove(self, name): ...
++    def sentinel_sentinels(self, service_name): ...
++    def sentinel_set(self, name, option, value): ...
++    def sentinel_slaves(self, service_name): ...
++    def sentinel_reset(self, pattern): ...
++    def sentinel_failover(self, new_master_name): ...
++    def sentinel_ckquorum(self, new_master_name): ...
++    def sentinel_flushconfig(self): ...
++
++class AsyncSentinelCommands(SentinelCommands):
++    async def sentinel(self, *args) -> None: ...
+diff --git a/valkey/commands/timeseries/__init__.pyi b/valkey/commands/timeseries/__init__.pyi
+new file mode 100644
+index 0000000..95457d6
+--- /dev/null
++++ b/valkey/commands/timeseries/__init__.pyi
+@@ -0,0 +1,14 @@
++from _typeshed import Incomplete
++from typing import Any
++
++from ...client import Pipeline as ClientPipeline
++from .commands import TimeSeriesCommands
++
++class TimeSeries(TimeSeriesCommands):
++    MODULE_CALLBACKS: dict[str, Any]
++    client: Any
++    execute_command: Any
++    def __init__(self, client: Incomplete | None = None, **kwargs) -> None: ...
++    def pipeline(self, transaction: bool = True, shard_hint: Incomplete | None = None) -> Pipeline: ...
++
++class Pipeline(TimeSeriesCommands, ClientPipeline[Incomplete]): ...  # type: ignore[misc]
+diff --git a/valkey/commands/timeseries/commands.pyi b/valkey/commands/timeseries/commands.pyi
+new file mode 100644
+index 0000000..ed70e57
+--- /dev/null
++++ b/valkey/commands/timeseries/commands.pyi
+@@ -0,0 +1,160 @@
++from typing import Literal
++from typing_extensions import TypeAlias
++
++_Key: TypeAlias = bytes | str | memoryview
++
++ADD_CMD: Literal["TS.ADD"]
++ALTER_CMD: Literal["TS.ALTER"]
++CREATERULE_CMD: Literal["TS.CREATERULE"]
++CREATE_CMD: Literal["TS.CREATE"]
++DECRBY_CMD: Literal["TS.DECRBY"]
++DELETERULE_CMD: Literal["TS.DELETERULE"]
++DEL_CMD: Literal["TS.DEL"]
++GET_CMD: Literal["TS.GET"]
++INCRBY_CMD: Literal["TS.INCRBY"]
++INFO_CMD: Literal["TS.INFO"]
++MADD_CMD: Literal["TS.MADD"]
++MGET_CMD: Literal["TS.MGET"]
++MRANGE_CMD: Literal["TS.MRANGE"]
++MREVRANGE_CMD: Literal["TS.MREVRANGE"]
++QUERYINDEX_CMD: Literal["TS.QUERYINDEX"]
++RANGE_CMD: Literal["TS.RANGE"]
++REVRANGE_CMD: Literal["TS.REVRANGE"]
++
++class TimeSeriesCommands:
++    def create(
++        self,
++        key: _Key,
++        retention_msecs: int | None = None,
++        uncompressed: bool | None = False,
++        labels: dict[str, str] | None = None,
++        chunk_size: int | None = None,
++        duplicate_policy: str | None = None,
++    ): ...
++    def alter(
++        self,
++        key: _Key,
++        retention_msecs: int | None = None,
++        labels: dict[str, str] | None = None,
++        chunk_size: int | None = None,
++        duplicate_policy: str | None = None,
++    ): ...
++    def add(
++        self,
++        key: _Key,
++        timestamp: int | str,
++        value: float,
++        retention_msecs: int | None = None,
++        uncompressed: bool | None = False,
++        labels: dict[str, str] | None = None,
++        chunk_size: int | None = None,
++        duplicate_policy: str | None = None,
++    ): ...
++    def madd(self, ktv_tuples): ...
++    def incrby(
++        self,
++        key: _Key,
++        value: float,
++        timestamp: int | str | None = None,
++        retention_msecs: int | None = None,
++        uncompressed: bool | None = False,
++        labels: dict[str, str] | None = None,
++        chunk_size: int | None = None,
++    ): ...
++    def decrby(
++        self,
++        key: _Key,
++        value: float,
++        timestamp: int | str | None = None,
++        retention_msecs: int | None = None,
++        uncompressed: bool | None = False,
++        labels: dict[str, str] | None = None,
++        chunk_size: int | None = None,
++    ): ...
++    def delete(self, key, from_time, to_time): ...
++    def createrule(
++        self, source_key: _Key, dest_key: _Key, aggregation_type: str, bucket_size_msec: int, align_timestamp: int | None = None
++    ): ...
++    def deleterule(self, source_key, dest_key): ...
++    def range(
++        self,
++        key: _Key,
++        from_time: int | str,
++        to_time: int | str,
++        count: int | None = None,
++        aggregation_type: str | None = None,
++        bucket_size_msec: int | None = 0,
++        filter_by_ts: list[int] | None = None,
++        filter_by_min_value: int | None = None,
++        filter_by_max_value: int | None = None,
++        align: int | str | None = None,
++        latest: bool | None = False,
++        bucket_timestamp: str | None = None,
++        empty: bool | None = False,
++    ): ...
++    def revrange(
++        self,
++        key: _Key,
++        from_time: int | str,
++        to_time: int | str,
++        count: int | None = None,
++        aggregation_type: str | None = None,
++        bucket_size_msec: int | None = 0,
++        filter_by_ts: list[int] | None = None,
++        filter_by_min_value: int | None = None,
++        filter_by_max_value: int | None = None,
++        align: int | str | None = None,
++        latest: bool | None = False,
++        bucket_timestamp: str | None = None,
++        empty: bool | None = False,
++    ): ...
++    def mrange(
++        self,
++        from_time: int | str,
++        to_time: int | str,
++        filters: list[str],
++        count: int | None = None,
++        aggregation_type: str | None = None,
++        bucket_size_msec: int | None = 0,
++        with_labels: bool | None = False,
++        filter_by_ts: list[int] | None = None,
++        filter_by_min_value: int | None = None,
++        filter_by_max_value: int | None = None,
++        groupby: str | None = None,
++        reduce: str | None = None,
++        select_labels: list[str] | None = None,
++        align: int | str | None = None,
++        latest: bool | None = False,
++        bucket_timestamp: str | None = None,
++        empty: bool | None = False,
++    ): ...
++    def mrevrange(
++        self,
++        from_time: int | str,
++        to_time: int | str,
++        filters: list[str],
++        count: int | None = None,
++        aggregation_type: str | None = None,
++        bucket_size_msec: int | None = 0,
++        with_labels: bool | None = False,
++        filter_by_ts: list[int] | None = None,
++        filter_by_min_value: int | None = None,
++        filter_by_max_value: int | None = None,
++        groupby: str | None = None,
++        reduce: str | None = None,
++        select_labels: list[str] | None = None,
++        align: int | str | None = None,
++        latest: bool | None = False,
++        bucket_timestamp: str | None = None,
++        empty: bool | None = False,
++    ): ...
++    def get(self, key: _Key, latest: bool | None = False): ...
++    def mget(
++        self,
++        filters: list[str],
++        with_labels: bool | None = False,
++        select_labels: list[str] | None = None,
++        latest: bool | None = False,
++    ): ...
++    def info(self, key): ...
++    def queryindex(self, filters): ...
+diff --git a/valkey/commands/timeseries/info.pyi b/valkey/commands/timeseries/info.pyi
+new file mode 100644
+index 0000000..8b082c7
+--- /dev/null
++++ b/valkey/commands/timeseries/info.pyi
+@@ -0,0 +1,18 @@
++from _typeshed import Incomplete
++from typing import Any
++
++class TSInfo:
++    rules: list[Any]
++    labels: list[Any]
++    sourceKey: Incomplete | None
++    chunk_count: Incomplete | None
++    memory_usage: Incomplete | None
++    total_samples: Incomplete | None
++    retention_msecs: Incomplete | None
++    last_time_stamp: Incomplete | None
++    first_time_stamp: Incomplete | None
++
++    max_samples_per_chunk: Incomplete | None
++    chunk_size: Incomplete | None
++    duplicate_policy: Incomplete | None
++    def __init__(self, args) -> None: ...
+diff --git a/valkey/commands/timeseries/utils.pyi b/valkey/commands/timeseries/utils.pyi
+new file mode 100644
+index 0000000..4a0d52c
+--- /dev/null
++++ b/valkey/commands/timeseries/utils.pyi
+@@ -0,0 +1,5 @@
++def list_to_dict(aList): ...
++def parse_range(response): ...
++def parse_m_range(response): ...
++def parse_get(response): ...
++def parse_m_get(response): ...
+diff --git a/valkey/connection.pyi b/valkey/connection.pyi
+new file mode 100644
+index 0000000..9796fd2
+--- /dev/null
++++ b/valkey/connection.pyi
+@@ -0,0 +1,289 @@
++from _typeshed import Incomplete, Unused
++from abc import abstractmethod
++from collections.abc import Callable, Iterable, Mapping
++from queue import Queue
++from socket import socket
++from typing import Any, ClassVar
++from typing_extensions import Self, TypeAlias
++
++from .credentials import CredentialProvider
++from .retry import Retry
++
++ssl_available: bool
++SYM_STAR: bytes
++SYM_DOLLAR: bytes
++SYM_CRLF: bytes
++SYM_EMPTY: bytes
++SERVER_CLOSED_CONNECTION_ERROR: str
++NONBLOCKING_EXCEPTIONS: tuple[type[Exception], ...]
++NONBLOCKING_EXCEPTION_ERROR_NUMBERS: dict[type[Exception], int]
++SENTINEL: object
++MODULE_LOAD_ERROR: str
++NO_SUCH_MODULE_ERROR: str
++MODULE_UNLOAD_NOT_POSSIBLE_ERROR: str
++MODULE_EXPORTS_DATA_TYPES_ERROR: str
++FALSE_STRINGS: tuple[str, ...]
++URL_QUERY_ARGUMENT_PARSERS: dict[str, Callable[[Any], Any]]
++
++# Options as passed to Pool.get_connection().
++_ConnectionPoolOptions: TypeAlias = Any
++_ConnectFunc: TypeAlias = Callable[[Connection], object]
++
++class BaseParser:
++    EXCEPTION_CLASSES: ClassVar[dict[str, type[Exception] | dict[str, type[Exception]]]]
++    @classmethod
++    def parse_error(cls, response: str) -> Exception: ...
++
++class SocketBuffer:
++    socket_read_size: int
++    bytes_written: int
++    bytes_read: int
++    socket_timeout: float | None
++    def __init__(self, socket: socket, socket_read_size: int, socket_timeout: float | None) -> None: ...
++    def unread_bytes(self) -> int: ...
++    def can_read(self, timeout: float | None) -> bool: ...
++    def read(self, length: int) -> bytes: ...
++    def readline(self) -> bytes: ...
++    def get_pos(self) -> int: ...
++    def rewind(self, pos: int) -> None: ...
++    def purge(self) -> None: ...
++    def close(self) -> None: ...
++
++class PythonParser(BaseParser):
++    encoding: str
++    socket_read_size: int
++    encoder: Encoder | None
++    def __init__(self, socket_read_size: int) -> None: ...
++    def __del__(self) -> None: ...
++    def on_connect(self, connection: Connection) -> None: ...
++    def on_disconnect(self) -> None: ...
++    def can_read(self, timeout: float | None) -> bool: ...
++    def read_response(self, disable_decoding: bool = False) -> Any: ...  # `str | bytes` or `list[str | bytes]`
++
++class LibvalkeyParser(BaseParser):
++    socket_read_size: int
++    def __init__(self, socket_read_size: int) -> None: ...
++    def __del__(self) -> None: ...
++    def on_connect(self, connection: Connection, **kwargs) -> None: ...
++    def on_disconnect(self) -> None: ...
++    def can_read(self, timeout: float | None) -> bool: ...
++    def read_from_socket(self, timeout: float | None = ..., raise_on_timeout: bool = True) -> bool: ...
++    def read_response(self, disable_decoding: bool = False) -> Any: ...  # `str | bytes` or `list[str | bytes]`
++
++DefaultParser: type[BaseParser]  # Libvalkey or PythonParser
++
++_Encodable: TypeAlias = str | bytes | memoryview | bool | float
++
++class Encoder:
++    encoding: str
++    encoding_errors: str
++    decode_responses: bool
++    def __init__(self, encoding: str, encoding_errors: str, decode_responses: bool) -> None: ...
++    def encode(self, value: _Encodable) -> bytes: ...
++    def decode(self, value: str | bytes | memoryview, force: bool = False) -> str: ...
++
++class AbstractConnection:
++    pid: int
++    db: int
++    client_name: str | None
++    credential_provider: CredentialProvider | None
++    password: str | None
++    username: str | None
++    socket_timeout: float | None
++    socket_connect_timeout: float | None
++    retry_on_timeout: bool
++    retry_on_error: list[type[Exception]]
++    retry: Retry
++    health_check_interval: int
++    next_health_check: int
++    valkey_connect_func: _ConnectFunc | None
++    encoder: Encoder
++
++    def __init__(
++        self,
++        db: int = 0,
++        password: str | None = None,
++        socket_timeout: float | None = None,
++        socket_connect_timeout: float | None = None,
++        retry_on_timeout: bool = False,
++        retry_on_error: list[type[Exception]] = ...,
++        encoding: str = "utf-8",
++        encoding_errors: str = "strict",
++        decode_responses: bool = False,
++        parser_class: type[BaseParser] = ...,
++        socket_read_size: int = 65536,
++        health_check_interval: int = 0,
++        client_name: str | None = None,
++        username: str | None = None,
++        retry: Retry | None = None,
++        valkey_connect_func: _ConnectFunc | None = None,
++        credential_provider: CredentialProvider | None = None,
++        command_packer: Incomplete | None = None,
++    ) -> None: ...
++    @abstractmethod
++    def repr_pieces(self) -> list[tuple[str, Any]]: ...
++    def register_connect_callback(self, callback: _ConnectFunc) -> None: ...
++    def clear_connect_callbacks(self) -> None: ...
++    def set_parser(self, parser_class: type[BaseParser]) -> None: ...
++    def connect(self) -> None: ...
++    def on_connect(self) -> None: ...
++    def disconnect(self, *args: Unused) -> None: ...  # 'args' added in valkey 4.1.2
++    def check_health(self) -> None: ...
++    def send_packed_command(self, command: str | Iterable[str], check_health: bool = True) -> None: ...
++    def send_command(self, *args, **kwargs) -> None: ...
++    def can_read(self, timeout: float | None = 0) -> bool: ...
++    def read_response(
++        self, disable_decoding: bool = False, *, disconnect_on_error: bool = True
++    ) -> Any: ...  # `str | bytes` or `list[str | bytes]`
++    def pack_command(self, *args) -> list[bytes]: ...
++    def pack_commands(self, commands: Iterable[Iterable[Incomplete]]) -> list[bytes]: ...
++
++class Connection(AbstractConnection):
++    host: str
++    port: int
++    socket_keepalive: bool
++    socket_keepalive_options: Mapping[str, int | str]
++    socket_type: int
++    def __init__(
++        self,
++        host: str = "localhost",
++        port: int = 6379,
++        socket_keepalive: bool = False,
++        socket_keepalive_options: Mapping[str, int | str] | None = None,
++        socket_type: int = 0,
++        *,
++        db: int = 0,
++        password: str | None = None,
++        socket_timeout: float | None = None,
++        socket_connect_timeout: float | None = None,
++        retry_on_timeout: bool = False,
++        retry_on_error: list[type[Exception]] = ...,
++        encoding: str = "utf-8",
++        encoding_errors: str = "strict",
++        decode_responses: bool = False,
++        parser_class: type[BaseParser] = ...,
++        socket_read_size: int = 65536,
++        health_check_interval: int = 0,
++        client_name: str | None = None,
++        username: str | None = None,
++        retry: Retry | None = None,
++        valkey_connect_func: _ConnectFunc | None = None,
++        credential_provider: CredentialProvider | None = None,
++        command_packer: Incomplete | None = None,
++    ) -> None: ...
++    def repr_pieces(self) -> list[tuple[str, Any]]: ...
++
++class SSLConnection(Connection):
++    keyfile: Any
++    certfile: Any
++    cert_reqs: Any
++    ca_certs: Any
++    ca_path: Incomplete | None
++    check_hostname: bool
++    certificate_password: Incomplete | None
++    ssl_validate_ocsp: bool
++    ssl_validate_ocsp_stapled: bool  # added in 4.1.1
++    ssl_ocsp_context: Incomplete | None  # added in 4.1.1
++    ssl_ocsp_expected_cert: Incomplete | None  # added in 4.1.1
++    def __init__(
++        self,
++        ssl_keyfile=None,
++        ssl_certfile=None,
++        ssl_cert_reqs="required",
++        ssl_ca_certs=None,
++        ssl_ca_data: Incomplete | None = None,
++        ssl_check_hostname: bool = False,
++        ssl_ca_path: Incomplete | None = None,
++        ssl_password: Incomplete | None = None,
++        ssl_validate_ocsp: bool = False,
++        ssl_validate_ocsp_stapled: bool = False,  # added in 4.1.1
++        ssl_ocsp_context: Incomplete | None = None,  # added in 4.1.1
++        ssl_ocsp_expected_cert: Incomplete | None = None,  # added in 4.1.1
++        *,
++        host: str = "localhost",
++        port: int = 6379,
++        socket_timeout: float | None = None,
++        socket_connect_timeout: float | None = None,
++        socket_keepalive: bool = False,
++        socket_keepalive_options: Mapping[str, int | str] | None = None,
++        socket_type: int = 0,
++        db: int = 0,
++        password: str | None = None,
++        retry_on_timeout: bool = False,
++        retry_on_error: list[type[Exception]] = ...,
++        encoding: str = "utf-8",
++        encoding_errors: str = "strict",
++        decode_responses: bool = False,
++        parser_class: type[BaseParser] = ...,
++        socket_read_size: int = 65536,
++        health_check_interval: int = 0,
++        client_name: str | None = None,
++        username: str | None = None,
++        retry: Retry | None = None,
++        valkey_connect_func: _ConnectFunc | None = None,
++        credential_provider: CredentialProvider | None = None,
++        command_packer: Incomplete | None = None,
++    ) -> None: ...
++
++class UnixDomainSocketConnection(AbstractConnection):
++    path: str
++    def __init__(
++        self,
++        path: str = "",
++        *,
++        db: int = 0,
++        password: str | None = None,
++        socket_timeout: float | None = None,
++        socket_connect_timeout: float | None = None,
++        retry_on_timeout: bool = False,
++        retry_on_error: list[type[Exception]] = ...,
++        encoding: str = "utf-8",
++        encoding_errors: str = "strict",
++        decode_responses: bool = False,
++        parser_class: type[BaseParser] = ...,
++        socket_read_size: int = 65536,
++        health_check_interval: int = 0,
++        client_name: str | None = None,
++        username: str | None = None,
++        retry: Retry | None = None,
++        valkey_connect_func: _ConnectFunc | None = None,
++        credential_provider: CredentialProvider | None = None,
++        command_packer: Incomplete | None = None,
++    ) -> None: ...
++    def repr_pieces(self) -> list[tuple[str, Any]]: ...
++
++# TODO: make generic on `connection_class`
++class ConnectionPool:
++    connection_class: type[Connection]
++    connection_kwargs: dict[str, Any]
++    max_connections: int
++    pid: int
++    @classmethod
++    def from_url(cls, url: str, *, db: int = ..., decode_components: bool = ..., **kwargs) -> Self: ...
++    def __init__(
++        self, connection_class: type[AbstractConnection] = ..., max_connections: int | None = None, **connection_kwargs
++    ) -> None: ...
++    def reset(self) -> None: ...
++    def get_connection(self, command_name: Unused, *keys, **options: _ConnectionPoolOptions) -> Connection: ...
++    def make_connection(self) -> Connection: ...
++    def release(self, connection: Connection) -> None: ...
++    def disconnect(self, inuse_connections: bool = True) -> None: ...
++    def get_encoder(self) -> Encoder: ...
++    def owns_connection(self, connection: Connection) -> bool: ...
++
++class BlockingConnectionPool(ConnectionPool):
++    queue_class: type[Queue[Any]]
++    timeout: float
++    pool: Queue[Connection | None]  # might not be defined
++    def __init__(
++        self,
++        max_connections: int = 50,
++        timeout: float = 20,
++        connection_class: type[Connection] = ...,
++        queue_class: type[Queue[Any]] = ...,
++        **connection_kwargs,
++    ) -> None: ...
++    def disconnect(self) -> None: ...  # type: ignore[override]
++
++def to_bool(value: object) -> bool: ...
++def parse_url(url: str) -> dict[str, Any]: ...
+diff --git a/valkey/crc.pyi b/valkey/crc.pyi
+new file mode 100644
+index 0000000..d808e65
+--- /dev/null
++++ b/valkey/crc.pyi
+@@ -0,0 +1,5 @@
++from valkey.typing import EncodedT
++
++VALKEY_CLUSTER_HASH_SLOTS: int
++
++def key_slot(key: EncodedT, bucket: int = 16384) -> int: ...
+diff --git a/valkey/credentials.pyi b/valkey/credentials.pyi
+new file mode 100644
+index 0000000..7a2d78e
+--- /dev/null
++++ b/valkey/credentials.pyi
+@@ -0,0 +1,11 @@
++from abc import abstractmethod
++
++class CredentialProvider:
++    @abstractmethod
++    def get_credentials(self) -> tuple[str] | tuple[str, str]: ...
++
++class UsernamePasswordCredentialProvider(CredentialProvider):
++    username: str
++    password: str
++    def __init__(self, username: str | None = None, password: str | None = None) -> None: ...
++    def get_credentials(self) -> tuple[str] | tuple[str, str]: ...
+diff --git a/valkey/exceptions.pyi b/valkey/exceptions.pyi
+new file mode 100644
+index 0000000..50eb895
+--- /dev/null
++++ b/valkey/exceptions.pyi
+@@ -0,0 +1,43 @@
++class ValkeyError(Exception): ...
++class AuthenticationError(ValkeyError): ...
++class ConnectionError(ValkeyError): ...
++class TimeoutError(ValkeyError): ...
++class AuthorizationError(ConnectionError): ...
++class BusyLoadingError(ConnectionError): ...
++class InvalidResponse(ValkeyError): ...
++class ResponseError(ValkeyError): ...
++class DataError(ValkeyError): ...
++class PubSubError(ValkeyError): ...
++class WatchError(ValkeyError): ...
++class NoScriptError(ResponseError): ...
++class OutOfMemoryError(ResponseError): ...
++class ExecAbortError(ResponseError): ...
++class ReadOnlyError(ResponseError): ...
++class NoPermissionError(ResponseError): ...
++class ModuleError(ResponseError): ...
++class LockError(ValkeyError, ValueError): ...
++class LockNotOwnedError(LockError): ...
++class ChildDeadlockedError(Exception): ...
++class AuthenticationWrongNumberOfArgsError(ResponseError): ...
++class ValkeyClusterException(Exception): ...
++class ClusterError(ValkeyError): ...
++
++class ClusterDownError(ClusterError, ResponseError):
++    args: tuple[str]
++    message: str
++    def __init__(self, resp: str) -> None: ...
++
++class AskError(ResponseError):
++    args: tuple[str]
++    message: str
++    slot_id: int
++    node_addr: tuple[str, int]
++    host: str
++    port: int
++    def __init__(self, resp: str) -> None: ...
++
++class TryAgainError(ResponseError): ...
++class ClusterCrossSlotError(ResponseError): ...
++class MovedError(AskError): ...
++class MasterDownError(ClusterDownError): ...
++class SlotNotCoveredError(ValkeyClusterException): ...
+diff --git a/valkey/lock.pyi b/valkey/lock.pyi
+new file mode 100644
+index 0000000..81d1dca
+--- /dev/null
++++ b/valkey/lock.pyi
+@@ -0,0 +1,56 @@
++from _typeshed import Incomplete
++from types import TracebackType
++from typing import Any, ClassVar, Protocol
++from typing_extensions import Self
++
++from valkey.client import Valkey
++
++class _Local(Protocol):
++    token: str | bytes | None
++
++class Lock:
++    LUA_EXTEND_SCRIPT: ClassVar[str]
++    LUA_REACQUIRE_SCRIPT: ClassVar[str]
++    LUA_RELEASE_SCRIPT: ClassVar[str]
++    lua_extend: ClassVar[Incomplete | None]
++    lua_reacquire: ClassVar[Incomplete | None]
++    lua_release: ClassVar[Incomplete | None]
++    valkey: Valkey[Any]
++    name: str
++    timeout: float | None
++    sleep: float
++    blocking: bool
++    blocking_timeout: float | None
++    thread_local: bool
++    local: _Local
++    def __init__(
++        self,
++        valkey: Valkey[Any],
++        name: str,
++        timeout: float | None = None,
++        sleep: float = 0.1,
++        blocking: bool = True,
++        blocking_timeout: float | None = None,
++        thread_local: bool = True,
++    ) -> None: ...
++    def register_scripts(self) -> None: ...
++    def __enter__(self) -> Self: ...
++    def __exit__(
++        self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None
++    ) -> bool | None: ...
++    def acquire(
++        self,
++        sleep: float | None = None,
++        blocking: bool | None = None,
++        blocking_timeout: float | None = None,
++        token: str | bytes | None = None,
++    ) -> bool: ...
++    def do_acquire(self, token: str | bytes) -> bool: ...
++    def locked(self) -> bool: ...
++    def owned(self) -> bool: ...
++    def release(self) -> None: ...
++    def do_release(self, expected_token: str | bytes) -> None: ...
++    def extend(self, additional_time: float, replace_ttl: bool = False) -> bool: ...
++    def do_extend(self, additional_time: float, replace_ttl: bool) -> bool: ...
++    def reacquire(self) -> bool: ...
++    def do_reacquire(self) -> bool: ...
+diff --git a/valkey/ocsp.pyi b/valkey/ocsp.pyi
+new file mode 100644
+index 0000000..5fc72e0
+--- /dev/null
++++ b/valkey/ocsp.pyi
+@@ -0,0 +1,21 @@
++from _typeshed import Incomplete
++from ssl import SSLObject, SSLSocket
++from typing import Literal
++
++from cryptography.x509.base import Certificate
++from OpenSSL.SSL import Connection
++
++def ocsp_staple_verifier(con: Connection, ocsp_bytes: bytes, expected: bytes | None = None) -> Literal[True]: ...
++
++class OCSPVerifier:
++    SOCK: SSLObject | SSLSocket
++    HOST: str
++    PORT: int
++    CA_CERTS: str | None
++    def __init__(self, sock: SSLObject | SSLSocket, host: str, port: int, ca_certs: str | None = None) -> None: ...
++    # cryptography.x509.general_name.GeneralName.value is typed as Any
++    def components_from_socket(self) -> tuple[Certificate, Incomplete | None, Incomplete]: ...
++    def components_from_direct_connection(self) -> tuple[Certificate, Incomplete | None, Incomplete]: ...
++    def build_certificate_url(self, server: str, cert: Certificate, issuer_cert: Certificate) -> str: ...
++    def check_certificate(self, server: str, cert: Certificate, issuer_url: str | bytes) -> Literal[True]: ...
++    def is_valid(self) -> Literal[True]: ...
+diff --git a/valkey/retry.py b/valkey/retry.py
+index e40a833..4eb34d7 100644
+--- a/valkey/retry.py
++++ b/valkey/retry.py
+@@ -7,7 +7,7 @@ from valkey.exceptions import ConnectionError, TimeoutError
+ T = TypeVar("T")
+ 
+ if TYPE_CHECKING:
+-    from redis.backoff import AbstractBackoff
++    from valkey.backoff import AbstractBackoff
+ 
+ 
+ class Retry:
+diff --git a/valkey/retry.pyi b/valkey/retry.pyi
+new file mode 100644
+index 0000000..ab727e6
+--- /dev/null
++++ b/valkey/retry.pyi
+@@ -0,0 +1,11 @@
++from collections.abc import Callable, Iterable
++from typing import TypeVar
++
++from valkey.backoff import AbstractBackoff
++
++_T = TypeVar("_T")
++
++class Retry:
++    def __init__(self, backoff: AbstractBackoff, retries: int, supported_errors: tuple[type[Exception], ...] = ...) -> None: ...
++    def update_supported_errors(self, specified_errors: Iterable[type[Exception]]) -> None: ...
++    def call_with_retry(self, do: Callable[[], _T], fail: Callable[[Exception], object]) -> _T: ...
+diff --git a/valkey/sentinel.pyi b/valkey/sentinel.pyi
+new file mode 100644
+index 0000000..4a4c948
+--- /dev/null
++++ b/valkey/sentinel.pyi
+@@ -0,0 +1,62 @@
++from collections.abc import Iterable, Iterator
++from typing import Any, Literal, TypeVar, overload
++from typing_extensions import TypeAlias
++
++from valkey.client import Valkey
++from valkey.commands.sentinel import SentinelCommands
++from valkey.connection import Connection, ConnectionPool, SSLConnection
++from valkey.exceptions import ConnectionError
++
++_ValkeyT = TypeVar("_ValkeyT", bound=Valkey[Any])
++_AddressAndPort: TypeAlias = tuple[str, int]
++_SentinelState: TypeAlias = dict[str, Any]  # TODO: this can be a TypedDict
++
++class MasterNotFoundError(ConnectionError): ...
++class SlaveNotFoundError(ConnectionError): ...
++
++class SentinelManagedConnection(Connection):
++    connection_pool: SentinelConnectionPool
++    def __init__(self, *, connection_pool: SentinelConnectionPool, **kwargs) -> None: ...
++    def connect_to(self, address: _AddressAndPort) -> None: ...
++    def connect(self) -> None: ...
++    # The result can be either `str | bytes` or `list[str | bytes]`
++    def read_response(self, disable_decoding: bool = False, *, disconnect_on_error: bool = False) -> Any: ...
++
++class SentinelManagedSSLConnection(SentinelManagedConnection, SSLConnection): ...
++
++class SentinelConnectionPool(ConnectionPool):
++    is_master: bool
++    check_connection: bool
++    service_name: str
++    sentinel_manager: Sentinel
++    def __init__(self, service_name: str, sentinel_manager: Sentinel, **kwargs) -> None: ...
++    def reset(self) -> None: ...
++    def owns_connection(self, connection: Connection) -> bool: ...
++    def get_master_address(self) -> _AddressAndPort: ...
++    def rotate_slaves(self) -> Iterator[_AddressAndPort]: ...
++
++class Sentinel(SentinelCommands):
++    sentinel_kwargs: dict[str, Any]
++    sentinels: list[Valkey[Any]]
++    min_other_sentinels: int
++    connection_kwargs: dict[str, Any]
++    def __init__(
++        self,
++        sentinels: Iterable[_AddressAndPort],
++        min_other_sentinels: int = 0,
++        sentinel_kwargs: dict[str, Any] | None = None,
++        **connection_kwargs,
++    ) -> None: ...
++    def check_master_state(self, state: _SentinelState, service_name: str) -> bool: ...
++    def discover_master(self, service_name: str) -> _AddressAndPort: ...
++    def filter_slaves(self, slaves: Iterable[_SentinelState]) -> list[_AddressAndPort]: ...
++    def discover_slaves(self, service_name: str) -> list[_AddressAndPort]: ...
++    @overload
++    def master_for(self, service_name: str, *, connection_pool_class=..., **kwargs) -> Valkey[Any]: ...
++    @overload
++    def master_for(self, service_name: str, valkey_class: type[_ValkeyT], connection_pool_class=..., **kwargs) -> _ValkeyT: ...
++    @overload
++    def slave_for(self, service_name: str, *, connection_pool_class=..., **kwargs) -> Valkey[Any]: ...
++    @overload
++    def slave_for(self, service_name: str, valkey_class: type[_ValkeyT], connection_pool_class=..., **kwargs) -> _ValkeyT: ...
++    def execute_command(self, *args, **kwargs) -> Literal[True]: ...
+diff --git a/valkey/typing.pyi b/valkey/typing.pyi
+new file mode 100644
+index 0000000..dce33cb
+--- /dev/null
++++ b/valkey/typing.pyi
+@@ -0,0 +1,34 @@
++from collections.abc import Iterable
++from datetime import datetime, timedelta
++from typing import Any, Protocol, TypeVar
++from typing_extensions import TypeAlias
++
++from valkey.asyncio.connection import ConnectionPool as AsyncConnectionPool
++from valkey.connection import ConnectionPool
++
++# The following type aliases exist at runtime.
++EncodedT: TypeAlias = bytes | memoryview
++DecodedT: TypeAlias = str | int | float
++EncodableT: TypeAlias = EncodedT | DecodedT
++AbsExpiryT: TypeAlias = int | datetime
++ExpiryT: TypeAlias = int | timedelta
++ZScoreBoundT: TypeAlias = float | str
++BitfieldOffsetT: TypeAlias = int | str
++_StringLikeT: TypeAlias = bytes | str | memoryview  # noqa: Y043
++KeyT: TypeAlias = _StringLikeT
++PatternT: TypeAlias = _StringLikeT
++FieldT: TypeAlias = EncodableT
++KeysT: TypeAlias = KeyT | Iterable[KeyT]
++ChannelT: TypeAlias = _StringLikeT
++GroupT: TypeAlias = _StringLikeT
++ConsumerT: TypeAlias = _StringLikeT
++StreamIdT: TypeAlias = int | _StringLikeT
++ScriptTextT: TypeAlias = _StringLikeT
++TimeoutSecT: TypeAlias = int | float | _StringLikeT
++AnyKeyT = TypeVar("AnyKeyT", bytes, str, memoryview)  # noqa: Y001
++AnyFieldT = TypeVar("AnyFieldT", bytes, str, memoryview)  # noqa: Y001
++AnyChannelT = TypeVar("AnyChannelT", bytes, str, memoryview)  # noqa: Y001
++
++class CommandsProtocol(Protocol):
++    connection_pool: AsyncConnectionPool[Any] | ConnectionPool
++    def execute_command(self, *args, **options): ...
+diff --git a/valkey/utils.pyi b/valkey/utils.pyi
+new file mode 100644
+index 0000000..de41c11
+--- /dev/null
++++ b/valkey/utils.pyi
+@@ -0,0 +1,22 @@
++from _typeshed import Unused
++from collections.abc import Iterable, Mapping
++from contextlib import AbstractContextManager
++from typing import Any, Literal, TypeVar, overload
++
++from .client import Pipeline, Valkey, _StrType
++
++_T = TypeVar("_T")
++
++LIBVALKEY_AVAILABLE: bool
++CRYPTOGRAPHY_AVAILABLE: bool
++
++@overload
++def from_url(url: str, *, db: int = ..., decode_responses: Literal[True], **kwargs: Any) -> Valkey[str]: ...
++@overload
++def from_url(url: str, *, db: int = ..., decode_responses: Literal[False] = False, **kwargs: Any) -> Valkey[bytes]: ...
++def pipeline(valkey_obj: Valkey[_StrType]) -> AbstractContextManager[Pipeline[_StrType]]: ...
++def str_if_bytes(value: str | bytes) -> str: ...
++def safe_str(value: object) -> str: ...
++def dict_merge(*dicts: Mapping[str, _T]) -> dict[str, _T]: ...
++def list_keys_to_dict(key_list, callback): ...  # unused, alias for `dict.fromkeys`
++def merge_result(command: Unused, res: Mapping[Any, Iterable[_T]]) -> list[_T]: ...
diff --git a/valkey/__init__.py b/valkey/__init__.py
index e4202fbe..1feaac77 100644
--- a/valkey/__init__.py
+++ b/valkey/__init__.py
@@ -1,4 +1,5 @@
 from importlib import metadata
+from typing import Tuple, Union
 
 from valkey import asyncio  # noqa
 from valkey.backoff import default_backoff
@@ -44,6 +45,9 @@ def int_or_str(value):
         return value
 
 
+__version__: str
+VERSION: Tuple[Union[int, str], ...]
+
 try:
     __version__ = metadata.version("valkey")
 except metadata.PackageNotFoundError:
diff --git a/valkey/asyncio/__init__.pyi b/valkey/asyncio/__init__.pyi
new file mode 100644
index 00000000..7d45bb0f
--- /dev/null
+++ b/valkey/asyncio/__init__.pyi
@@ -0,0 +1,64 @@
+from valkey.asyncio.client import Valkey as Valkey, StrictValkey as StrictValkey
+from valkey.asyncio.cluster import ValkeyCluster as ValkeyCluster
+from valkey.asyncio.connection import (
+    BlockingConnectionPool as BlockingConnectionPool,
+    Connection as Connection,
+    ConnectionPool as ConnectionPool,
+    SSLConnection as SSLConnection,
+    UnixDomainSocketConnection as UnixDomainSocketConnection,
+)
+from valkey.asyncio.parser import CommandsParser as CommandsParser
+from valkey.asyncio.sentinel import (
+    Sentinel as Sentinel,
+    SentinelConnectionPool as SentinelConnectionPool,
+    SentinelManagedConnection as SentinelManagedConnection,
+    SentinelManagedSSLConnection as SentinelManagedSSLConnection,
+)
+from valkey.asyncio.utils import from_url as from_url
+from valkey.backoff import default_backoff as default_backoff
+from valkey.exceptions import (
+    AuthenticationError as AuthenticationError,
+    AuthenticationWrongNumberOfArgsError as AuthenticationWrongNumberOfArgsError,
+    BusyLoadingError as BusyLoadingError,
+    ChildDeadlockedError as ChildDeadlockedError,
+    ConnectionError as ConnectionError,
+    DataError as DataError,
+    InvalidResponse as InvalidResponse,
+    PubSubError as PubSubError,
+    ReadOnlyError as ReadOnlyError,
+    ValkeyError as ValkeyError,
+    ResponseError as ResponseError,
+    TimeoutError as TimeoutError,
+    WatchError as WatchError,
+)
+
+__all__ = [
+    "AuthenticationError",
+    "AuthenticationWrongNumberOfArgsError",
+    "BlockingConnectionPool",
+    "BusyLoadingError",
+    "ChildDeadlockedError",
+    "CommandsParser",
+    "Connection",
+    "ConnectionError",
+    "ConnectionPool",
+    "DataError",
+    "from_url",
+    "default_backoff",
+    "InvalidResponse",
+    "PubSubError",
+    "ReadOnlyError",
+    "Valkey",
+    "ValkeyCluster",
+    "ValkeyError",
+    "ResponseError",
+    "Sentinel",
+    "SentinelConnectionPool",
+    "SentinelManagedConnection",
+    "SentinelManagedSSLConnection",
+    "SSLConnection",
+    "StrictValkey",
+    "TimeoutError",
+    "UnixDomainSocketConnection",
+    "WatchError",
+]
diff --git a/valkey/asyncio/client.pyi b/valkey/asyncio/client.pyi
new file mode 100644
index 00000000..7cb11b26
--- /dev/null
+++ b/valkey/asyncio/client.pyi
@@ -0,0 +1,1102 @@
+from _typeshed import Incomplete, Unused
+from collections.abc import AsyncIterator, Awaitable, Callable, Generator, Iterable, Mapping, MutableMapping, Sequence
+from datetime import datetime, timedelta
+from types import TracebackType
+from typing import Any, ClassVar, Literal, NoReturn, Protocol, TypedDict, overload
+from typing_extensions import Self, TypeAlias
+
+from valkey import ValkeyError
+from valkey.asyncio.connection import ConnectCallbackT, Connection, ConnectionPool
+from valkey.asyncio.lock import Lock
+from valkey.asyncio.retry import Retry
+from valkey.client import AbstractValkey, _CommandOptions, _Key, _StrType, _Value
+from valkey.commands import AsyncCoreCommands, AsyncSentinelCommands, ValkeyModuleCommands
+from valkey.credentials import CredentialProvider
+from valkey.typing import ChannelT, EncodableT, KeyT, PatternT, StreamIdT
+
+PubSubHandler: TypeAlias = Callable[[dict[str, str]], Awaitable[None]]
+
+class ResponseCallbackProtocol(Protocol):
+    def __call__(self, response: Any, **kwargs): ...
+
+class AsyncResponseCallbackProtocol(Protocol):
+    async def __call__(self, response: Any, **kwargs): ...
+
+ResponseCallbackT: TypeAlias = ResponseCallbackProtocol | AsyncResponseCallbackProtocol
+
+class Valkey(AbstractValkey, ValkeyModuleCommands, AsyncCoreCommands[_StrType], AsyncSentinelCommands):
+    response_callbacks: MutableMapping[str | bytes, ResponseCallbackT]
+    auto_close_connection_pool: bool
+    connection_pool: Any
+    single_connection_client: Any
+    connection: Any
+    @overload
+    @classmethod
+    def from_url(
+        cls,
+        url: str,
+        *,
+        host: str = "localhost",
+        port: int = 6379,
+        db: str | int = 0,
+        password: str | None = None,
+        socket_timeout: float | None = None,
+        socket_connect_timeout: float | None = None,
+        socket_keepalive: bool | None = None,
+        socket_keepalive_options: Mapping[int, int | bytes] | None = None,
+        connection_pool: ConnectionPool[Any] | None = None,
+        unix_socket_path: str | None = None,
+        encoding: str = "utf-8",
+        encoding_errors: str = "strict",
+        decode_responses: Literal[True],
+        retry_on_timeout: bool = False,
+        retry_on_error: list[type[ValkeyError]] | None = None,
+        ssl: bool = False,
+        ssl_keyfile: str | None = None,
+        ssl_certfile: str | None = None,
+        ssl_cert_reqs: str = "required",
+        ssl_ca_certs: str | None = None,
+        ssl_ca_data: str | None = None,
+        ssl_check_hostname: bool = False,
+        max_connections: int | None = None,
+        single_connection_client: bool = False,
+        health_check_interval: int = 0,
+        client_name: str | None = None,
+        lib_name: str | None = None,
+        lib_version: str | None = None,
+        username: str | None = None,
+        retry: Retry | None = None,
+        auto_close_connection_pool: bool = True,
+        valkey_connect_func: ConnectCallbackT | None = None,
+        credential_provider: CredentialProvider | None = None,
+    ) -> Valkey[str]: ...
+    @overload
+    @classmethod
+    def from_url(
+        cls,
+        url: str,
+        *,
+        host: str = "localhost",
+        port: int = 6379,
+        db: str | int = 0,
+        password: str | None = None,
+        socket_timeout: float | None = None,
+        socket_connect_timeout: float | None = None,
+        socket_keepalive: bool | None = None,
+        socket_keepalive_options: Mapping[int, int | bytes] | None = None,
+        connection_pool: ConnectionPool[Any] | None = None,
+        unix_socket_path: str | None = None,
+        encoding: str = "utf-8",
+        encoding_errors: str = "strict",
+        decode_responses: Literal[False] = False,
+        retry_on_timeout: bool = False,
+        retry_on_error: list[type[ValkeyError]] | None = None,
+        ssl: bool = False,
+        ssl_keyfile: str | None = None,
+        ssl_certfile: str | None = None,
+        ssl_cert_reqs: str = "required",
+        ssl_ca_certs: str | None = None,
+        ssl_ca_data: str | None = None,
+        ssl_check_hostname: bool = False,
+        max_connections: int | None = None,
+        single_connection_client: bool = False,
+        health_check_interval: int = 0,
+        client_name: str | None = None,
+        lib_name: str | None = None,
+        lib_version: str | None = None,
+        username: str | None = None,
+        retry: Retry | None = None,
+        auto_close_connection_pool: bool = True,
+        valkey_connect_func: ConnectCallbackT | None = None,
+        credential_provider: CredentialProvider | None = None,
+    ) -> Valkey[bytes]: ...
+    @overload
+    def __init__(
+        self: Valkey[str],
+        *,
+        host: str = "localhost",
+        port: int = 6379,
+        db: str | int = 0,
+        password: str | None = None,
+        socket_timeout: float | None = None,
+        socket_connect_timeout: float | None = None,
+        socket_keepalive: bool | None = None,
+        socket_keepalive_options: Mapping[int, int | bytes] | None = None,
+        connection_pool: ConnectionPool[Any] | None = None,
+        unix_socket_path: str | None = None,
+        encoding: str = "utf-8",
+        encoding_errors: str = "strict",
+        decode_responses: Literal[True],
+        retry_on_timeout: bool = False,
+        retry_on_error: list[type[ValkeyError]] | None = None,
+        ssl: bool = False,
+        ssl_keyfile: str | None = None,
+        ssl_certfile: str | None = None,
+        ssl_cert_reqs: str = "required",
+        ssl_ca_certs: str | None = None,
+        ssl_ca_data: str | None = None,
+        ssl_check_hostname: bool = False,
+        max_connections: int | None = None,
+        single_connection_client: bool = False,
+        health_check_interval: int = 0,
+        client_name: str | None = None,
+        lib_name: str | None = None,
+        lib_version: str | None = None,
+        username: str | None = None,
+        retry: Retry | None = None,
+        auto_close_connection_pool: bool = True,
+        valkey_connect_func: ConnectCallbackT | None = None,
+        credential_provider: CredentialProvider | None = None,
+    ) -> None: ...
+    @overload
+    def __init__(
+        self: Valkey[bytes],
+        *,
+        host: str = "localhost",
+        port: int = 6379,
+        db: str | int = 0,
+        password: str | None = None,
+        socket_timeout: float | None = None,
+        socket_connect_timeout: float | None = None,
+        socket_keepalive: bool | None = None,
+        socket_keepalive_options: Mapping[int, int | bytes] | None = None,
+        connection_pool: ConnectionPool[Any] | None = None,
+        unix_socket_path: str | None = None,
+        encoding: str = "utf-8",
+        encoding_errors: str = "strict",
+        decode_responses: Literal[False] = False,
+        retry_on_timeout: bool = False,
+        retry_on_error: list[type[ValkeyError]] | None = None,
+        ssl: bool = False,
+        ssl_keyfile: str | None = None,
+        ssl_certfile: str | None = None,
+        ssl_cert_reqs: str = "required",
+        ssl_ca_certs: str | None = None,
+        ssl_ca_data: str | None = None,
+        ssl_check_hostname: bool = False,
+        max_connections: int | None = None,
+        single_connection_client: bool = False,
+        health_check_interval: int = 0,
+        client_name: str | None = None,
+        lib_name: str | None = None,
+        lib_version: str | None = None,
+        username: str | None = None,
+        retry: Retry | None = None,
+        auto_close_connection_pool: bool = True,
+        valkey_connect_func: ConnectCallbackT | None = None,
+        credential_provider: CredentialProvider | None = None,
+    ) -> None: ...
+    def __await__(self) -> Generator[Any, None, Self]: ...
+    async def initialize(self) -> Self: ...
+    def set_response_callback(self, command: str, callback: ResponseCallbackT): ...
+    def load_external_module(self, funcname, func) -> None: ...
+    def pipeline(self, transaction: bool = True, shard_hint: str | None = None) -> Pipeline[_StrType]: ...
+    async def transaction(
+        self,
+        func: Callable[[Pipeline[_StrType]], Any | Awaitable[Any]],
+        *watches: KeyT,
+        shard_hint: str | None = None,
+        value_from_callable: bool = False,
+        watch_delay: float | None = None,
+    ): ...
+    def lock(
+        self,
+        name: KeyT,
+        timeout: float | None = None,
+        sleep: float = 0.1,
+        blocking: bool = True,
+        blocking_timeout: float | None = None,
+        lock_class: type[Lock] | None = None,
+        thread_local: bool = True,
+    ) -> Lock: ...
+    def pubsub(self, **kwargs) -> PubSub: ...
+    def monitor(self) -> Monitor: ...
+    def client(self) -> Valkey[_StrType]: ...
+    async def __aenter__(self) -> Self: ...
+    async def __aexit__(
+        self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None
+    ) -> None: ...
+    def __del__(self, _warnings: Any = ...) -> None: ...
+    async def aclose(self, close_connection_pool: bool | None = None) -> None: ...
+    async def close(self, close_connection_pool: bool | None = None) -> None: ...
+    async def execute_command(self, *args, **options): ...
+    async def parse_response(self, connection: Connection, command_name: str | bytes, **options): ...
+
+StrictValkey = Valkey
+
+class MonitorCommandInfo(TypedDict):
+    time: float
+    db: int
+    client_address: str
+    client_port: str
+    client_type: str
+    command: str
+
+class Monitor:
+    monitor_re: Any
+    command_re: Any
+    connection_pool: Any
+    connection: Any
+    def __init__(self, connection_pool: ConnectionPool[Any]) -> None: ...
+    async def connect(self) -> None: ...
+    async def __aenter__(self) -> Self: ...
+    async def __aexit__(self, *args: Unused) -> None: ...
+    async def next_command(self) -> MonitorCommandInfo: ...
+    def listen(self) -> AsyncIterator[MonitorCommandInfo]: ...
+
+class PubSub:
+    PUBLISH_MESSAGE_TYPES: ClassVar[tuple[str, ...]]
+    UNSUBSCRIBE_MESSAGE_TYPES: ClassVar[tuple[str, ...]]
+    HEALTH_CHECK_MESSAGE: ClassVar[str]
+    connection_pool: Any
+    shard_hint: str | None
+    ignore_subscribe_messages: bool
+    connection: Any
+    encoder: Any
+    health_check_response: Iterable[str | bytes]
+    channels: Any
+    pending_unsubscribe_channels: Any
+    patterns: Any
+    pending_unsubscribe_patterns: Any
+    def __init__(
+        self,
+        connection_pool: ConnectionPool[Any],
+        shard_hint: str | None = None,
+        ignore_subscribe_messages: bool = False,
+        encoder: Incomplete | None = None,
+    ) -> None: ...
+    async def __aenter__(self) -> Self: ...
+    async def __aexit__(
+        self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None
+    ) -> None: ...
+    def __del__(self) -> None: ...
+    async def reset(self) -> None: ...
+    async def aclose(self) -> None: ...
+    def close(self) -> Awaitable[NoReturn]: ...
+    async def on_connect(self, connection: Connection): ...
+    @property
+    def subscribed(self) -> bool: ...
+    async def execute_command(self, *args: EncodableT): ...
+    async def parse_response(self, block: bool = True, timeout: float = 0): ...
+    async def check_health(self) -> None: ...
+    async def psubscribe(self, *args: ChannelT, **kwargs: PubSubHandler): ...
+    def punsubscribe(self, *args: ChannelT) -> Awaitable[Any]: ...
+    async def subscribe(self, *args: ChannelT, **kwargs: Callable[..., Any]): ...
+    def unsubscribe(self, *args) -> Awaitable[Any]: ...
+    def listen(self) -> AsyncIterator[Any]: ...
+    async def get_message(self, ignore_subscribe_messages: bool = False, timeout: float = 0.0): ...
+    def ping(self, message: Incomplete | None = None) -> Awaitable[Any]: ...
+    async def handle_message(self, response, ignore_subscribe_messages: bool = False): ...
+    async def run(self, *, exception_handler: PSWorkerThreadExcHandlerT | None = None, poll_timeout: float = 1.0) -> None: ...
+
+class PubsubWorkerExceptionHandler(Protocol):
+    def __call__(self, e: BaseException, pubsub: PubSub): ...
+
+class AsyncPubsubWorkerExceptionHandler(Protocol):
+    async def __call__(self, e: BaseException, pubsub: PubSub): ...
+
+PSWorkerThreadExcHandlerT: TypeAlias = PubsubWorkerExceptionHandler | AsyncPubsubWorkerExceptionHandler
+CommandT: TypeAlias = tuple[tuple[str | bytes, ...], Mapping[str, Any]]
+CommandStackT: TypeAlias = list[CommandT]
+
+class Pipeline(Valkey[_StrType]):
+    UNWATCH_COMMANDS: ClassVar[set[str]]
+    connection_pool: Any
+    connection: Any
+    response_callbacks: Any
+    is_transaction: bool
+    shard_hint: str | None
+    watching: bool
+    command_stack: Any
+    scripts: Any
+    explicit_transaction: bool
+    def __init__(
+        self,
+        connection_pool: ConnectionPool[Any],
+        response_callbacks: MutableMapping[str | bytes, ResponseCallbackT],
+        transaction: bool,
+        shard_hint: str | None,
+    ) -> None: ...
+    async def __aenter__(self) -> Self: ...
+    async def __aexit__(
+        self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None
+    ) -> None: ...
+    def __await__(self) -> Generator[Any, None, Self]: ...
+    def __len__(self) -> int: ...
+    def __bool__(self) -> bool: ...
+    async def reset(self) -> None: ...
+    async def aclose(self) -> None: ...  # type: ignore[override]
+    def multi(self) -> None: ...
+    def execute_command(self, *args, **kwargs) -> Pipeline[_StrType] | Awaitable[Pipeline[_StrType]]: ...
+    async def immediate_execute_command(self, *args, **options): ...
+    def pipeline_execute_command(self, *args, **options): ...
+    def raise_first_error(self, commands: CommandStackT, response: Iterable[Any]): ...
+    def annotate_exception(self, exception: Exception, number: int, command: Iterable[object]) -> None: ...
+    async def parse_response(self, connection: Connection, command_name: str | bytes, **options): ...
+    async def load_scripts(self) -> None: ...
+    async def execute(self, raise_on_error: bool = True): ...
+    async def discard(self) -> None: ...
+    async def watch(self, *names: KeyT) -> bool: ...
+    async def unwatch(self) -> bool: ...
+    # region acl commands
+    def acl_cat(self, category: str | None = None, **kwargs: _CommandOptions) -> Any: ...
+    def acl_deluser(self, *username: str, **kwargs: _CommandOptions) -> Any: ...
+    def acl_genpass(self, bits: int | None = None, **kwargs: _CommandOptions) -> Any: ...
+    def acl_getuser(self, username: str, **kwargs: _CommandOptions) -> Any: ...
+    def acl_help(self, **kwargs: _CommandOptions) -> Any: ...
+    def acl_list(self, **kwargs: _CommandOptions) -> Any: ...
+    def acl_log(self, count: int | None = None, **kwargs: _CommandOptions) -> Any: ...
+    def acl_log_reset(self, **kwargs: _CommandOptions) -> Any: ...
+    def acl_load(self, **kwargs: _CommandOptions) -> Any: ...
+    def acl_save(self, **kwargs: _CommandOptions) -> Any: ...
+    def acl_setuser(  # type: ignore[override]
+        self,
+        username: str,
+        enabled: bool = False,
+        nopass: bool = False,
+        passwords: Sequence[str] | None = None,
+        hashed_passwords: Sequence[str] | None = None,
+        categories: Sequence[str] | None = None,
+        commands: Sequence[str] | None = None,
+        keys: Sequence[str] | None = None,
+        channels: Iterable[ChannelT] | None = None,
+        selectors: Iterable[tuple[str, KeyT]] | None = None,
+        reset: bool = False,
+        reset_keys: bool = False,
+        reset_channels: bool = False,
+        reset_passwords: bool = False,
+        **kwargs: _CommandOptions,
+    ) -> Pipeline[_StrType]: ...
+    def acl_users(self, **kwargs: _CommandOptions) -> Any: ...
+    def acl_whoami(self, **kwargs: _CommandOptions) -> Any: ...
+    # endregion
+    # region cluster commands
+    def cluster(self, cluster_arg: str, *args, **kwargs: _CommandOptions) -> Any: ...
+    def readwrite(self, **kwargs: _CommandOptions) -> Any: ...
+    def readonly(self, **kwargs: _CommandOptions) -> Any: ...
+    # endregion
+    # region BasicKey commands
+    def append(self, key, value) -> Any: ...
+    def bitcount(self, key: _Key, start: int | None = None, end: int | None = None, mode: str | None = None) -> Any: ...
+    def bitfield(self, key, default_overflow: Incomplete | None = None) -> Any: ...
+    def bitop(self, operation, dest, *keys) -> Any: ...
+    def bitpos(self, key: _Key, bit: int, start: int | None = None, end: int | None = None, mode: str | None = None) -> Any: ...
+    def copy(self, source, destination, destination_db: Incomplete | None = None, replace: bool = False) -> Any: ...
+    def decr(self, name, amount: int = 1) -> Any: ...
+    def decrby(self, name, amount: int = 1) -> Any: ...
+    def delete(self, *names: _Key) -> Any: ...
+    def dump(self, name: _Key) -> Any: ...
+    def exists(self, *names: _Key) -> Any: ...
+    def expire(
+        self, name: _Key, time: int | timedelta, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False
+    ) -> Any: ...
+    def expireat(self, name, when, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False) -> Any: ...
+    def get(self, name: _Key) -> Any: ...
+    def getdel(self, name: _Key) -> Any: ...
+    def getex(
+        self,
+        name,
+        ex: Incomplete | None = None,
+        px: Incomplete | None = None,
+        exat: Incomplete | None = None,
+        pxat: Incomplete | None = None,
+        persist: bool = False,
+    ) -> Any: ...
+    def getbit(self, name: _Key, offset: int) -> Any: ...
+    def getrange(self, key, start, end) -> Any: ...
+    def getset(self, name, value) -> Any: ...
+    def incr(self, name: _Key, amount: int = 1) -> Any: ...
+    def incrby(self, name: _Key, amount: int = 1) -> Any: ...
+    def incrbyfloat(self, name: _Key, amount: float = 1.0) -> Any: ...
+    def keys(self, pattern: _Key = "*", **kwargs: _CommandOptions) -> Any: ...
+    def lmove(
+        self,
+        first_list: _Key,
+        second_list: _Key,
+        src: Literal["LEFT", "RIGHT"] = "LEFT",
+        dest: Literal["LEFT", "RIGHT"] = "RIGHT",
+    ) -> Any: ...
+    def blmove(
+        self,
+        first_list: _Key,
+        second_list: _Key,
+        timeout: float,
+        src: Literal["LEFT", "RIGHT"] = "LEFT",
+        dest: Literal["LEFT", "RIGHT"] = "RIGHT",
+    ) -> Any: ...
+    def mget(self, keys: _Key | Iterable[_Key], *args: _Key) -> Any: ...
+    def mset(self, mapping: Mapping[_Key, _Value]) -> Any: ...
+    def msetnx(self, mapping: Mapping[_Key, _Value]) -> Any: ...
+    def move(self, name: _Key, db: int) -> Any: ...
+    def persist(self, name: _Key) -> Any: ...
+    def pexpire(
+        self, name: _Key, time: int | timedelta, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False
+    ) -> Any: ...
+    def pexpireat(
+        self, name: _Key, when: int | datetime, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False
+    ) -> Any: ...
+    def psetex(self, name, time_ms, value) -> Any: ...
+    def pttl(self, name: _Key) -> Any: ...
+    def hrandfield(self, key, count: Incomplete | None = None, withvalues: bool = False) -> Any: ...
+    def randomkey(self, **kwargs: _CommandOptions) -> Any: ...
+    def rename(self, src: _Key, dst: _Key) -> Any: ...
+    def renamenx(self, src: _Key, dst: _Key) -> Any: ...
+    def restore(
+        self,
+        name,
+        ttl,
+        value,
+        replace: bool = False,
+        absttl: bool = False,
+        idletime: Incomplete | None = None,
+        frequency: Incomplete | None = None,
+    ) -> Any: ...
+    def set(  # type: ignore[override]
+        self,
+        name: _Key,
+        value: _Value,
+        ex: None | int | timedelta = None,
+        px: None | int | timedelta = None,
+        nx: bool = False,
+        xx: bool = False,
+        keepttl: bool = False,
+        get: bool = False,
+        exat: Incomplete | None = None,
+        pxat: Incomplete | None = None,
+    ) -> Any: ...
+    def setbit(self, name: _Key, offset: int, value: int) -> Any: ...
+    def setex(self, name: _Key, time: int | timedelta, value: _Value) -> Any: ...
+    def setnx(self, name: _Key, value: _Value) -> Any: ...
+    def setrange(self, name: _Key, offset: int, value: _Value) -> Any: ...
+    def stralgo(
+        self,
+        algo,
+        value1,
+        value2,
+        specific_argument: str = "strings",
+        len: bool = False,
+        idx: bool = False,
+        minmatchlen: Incomplete | None = None,
+        withmatchlen: bool = False,
+        **kwargs: _CommandOptions,
+    ) -> Any: ...
+    def strlen(self, name: _Key) -> Any: ...
+    def substr(self, name: _Key, start: int, end: int = -1) -> Any: ...
+    def touch(self, *args) -> Any: ...
+    def ttl(self, name: _Key) -> Any: ...
+    def type(self, name: _Key) -> Any: ...
+    def unlink(self, *names: _Key) -> Any: ...
+    # endregion
+    # region hyperlog commands
+    def pfadd(self, name: _Key, *values: _Value) -> Any: ...
+    def pfcount(self, name: _Key) -> Any: ...
+    def pfmerge(self, dest: _Key, *sources: _Key) -> Any: ...
+    # endregion
+    # region hash commands
+    def hdel(self, name: _Key, *keys: _Key) -> Any: ...
+    def hexists(self, name: _Key, key: _Key) -> Any: ...
+    def hget(self, name: _Key, key: _Key) -> Any: ...
+    def hgetall(self, name: _Key) -> Any: ...
+    def hincrby(self, name: _Key, key: _Key, amount: int = 1) -> Any: ...
+    def hincrbyfloat(self, name: _Key, key: _Key, amount: float = 1.0) -> Any: ...
+    def hkeys(self, name: _Key) -> Any: ...
+    def hlen(self, name: _Key) -> Any: ...
+    @overload
+    def hset(
+        self, name: _Key, key: _Key, value: _Value, mapping: Mapping[_Key, _Value] | None = None, items: Incomplete | None = None
+    ) -> Any: ...
+    @overload
+    def hset(
+        self, name: _Key, key: None, value: None, mapping: Mapping[_Key, _Value], items: Incomplete | None = None
+    ) -> Any: ...
+    @overload
+    def hset(self, name: _Key, *, mapping: Mapping[_Key, _Value], items: Incomplete | None = None) -> Any: ...
+    def hsetnx(self, name: _Key, key: _Key, value: _Value) -> Any: ...
+    def hmset(self, name: _Key, mapping: Mapping[_Key, _Value]) -> Any: ...
+    def hmget(self, name: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> Any: ...
+    def hvals(self, name: _Key) -> Any: ...
+    def hstrlen(self, name: _Key, key: _Key) -> Any: ...
+    # endregion
+    # region geo commands
+    def geoadd(self, name, values, nx: bool = False, xx: bool = False, ch: bool = False) -> Any: ...
+    def geodist(self, name, place1, place2, unit: Incomplete | None = None) -> Any: ...
+    def geohash(self, name, *values) -> Any: ...
+    def geopos(self, name, *values) -> Any: ...
+    def georadius(
+        self,
+        name,
+        longitude,
+        latitude,
+        radius,
+        unit: Incomplete | None = None,
+        withdist: bool = False,
+        withcoord: bool = False,
+        withhash: bool = False,
+        count: Incomplete | None = None,
+        sort: Incomplete | None = None,
+        store: Incomplete | None = None,
+        store_dist: Incomplete | None = None,
+        any: bool = False,
+    ) -> Any: ...
+    def georadiusbymember(
+        self,
+        name,
+        member,
+        radius,
+        unit: Incomplete | None = None,
+        withdist: bool = False,
+        withcoord: bool = False,
+        withhash: bool = False,
+        count: Incomplete | None = None,
+        sort: Incomplete | None = None,
+        store: Incomplete | None = None,
+        store_dist: Incomplete | None = None,
+        any: bool = False,
+    ) -> Any: ...
+    def geosearch(
+        self,
+        name,
+        member: Incomplete | None = None,
+        longitude: Incomplete | None = None,
+        latitude: Incomplete | None = None,
+        unit: str = "m",
+        radius: Incomplete | None = None,
+        width: Incomplete | None = None,
+        height: Incomplete | None = None,
+        sort: Incomplete | None = None,
+        count: Incomplete | None = None,
+        any: bool = False,
+        withcoord: bool = False,
+        withdist: bool = False,
+        withhash: bool = False,
+    ) -> Any: ...
+    def geosearchstore(
+        self,
+        dest,
+        name,
+        member: Incomplete | None = None,
+        longitude: Incomplete | None = None,
+        latitude: Incomplete | None = None,
+        unit: str = "m",
+        radius: Incomplete | None = None,
+        width: Incomplete | None = None,
+        height: Incomplete | None = None,
+        sort: Incomplete | None = None,
+        count: Incomplete | None = None,
+        any: bool = False,
+        storedist: bool = False,
+    ) -> Any: ...
+    # endregion
+    # region list commands
+    @overload
+    def blpop(self, keys: _Value | Iterable[_Value], timeout: Literal[0] | None = 0) -> Any: ...
+    @overload
+    def blpop(self, keys: _Value | Iterable[_Value], timeout: float) -> Any: ...
+    @overload
+    def brpop(self, keys: _Value | Iterable[_Value], timeout: Literal[0] | None = 0) -> Any: ...
+    @overload
+    def brpop(self, keys: _Value | Iterable[_Value], timeout: float) -> Any: ...
+    def brpoplpush(self, src, dst, timeout: int | None = 0) -> Any: ...
+    def lindex(self, name: _Key, index: int | str) -> Any: ...
+    def linsert(
+        self, name: _Key, where: Literal["BEFORE", "AFTER", "before", "after"], refvalue: _Value, value: _Value
+    ) -> Any: ...
+    def llen(self, name: _Key) -> Any: ...
+    def lpop(self, name: _Key, count: int | None = None) -> Any: ...
+    def lpush(self, name: _Key, *values: _Value) -> Any: ...
+    def lpushx(self, name: _Key, value: _Value) -> Any: ...
+    def lrange(self, name: _Key, start: int, end: int) -> Any: ...
+    def lrem(self, name: _Key, count: int, value: _Value) -> Any: ...
+    def lset(self, name: _Key, index: int, value: _Value) -> Any: ...
+    def ltrim(self, name: _Key, start: int, end: int) -> Any: ...
+    def rpop(self, name: _Key, count: int | None = None) -> Any: ...
+    def rpoplpush(self, src: _Key, dst: _Key) -> Any: ...
+    def rpush(self, name: _Key, *values: _Value) -> Any: ...
+    def rpushx(self, name: _Key, value: _Value) -> Any: ...
+    def lpos(
+        self, name, value, rank: Incomplete | None = None, count: Incomplete | None = None, maxlen: Incomplete | None = None
+    ) -> Any: ...
+    @overload  # type: ignore[override]
+    def sort(
+        self,
+        name: _Key,
+        start: int | None = None,
+        num: int | None = None,
+        by: _Key | None = None,
+        get: _Key | Sequence[_Key] | None = None,
+        desc: bool = False,
+        alpha: bool = False,
+        store: None = None,
+        groups: bool = False,
+    ) -> list[_StrType]: ...
+    @overload
+    def sort(
+        self,
+        name: _Key,
+        start: int | None = None,
+        num: int | None = None,
+        by: _Key | None = None,
+        get: _Key | Sequence[_Key] | None = None,
+        desc: bool = False,
+        alpha: bool = False,
+        *,
+        store: _Key,
+        groups: bool = False,
+    ) -> Any: ...
+    @overload
+    def sort(
+        self,
+        name: _Key,
+        start: int | None,
+        num: int | None,
+        by: _Key | None,
+        get: _Key | Sequence[_Key] | None,
+        desc: bool,
+        alpha: bool,
+        store: _Key,
+        groups: bool = False,
+    ) -> Any: ...
+    # endregion
+    # region scan commands
+    def scan(
+        self,
+        cursor: int = 0,
+        match: _Key | None = None,
+        count: int | None = None,
+        _type: str | None = None,
+        **kwargs: _CommandOptions,
+    ) -> Any: ...
+    def sscan(self, name: _Key, cursor: int = 0, match: _Key | None = None, count: int | None = None) -> Any: ...
+    def hscan(self, name: _Key, cursor: int = 0, match: _Key | None = None, count: int | None = None, no_values: bool | None = None) -> Any: ...
+    @overload
+    def zscan(self, name: _Key, cursor: int = 0, match: _Key | None = None, count: int | None = None) -> Any: ...
+    @overload
+    def zscan(
+        self,
+        name: _Key,
+        cursor: int = 0,
+        match: _Key | None = None,
+        count: int | None = None,
+        *,
+        score_cast_func: Callable[[_StrType], Any],
+    ) -> Any: ...
+    @overload
+    def zscan(
+        self, name: _Key, cursor: int, match: _Key | None, count: int | None, score_cast_func: Callable[[_StrType], Any]
+    ) -> Any: ...
+    # endregion
+    # region set commands
+    def sadd(self, name: _Key, *values: _Value) -> Any: ...
+    def scard(self, name: _Key) -> Any: ...
+    def sdiff(self, keys: _Key | Iterable[_Key], *args: _Key) -> Any: ...
+    def sdiffstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> Any: ...
+    def sinter(self, keys: _Key | Iterable[_Key], *args: _Key) -> Any: ...
+    def sinterstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> Any: ...
+    def sismember(self, name: _Key, value: _Value) -> Any: ...
+    def smembers(self, name: _Key) -> Any: ...
+    def smismember(self, name, values, *args) -> Any: ...
+    def smove(self, src: _Key, dst: _Key, value: _Value) -> Any: ...
+    @overload
+    def spop(self, name: _Key, count: None = None) -> Any: ...
+    @overload
+    def spop(self, name: _Key, count: int) -> Any: ...
+    @overload
+    def srandmember(self, name: _Key, number: None = None) -> Any: ...
+    @overload
+    def srandmember(self, name: _Key, number: int) -> Any: ...
+    def srem(self, name: _Key, *values: _Value) -> Any: ...
+    def sunion(self, keys: _Key | Iterable[_Key], *args: _Key) -> Any: ...
+    def sunionstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> Any: ...
+    # endregion
+    # region stream commands
+    def xack(self, name, groupname, *ids) -> Any: ...
+    def xadd(
+        self,
+        name,
+        fields,
+        id: str | int | bytes | memoryview = "*",
+        maxlen=None,
+        approximate: bool = True,
+        nomkstream: bool = False,
+        minid: Incomplete | None = None,
+        limit: Incomplete | None = None,
+    ) -> Any: ...
+    def xautoclaim(
+        self,
+        name,
+        groupname,
+        consumername,
+        min_idle_time,
+        start_id: StreamIdT = "0-0",
+        count: Incomplete | None = None,
+        justid: bool = False,
+    ) -> Any: ...
+    def xclaim(
+        self,
+        name,
+        groupname,
+        consumername,
+        min_idle_time,
+        message_ids,
+        idle=None,
+        time=None,
+        retrycount=None,
+        force=False,
+        justid=False,
+    ) -> Any: ...
+    def xdel(self, name, *ids) -> Any: ...
+    def xgroup_create(self, name, groupname, id: str | int = "$", mkstream: bool = False, entries_read: int | None = None) -> Any: ...
+    def xgroup_delconsumer(self, name, groupname, consumername) -> Any: ...
+    def xgroup_destroy(self, name, groupname) -> Any: ...
+    def xgroup_createconsumer(self, name, groupname, consumername) -> Any: ...
+    def xgroup_setid(self, name, groupname, id, entries_read: int | None = None) -> Any: ...
+    def xinfo_consumers(self, name, groupname) -> Any: ...
+    def xinfo_groups(self, name) -> Any: ...
+    def xinfo_stream(self, name, full: bool = False) -> Any: ...
+    def xlen(self, name: _Key) -> Any: ...
+    def xpending(self, name, groupname) -> Any: ...
+    def xpending_range(
+        self, name: _Key, groupname, min, max, count: int, consumername: Incomplete | None = None, idle: int | None = None
+    ) -> Any: ...
+    def xrange(self, name, min: str = "-", max: str = "+", count: Incomplete | None = None) -> Any: ...
+    def xread(self, streams, count: Incomplete | None = None, block: Incomplete | None = None) -> Any: ...
+    def xreadgroup(
+        self,
+        groupname,
+        consumername,
+        streams,
+        count: Incomplete | None = None,
+        block: Incomplete | None = None,
+        noack: bool = False,
+    ) -> Any: ...
+    def xrevrange(self, name, max: str = "+", min: str = "-", count: Incomplete | None = None) -> Any: ...
+    def xtrim(
+        self, name, maxlen: int | None = None, approximate: bool = True, minid: Incomplete | None = None, limit: int | None = None
+    ) -> Any: ...
+    # endregion
+    # region sorted set commands
+    def zadd(
+        self,
+        name: _Key,
+        mapping: Mapping[_Key, _Value],
+        nx: bool = False,
+        xx: bool = False,
+        ch: bool = False,
+        incr: bool = False,
+        gt: bool = False,
+        lt: bool = False,
+    ) -> Any: ...
+    def zcard(self, name: _Key) -> Any: ...
+    def zcount(self, name: _Key, min: _Value, max: _Value) -> Any: ...
+    def zdiff(self, keys, withscores: bool = False) -> Any: ...
+    def zdiffstore(self, dest, keys) -> Any: ...
+    def zincrby(self, name: _Key, amount: float, value: _Value) -> Any: ...
+    def zinter(self, keys, aggregate: Incomplete | None = None, withscores: bool = False) -> Any: ...
+    def zinterstore(self, dest: _Key, keys: Iterable[_Key], aggregate: Literal["SUM", "MIN", "MAX"] | None = None) -> Any: ...
+    def zlexcount(self, name: _Key, min: _Value, max: _Value) -> Any: ...
+    def zpopmax(self, name: _Key, count: int | None = None) -> Any: ...
+    def zpopmin(self, name: _Key, count: int | None = None) -> Any: ...
+    def zrandmember(self, key, count: Incomplete | None = None, withscores: bool = False) -> Any: ...
+    @overload
+    def bzpopmax(self, keys: _Key | Iterable[_Key], timeout: Literal[0] = 0) -> Any: ...
+    @overload
+    def bzpopmax(self, keys: _Key | Iterable[_Key], timeout: float) -> Any: ...
+    @overload
+    def bzpopmin(self, keys: _Key | Iterable[_Key], timeout: Literal[0] = 0) -> Any: ...
+    @overload
+    def bzpopmin(self, keys: _Key | Iterable[_Key], timeout: float) -> Any: ...
+    @overload  # type: ignore[override]
+    def zrange(
+        self,
+        name: _Key,
+        start: int,
+        end: int,
+        desc: bool,
+        withscores: Literal[True],
+        score_cast_func: Callable[[_StrType], Any],
+        byscore: bool = False,
+        bylex: bool = False,
+        offset: int | None = None,
+        num: int | None = None,
+    ) -> Any: ...
+    @overload
+    def zrange(
+        self,
+        name: _Key,
+        start: int,
+        end: int,
+        desc: bool,
+        withscores: Literal[True],
+        score_cast_func: Callable[[_StrType], float] = ...,
+        byscore: bool = False,
+        bylex: bool = False,
+        offset: int | None = None,
+        num: int | None = None,
+    ) -> Any: ...
+    @overload
+    def zrange(
+        self,
+        name: _Key,
+        start: int,
+        end: int,
+        *,
+        withscores: Literal[True],
+        score_cast_func: Callable[[_StrType], None],
+        byscore: bool = False,
+        bylex: bool = False,
+        offset: int | None = None,
+        num: int | None = None,
+    ) -> Any: ...
+    @overload
+    def zrange(
+        self,
+        name: _Key,
+        start: int,
+        end: int,
+        *,
+        withscores: Literal[True],
+        score_cast_func: Callable[[_StrType], float] = ...,
+        byscore: bool = False,
+        bylex: bool = False,
+        offset: int | None = None,
+        num: int | None = None,
+    ) -> Any: ...
+    @overload
+    def zrange(
+        self,
+        name: _Key,
+        start: int,
+        end: int,
+        desc: bool = False,
+        withscores: bool = False,
+        score_cast_func: Callable[[_StrType], Any] = ...,
+        byscore: bool = False,
+        bylex: bool = False,
+        offset: int | None = None,
+        num: int | None = None,
+    ) -> Any: ...
+    @overload  # type: ignore[override]
+    def zrevrange(
+        self, name: _Key, start: int, end: int, withscores: Literal[True], score_cast_func: Callable[[_StrType], None]
+    ) -> Any: ...
+    @overload
+    def zrevrange(self, name: _Key, start: int, end: int, withscores: Literal[True]) -> Any: ...
+    @overload
+    def zrevrange(
+        self, name: _Key, start: int, end: int, withscores: bool = False, score_cast_func: Callable[[Any], Any] = ...
+    ) -> Any: ...
+    def zrangestore(
+        self,
+        dest,
+        name,
+        start,
+        end,
+        byscore: bool = False,
+        bylex: bool = False,
+        desc: bool = False,
+        offset: Incomplete | None = None,
+        num: Incomplete | None = None,
+    ) -> Any: ...
+    def zrangebylex(self, name: _Key, min: _Value, max: _Value, start: int | None = None, num: int | None = None) -> Any: ...
+    def zrevrangebylex(self, name: _Key, max: _Value, min: _Value, start: int | None = None, num: int | None = None) -> Any: ...
+    @overload  # type: ignore[override]
+    def zrangebyscore(
+        self,
+        name: _Key,
+        min: _Value,
+        max: _Value,
+        start: int | None = None,
+        num: int | None = None,
+        *,
+        withscores: Literal[True],
+        score_cast_func: Callable[[_StrType], None],
+    ) -> Any: ...
+    @overload
+    def zrangebyscore(
+        self, name: _Key, min: _Value, max: _Value, start: int | None = None, num: int | None = None, *, withscores: Literal[True]
+    ) -> Any: ...
+    @overload
+    def zrangebyscore(
+        self,
+        name: _Key,
+        min: _Value,
+        max: _Value,
+        start: int | None = None,
+        num: int | None = None,
+        withscores: bool = False,
+        score_cast_func: Callable[[_StrType], Any] = ...,
+    ) -> Any: ...
+    @overload
+    def zrevrangebyscore(
+        self,
+        name: _Key,
+        max: _Value,
+        min: _Value,
+        start: int | None = None,
+        num: int | None = None,
+        *,
+        withscores: Literal[True],
+        score_cast_func: Callable[[_StrType], Any],
+    ) -> Any: ...
+    @overload
+    def zrevrangebyscore(
+        self, name: _Key, max: _Value, min: _Value, start: int | None = None, num: int | None = None, *, withscores: Literal[True]
+    ) -> Any: ...
+    @overload
+    def zrevrangebyscore(
+        self,
+        name: _Key,
+        max: _Value,
+        min: _Value,
+        start: int | None = None,
+        num: int | None = None,
+        withscores: bool = False,
+        score_cast_func: Callable[[_StrType], Any] = ...,
+    ) -> Any: ...
+    def zrank(self, name: _Key, value: _Value, withscore: bool = False) -> Any: ...
+    def zrem(self, name: _Key, *values: _Value) -> Any: ...
+    def zremrangebylex(self, name: _Key, min: _Value, max: _Value) -> Any: ...
+    def zremrangebyrank(self, name: _Key, min: int, max: int) -> Any: ...
+    def zremrangebyscore(self, name: _Key, min: _Value, max: _Value) -> Any: ...
+    def zrevrank(self, name: _Key, value: _Value, withscore: bool = False) -> Any: ...
+    def zscore(self, name: _Key, value: _Value) -> Any: ...
+    def zunion(self, keys, aggregate: Incomplete | None = None, withscores: bool = False) -> Any: ...
+    def zunionstore(self, dest: _Key, keys: Iterable[_Key], aggregate: Literal["SUM", "MIN", "MAX"] | None = None) -> Any: ...
+    def zmscore(self, key, members) -> Any: ...
+    # endregion
+    # region management commands
+    def bgrewriteaof(self, **kwargs: _CommandOptions) -> Any: ...
+    def bgsave(self, schedule: bool = True, **kwargs: _CommandOptions) -> Any: ...
+    def role(self) -> Any: ...
+    def client_kill(self, address: str, **kwargs: _CommandOptions) -> Any: ...
+    def client_kill_filter(
+        self,
+        _id: Incomplete | None = None,
+        _type: Incomplete | None = None,
+        addr: Incomplete | None = None,
+        skipme: Incomplete | None = None,
+        laddr: Incomplete | None = None,
+        user: Incomplete | None = None,
+        **kwargs: _CommandOptions,
+    ) -> Any: ...
+    def client_info(self, **kwargs: _CommandOptions) -> Any: ...
+    def client_list(self, _type: str | None = None, client_id: list[str] = [], **kwargs: _CommandOptions) -> Any: ...
+    def client_getname(self, **kwargs: _CommandOptions) -> Any: ...
+    def client_getredir(self, **kwargs: _CommandOptions) -> Any: ...
+    def client_reply(self, reply, **kwargs: _CommandOptions) -> Any: ...
+    def client_id(self, **kwargs: _CommandOptions) -> Any: ...
+    def client_tracking_on(
+        self,
+        clientid: Incomplete | None = None,
+        prefix=[],
+        bcast: bool = False,
+        optin: bool = False,
+        optout: bool = False,
+        noloop: bool = False,
+    ) -> Any: ...
+    def client_tracking_off(
+        self,
+        clientid: Incomplete | None = None,
+        prefix=[],
+        bcast: bool = False,
+        optin: bool = False,
+        optout: bool = False,
+        noloop: bool = False,
+    ) -> Any: ...
+    def client_tracking(
+        self,
+        on: bool = True,
+        clientid: Incomplete | None = None,
+        prefix=[],
+        bcast: bool = False,
+        optin: bool = False,
+        optout: bool = False,
+        noloop: bool = False,
+        **kwargs: _CommandOptions,
+    ) -> Any: ...
+    def client_trackinginfo(self, **kwargs: _CommandOptions) -> Any: ...
+    def client_setname(self, name: str, **kwargs: _CommandOptions) -> Any: ...
+    def client_unblock(self, client_id, error: bool = False, **kwargs: _CommandOptions) -> Any: ...
+    def client_pause(self, timeout, all: bool = True, **kwargs: _CommandOptions) -> Any: ...
+    def client_unpause(self, **kwargs: _CommandOptions) -> Any: ...
+    def command(self, **kwargs: _CommandOptions) -> Any: ...
+    def command_info(self, **kwargs: _CommandOptions) -> Any: ...
+    def command_count(self, **kwargs: _CommandOptions) -> Any: ...
+    def config_get(self, pattern: PatternT = "*", *args: PatternT, **kwargs: _CommandOptions) -> Any: ...
+    def config_set(self, name: KeyT, value: EncodableT, *args: KeyT | EncodableT, **kwargs: _CommandOptions) -> Any: ...
+    def config_resetstat(self, **kwargs: _CommandOptions) -> Any: ...
+    def config_rewrite(self, **kwargs: _CommandOptions) -> Any: ...
+    def dbsize(self, **kwargs: _CommandOptions) -> Any: ...
+    def debug_object(self, key, **kwargs: _CommandOptions) -> Any: ...
+    def debug_segfault(self, **kwargs: _CommandOptions) -> Any: ...
+    def echo(self, value: _Value, **kwargs: _CommandOptions) -> Any: ...
+    def flushall(self, asynchronous: bool = False, **kwargs: _CommandOptions) -> Any: ...
+    def flushdb(self, asynchronous: bool = False, **kwargs: _CommandOptions) -> Any: ...
+    def sync(self) -> Any: ...
+    def psync(self, replicationid, offset) -> Any: ...
+    def swapdb(self, first, second, **kwargs: _CommandOptions) -> Any: ...
+    def select(self, index, **kwargs: _CommandOptions) -> Any: ...
+    def info(self, section: _Key | None = None, *args: _Key, **kwargs: _CommandOptions) -> Any: ...
+    def lastsave(self, **kwargs: _CommandOptions) -> Any: ...
+    def lolwut(self, *version_numbers: _Value, **kwargs: _CommandOptions) -> Any: ...
+    def migrate(
+        self,
+        host,
+        port,
+        keys,
+        destination_db,
+        timeout,
+        copy: bool = False,
+        replace: bool = False,
+        auth: Incomplete | None = None,
+        **kwargs: _CommandOptions,
+    ) -> Any: ...
+    def object(self, infotype, key, **kwargs: _CommandOptions) -> Any: ...
+    def memory_doctor(self, **kwargs: _CommandOptions) -> Any: ...
+    def memory_help(self, **kwargs: _CommandOptions) -> Any: ...
+    def memory_stats(self, **kwargs: _CommandOptions) -> Any: ...
+    def memory_malloc_stats(self, **kwargs: _CommandOptions) -> Any: ...
+    def memory_usage(self, key, samples: Incomplete | None = None, **kwargs: _CommandOptions) -> Any: ...
+    def memory_purge(self, **kwargs: _CommandOptions) -> Any: ...
+    def ping(self, **kwargs: _CommandOptions) -> Any: ...
+    def quit(self, **kwargs: _CommandOptions) -> Any: ...
+    def replicaof(self, *args, **kwargs: _CommandOptions) -> Any: ...
+    def save(self, **kwargs: _CommandOptions) -> Any: ...
+    def shutdown(
+        self,
+        save: bool = False,
+        nosave: bool = False,
+        now: bool = False,
+        force: bool = False,
+        abort: bool = False,
+        **kwargs: _CommandOptions,
+    ) -> Any: ...
+    def slaveof(self, host: Incomplete | None = None, port: Incomplete | None = None, **kwargs: _CommandOptions) -> Any: ...
+    def slowlog_get(self, num: Incomplete | None = None, **kwargs: _CommandOptions) -> Any: ...
+    def slowlog_len(self, **kwargs: _CommandOptions) -> Any: ...
+    def slowlog_reset(self, **kwargs: _CommandOptions) -> Any: ...
+    def time(self, **kwargs: _CommandOptions) -> Any: ...
+    def wait(self, num_replicas, timeout, **kwargs: _CommandOptions) -> Any: ...
+    # endregion
+    # region module commands
+    def module_load(self, path, *args) -> Any: ...
+    def module_unload(self, name) -> Any: ...
+    def module_list(self) -> Any: ...
+    def command_getkeys(self, *args) -> Any: ...
+    # endregion
+    # region pubsub commands
+    def publish(self, channel: _Key, message: _Key, **kwargs: _CommandOptions) -> Any: ...
+    def pubsub_channels(self, pattern: _Key = "*", **kwargs: _CommandOptions) -> Any: ...
+    def pubsub_numpat(self, **kwargs: _CommandOptions) -> Any: ...
+    def pubsub_numsub(self, *args: _Key, **kwargs: _CommandOptions) -> Any: ...
+    # endregion
+    # region script commands
+    def eval(self, script, numkeys, *keys_and_args) -> Any: ...
+    def evalsha(self, sha, numkeys, *keys_and_args) -> Any: ...
+    def script_exists(self, *args) -> Any: ...
+    def script_debug(self, *args) -> Any: ...
+    def script_flush(self, sync_type: Incomplete | None = None) -> Any: ...
+    def script_kill(self) -> Any: ...
+    def script_load(self, script) -> Any: ...
+    def register_script(self, script: str | _StrType) -> Any: ...  # type: ignore[override]
+    # endregion
diff --git a/valkey/asyncio/cluster.pyi b/valkey/asyncio/cluster.pyi
new file mode 100644
index 00000000..257769d6
--- /dev/null
+++ b/valkey/asyncio/cluster.pyi
@@ -0,0 +1,229 @@
+from _typeshed import Incomplete
+from collections.abc import Awaitable, Callable, Mapping
+from types import TracebackType
+from typing import Any, Generic, TypeVar
+from typing_extensions import Self
+
+from valkey.asyncio.client import ResponseCallbackT
+from valkey.asyncio.connection import AbstractConnection, BaseParser, Connection, Encoder
+from valkey.asyncio.parser import CommandsParser
+from valkey.client import AbstractValkey
+from valkey.cluster import AbstractValkeyCluster, LoadBalancer
+
+# TODO: add  AsyncValkeyClusterCommands stubs
+# from valkey.commands import AsyncValkeyClusterCommands
+from valkey.commands.core import _StrType
+from valkey.credentials import CredentialProvider
+from valkey.exceptions import ResponseError
+from valkey.retry import Retry
+from valkey.typing import AnyKeyT, EncodableT, KeyT
+
+TargetNodesT = TypeVar("TargetNodesT", str, ClusterNode, list[ClusterNode], dict[Any, ClusterNode])  # noqa: Y001
+
+# It uses `DefaultParser` in real life, but it is a dynamic base class.
+class ClusterParser(BaseParser):
+    def on_disconnect(self) -> None: ...
+    def on_connect(self, connection: AbstractConnection) -> None: ...
+    async def can_read_destructive(self) -> bool: ...
+    async def read_response(self, disable_decoding: bool = False) -> EncodableT | ResponseError | list[EncodableT] | None: ...
+
+class ValkeyCluster(AbstractValkey, AbstractValkeyCluster, Generic[_StrType]):  # TODO: AsyncValkeyClusterCommands
+    @classmethod
+    def from_url(
+        cls,
+        url: str,
+        *,
+        host: str | None = None,
+        port: str | int = 6379,
+        # Cluster related kwargs
+        startup_nodes: list[ClusterNode] | None = None,
+        require_full_coverage: bool = True,
+        read_from_replicas: bool = False,
+        reinitialize_steps: int = 5,
+        cluster_error_retry_attempts: int = 3,
+        connection_error_retry_attempts: int = 3,
+        max_connections: int = 2147483648,
+        # Client related kwargs
+        db: str | int = 0,
+        path: str | None = None,
+        credential_provider: CredentialProvider | None = None,
+        username: str | None = None,
+        password: str | None = None,
+        client_name: str | None = None,
+        # Encoding related kwargs
+        encoding: str = "utf-8",
+        encoding_errors: str = "strict",
+        decode_responses: bool = False,
+        # Connection related kwargs
+        health_check_interval: float = 0,
+        socket_connect_timeout: float | None = None,
+        socket_keepalive: bool = False,
+        socket_keepalive_options: Mapping[int, int | bytes] | None = None,
+        socket_timeout: float | None = None,
+        retry: Retry | None = None,
+        retry_on_error: list[type[Exception]] | None = None,
+        # SSL related kwargs
+        ssl: bool = False,
+        ssl_ca_certs: str | None = None,
+        ssl_ca_data: str | None = None,
+        ssl_cert_reqs: str = "required",
+        ssl_certfile: str | None = None,
+        ssl_check_hostname: bool = False,
+        ssl_keyfile: str | None = None,
+        address_remap: Callable[[str, int], tuple[str, int]] | None = None,
+    ) -> Self: ...
+
+    retry: Retry | None
+    connection_kwargs: dict[str, Any]
+    nodes_manager: NodesManager
+    encoder: Encoder
+    read_from_replicas: bool
+    reinitialize_steps: int
+    cluster_error_retry_attempts: int
+    reinitialize_counter: int
+    commands_parser: CommandsParser
+    node_flags: set[str]
+    command_flags: dict[str, str]
+    response_callbacks: Incomplete
+    result_callbacks: dict[str, Callable[[Incomplete, Incomplete], Incomplete]]
+
+    def __init__(
+        self,
+        host: str | None = None,
+        port: str | int = 6379,
+        # Cluster related kwargs
+        startup_nodes: list[ClusterNode] | None = None,
+        require_full_coverage: bool = True,
+        read_from_replicas: bool = False,
+        reinitialize_steps: int = 5,
+        cluster_error_retry_attempts: int = 3,
+        connection_error_retry_attempts: int = 3,
+        max_connections: int = 2147483648,
+        # Client related kwargs
+        db: str | int = 0,
+        path: str | None = None,
+        credential_provider: CredentialProvider | None = None,
+        username: str | None = None,
+        password: str | None = None,
+        client_name: str | None = None,
+        # Encoding related kwargs
+        encoding: str = "utf-8",
+        encoding_errors: str = "strict",
+        decode_responses: bool = False,
+        # Connection related kwargs
+        health_check_interval: float = 0,
+        socket_connect_timeout: float | None = None,
+        socket_keepalive: bool = False,
+        socket_keepalive_options: Mapping[int, int | bytes] | None = None,
+        socket_timeout: float | None = None,
+        retry: Retry | None = None,
+        retry_on_error: list[type[Exception]] | None = None,
+        # SSL related kwargs
+        ssl: bool = False,
+        ssl_ca_certs: str | None = None,
+        ssl_ca_data: str | None = None,
+        ssl_cert_reqs: str = "required",
+        ssl_certfile: str | None = None,
+        ssl_check_hostname: bool = False,
+        ssl_keyfile: str | None = None,
+        address_remap: Callable[[str, int], tuple[str, int]] | None = None,
+    ) -> None: ...
+    async def initialize(self) -> Self: ...
+    async def close(self) -> None: ...
+    async def __aenter__(self) -> Self: ...
+    async def __aexit__(
+        self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None
+    ) -> None: ...
+    def __await__(self) -> Awaitable[Self]: ...
+    def __del__(self) -> None: ...
+    async def on_connect(self, connection: Connection) -> None: ...
+    def get_nodes(self) -> list[ClusterNode]: ...
+    def get_primaries(self) -> list[ClusterNode]: ...
+    def get_replicas(self) -> list[ClusterNode]: ...
+    def get_random_node(self) -> ClusterNode: ...
+    def get_default_node(self) -> ClusterNode: ...
+    def set_default_node(self, node: ClusterNode) -> None: ...
+    def get_node(self, host: str | None = None, port: int | None = None, node_name: str | None = None) -> ClusterNode | None: ...
+    def get_node_from_key(self, key: str, replica: bool = False) -> ClusterNode | None: ...
+    def keyslot(self, key: EncodableT) -> int: ...
+    def get_encoder(self) -> Encoder: ...
+    def get_connection_kwargs(self) -> dict[str, Any | None]: ...
+    def set_response_callback(self, command: str, callback: ResponseCallbackT) -> None: ...
+    async def execute_command(self, *args: EncodableT, **kwargs: Any) -> Any: ...
+    def pipeline(self, transaction: Any | None = None, shard_hint: Any | None = None) -> ClusterPipeline[_StrType]: ...
+
+class ClusterNode:
+    host: str
+    port: str | int
+    name: str
+    server_type: str | None
+    max_connections: int
+    connection_class: type[Connection]
+    connection_kwargs: dict[str, Any]
+    response_callbacks: dict[Incomplete, Incomplete]
+    def __init__(
+        self,
+        host: str,
+        port: str | int,
+        server_type: str | None = None,
+        *,
+        max_connections: int = 2147483648,
+        connection_class: type[Connection] = ...,
+        **connection_kwargs: Any,
+    ) -> None: ...
+    def __eq__(self, obj: object) -> bool: ...
+    def __del__(self) -> None: ...
+    async def disconnect(self) -> None: ...
+    def acquire_connection(self) -> Connection: ...
+    async def parse_response(self, connection: Connection, command: str, **kwargs: Any) -> Any: ...
+    async def execute_command(self, *args: Any, **kwargs: Any) -> Any: ...
+    async def execute_pipeline(self, commands: list[PipelineCommand]) -> bool: ...
+
+class NodesManager:
+    startup_nodes: dict[str, ClusterNode]
+    require_full_coverage: bool
+    connection_kwargs: dict[str, Any]
+    default_node: ClusterNode | None
+    nodes_cache: dict[str, ClusterNode]
+    slots_cache: dict[int, list[ClusterNode]]
+    read_load_balancer: LoadBalancer
+    address_remap: Callable[[str, int], tuple[str, int]] | None
+    def __init__(
+        self,
+        startup_nodes: list[ClusterNode],
+        require_full_coverage: bool,
+        connection_kwargs: dict[str, Any],
+        address_remap: Callable[[str, int], tuple[str, int]] | None = None,
+    ) -> None: ...
+    def get_node(self, host: str | None = None, port: int | None = None, node_name: str | None = None) -> ClusterNode | None: ...
+    def set_nodes(self, old: dict[str, ClusterNode], new: dict[str, ClusterNode], remove_old: bool = False) -> None: ...
+    def get_node_from_slot(self, slot: int, read_from_replicas: bool = False) -> ClusterNode: ...
+    def get_nodes_by_server_type(self, server_type: str) -> list[ClusterNode]: ...
+    async def initialize(self) -> None: ...
+    async def close(self, attr: str = "nodes_cache") -> None: ...
+    def remap_host_port(self, host: str, port: int) -> tuple[str, int]: ...
+
+class ClusterPipeline(AbstractValkey, AbstractValkeyCluster, Generic[_StrType]):  # TODO: AsyncValkeyClusterCommands
+    def __init__(self, client: ValkeyCluster[_StrType]) -> None: ...
+    async def initialize(self) -> Self: ...
+    async def __aenter__(self) -> Self: ...
+    async def __aexit__(
+        self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None
+    ) -> None: ...
+    def __await__(self) -> Awaitable[Self]: ...
+    def __enter__(self) -> Self: ...
+    def __exit__(
+        self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None
+    ) -> None: ...
+    def __bool__(self) -> bool: ...
+    def __len__(self) -> int: ...
+    def execute_command(self, *args: KeyT | EncodableT, **kwargs: Any) -> Self: ...
+    async def execute(self, raise_on_error: bool = True, allow_redirections: bool = True) -> list[Any]: ...
+    def mset_nonatomic(self, mapping: Mapping[AnyKeyT, EncodableT]) -> Self: ...
+
+class PipelineCommand:
+    args: Any
+    kwargs: Any
+    position: int
+    result: Exception | None | Any
+    def __init__(self, position: int, *args: Any, **kwargs: Any) -> None: ...
diff --git a/valkey/asyncio/connection.pyi b/valkey/asyncio/connection.pyi
new file mode 100644
index 00000000..b0525ffd
--- /dev/null
+++ b/valkey/asyncio/connection.pyi
@@ -0,0 +1,363 @@
+import asyncio
+import enum
+import ssl
+from _typeshed import Unused
+from abc import abstractmethod
+from collections.abc import Callable, Iterable, Mapping
+from types import MappingProxyType
+from typing import Any, Final, Generic, Literal, Protocol, TypedDict, TypeVar, overload
+from typing_extensions import Self, TypeAlias
+
+from valkey.asyncio.retry import Retry
+from valkey.credentials import CredentialProvider
+from valkey.exceptions import AuthenticationError, ResponseError, ValkeyError
+from valkey.typing import EncodableT, EncodedT
+
+_SSLVerifyMode: TypeAlias = Literal["none", "optional", "required"]
+
+SYM_STAR: Final[bytes]
+SYM_DOLLAR: Final[bytes]
+SYM_CRLF: Final[bytes]
+SYM_LF: Final[bytes]
+SYM_EMPTY: Final[bytes]
+
+SERVER_CLOSED_CONNECTION_ERROR: Final[str]
+
+class _Sentinel(enum.Enum):
+    sentinel = object()
+
+SENTINEL: Final[object]
+MODULE_LOAD_ERROR: Final[str]
+NO_SUCH_MODULE_ERROR: Final[str]
+MODULE_UNLOAD_NOT_POSSIBLE_ERROR: Final[str]
+MODULE_EXPORTS_DATA_TYPES_ERROR: Final[str]
+NO_AUTH_SET_ERROR: Final[dict[str, type[AuthenticationError]]]
+
+class Encoder:
+    encoding: str
+    encoding_errors: str
+    decode_responses: bool
+    def __init__(self, encoding: str, encoding_errors: str, decode_responses: bool) -> None: ...
+    def encode(self, value: EncodableT) -> EncodedT: ...
+    def decode(self, value: EncodableT, force: bool = False) -> EncodableT: ...
+
+ExceptionMappingT: TypeAlias = Mapping[str, type[Exception] | Mapping[str, type[Exception]]]
+
+class BaseParser:
+    EXCEPTION_CLASSES: ExceptionMappingT
+    def __init__(self, socket_read_size: int) -> None: ...
+    @classmethod
+    def parse_error(cls, response: str) -> ResponseError: ...
+    @abstractmethod
+    def on_disconnect(self) -> None: ...
+    @abstractmethod
+    def on_connect(self, connection: AbstractConnection) -> None: ...
+    @abstractmethod
+    async def can_read_destructive(self) -> bool: ...
+    @abstractmethod
+    async def read_response(self, disable_decoding: bool = False) -> EncodableT | ResponseError | list[EncodableT] | None: ...
+
+class PythonParser(BaseParser):
+    encoder: Encoder | None
+    def __init__(self, socket_read_size: int) -> None: ...
+    def on_connect(self, connection: AbstractConnection) -> None: ...
+    def on_disconnect(self) -> None: ...
+    async def can_read_destructive(self) -> bool: ...
+    async def read_response(self, disable_decoding: bool = False) -> EncodableT | ResponseError | None: ...
+
+class LibvalkeyParser(BaseParser):
+    def __init__(self, socket_read_size: int) -> None: ...
+    def on_connect(self, connection: AbstractConnection) -> None: ...
+    def on_disconnect(self) -> None: ...
+    async def can_read_destructive(self) -> bool: ...
+    async def read_from_socket(self) -> Literal[True]: ...
+    async def read_response(self, disable_decoding: bool = False) -> EncodableT | list[EncodableT]: ...
+
+DefaultParser: type[PythonParser | LibvalkeyParser]
+
+class ConnectCallbackProtocol(Protocol):
+    def __call__(self, connection: Connection): ...
+
+class AsyncConnectCallbackProtocol(Protocol):
+    async def __call__(self, connection: Connection): ...
+
+ConnectCallbackT: TypeAlias = ConnectCallbackProtocol | AsyncConnectCallbackProtocol
+
+class AbstractConnection:
+    pid: int
+    db: str | int
+    client_name: str | None
+    credential_provider: CredentialProvider | None
+    password: str | None
+    username: str | None
+    socket_timeout: float | None
+    socket_connect_timeout: float | None
+    retry_on_timeout: bool
+    retry_on_error: list[type[Exception]]
+    retry: Retry
+    health_check_interval: float
+    next_health_check: float
+    encoder: Encoder
+    valkey_connect_func: ConnectCallbackT | None
+
+    def __init__(
+        self,
+        *,
+        db: str | int = 0,
+        password: str | None = None,
+        socket_timeout: float | None = None,
+        socket_connect_timeout: float | None = None,
+        retry_on_timeout: bool = False,
+        retry_on_error: list[type[ValkeyError]] | _Sentinel = ...,
+        encoding: str = "utf-8",
+        encoding_errors: str = "strict",
+        decode_responses: bool = False,
+        parser_class: type[BaseParser] = ...,
+        socket_read_size: int = 65536,
+        health_check_interval: float = 0,
+        client_name: str | None = None,
+        username: str | None = None,
+        retry: Retry | None = None,
+        valkey_connect_func: ConnectCallbackT | None = None,
+        encoder_class: type[Encoder] = ...,
+        credential_provider: CredentialProvider | None = None,
+    ) -> None: ...
+    @abstractmethod
+    def repr_pieces(self) -> list[tuple[str, Any]]: ...
+    @property
+    def is_connected(self) -> bool: ...
+    def register_connect_callback(self, callback: ConnectCallbackT) -> None: ...
+    def clear_connect_callbacks(self) -> None: ...
+    def set_parser(self, parser_class: type[BaseParser]) -> None: ...
+    async def connect(self) -> None: ...
+    async def on_connect(self) -> None: ...
+    async def disconnect(self, nowait: bool = False) -> None: ...
+    async def check_health(self) -> None: ...
+    async def send_packed_command(self, command: bytes | str | Iterable[bytes], check_health: bool = True) -> None: ...
+    async def send_command(self, *args: Any, **kwargs: Any) -> None: ...
+    async def can_read_destructive(self) -> bool: ...
+    async def read_response(
+        self, disable_decoding: bool = False, timeout: float | None = None, *, disconnect_on_error: bool = True
+    ) -> EncodableT | list[EncodableT] | None: ...
+    def pack_command(self, *args: EncodableT) -> list[bytes]: ...
+    def pack_commands(self, commands: Iterable[Iterable[EncodableT]]) -> list[bytes]: ...
+
+class Connection(AbstractConnection):
+    host: str
+    port: int
+    socket_keepalive: bool
+    socket_keepalive_options: Mapping[int, int | bytes] | None
+    socket_type: int
+
+    def __init__(
+        self,
+        *,
+        host: str = "localhost",
+        port: str | int = 6379,
+        socket_keepalive: bool = False,
+        socket_keepalive_options: Mapping[int, int | bytes] | None = None,
+        socket_type: int = 0,
+        # **kwargs forwarded to AbstractConnection.
+        db: str | int = 0,
+        password: str | None = None,
+        socket_timeout: float | None = None,
+        socket_connect_timeout: float | None = None,
+        retry_on_timeout: bool = False,
+        retry_on_error: list[type[ValkeyError]] | _Sentinel = ...,
+        encoding: str = "utf-8",
+        encoding_errors: str = "strict",
+        decode_responses: bool = False,
+        parser_class: type[BaseParser] = ...,
+        socket_read_size: int = 65536,
+        health_check_interval: float = 0,
+        client_name: str | None = None,
+        username: str | None = None,
+        retry: Retry | None = None,
+        valkey_connect_func: ConnectCallbackT | None = None,
+        encoder_class: type[Encoder] = ...,
+        credential_provider: CredentialProvider | None = None,
+    ) -> None: ...
+    def repr_pieces(self) -> list[tuple[str, Any]]: ...
+
+class SSLConnection(Connection):
+    ssl_context: ValkeySSLContext
+    def __init__(
+        self,
+        ssl_keyfile: str | None = None,
+        ssl_certfile: str | None = None,
+        ssl_cert_reqs: _SSLVerifyMode = "required",
+        ssl_ca_certs: str | None = None,
+        ssl_ca_data: str | None = None,
+        ssl_check_hostname: bool = False,
+        *,
+        # **kwargs forwarded to Connection.
+        host: str = "localhost",
+        port: str | int = 6379,
+        socket_keepalive: bool = False,
+        socket_keepalive_options: Mapping[int, int | bytes] | None = None,
+        socket_type: int = 0,
+        db: str | int = 0,
+        password: str | None = None,
+        socket_timeout: float | None = None,
+        socket_connect_timeout: float | None = None,
+        retry_on_timeout: bool = False,
+        retry_on_error: list[type[ValkeyError]] | _Sentinel = ...,
+        encoding: str = "utf-8",
+        encoding_errors: str = "strict",
+        decode_responses: bool = False,
+        parser_class: type[BaseParser] = ...,
+        socket_read_size: int = 65536,
+        health_check_interval: float = 0,
+        client_name: str | None = None,
+        username: str | None = None,
+        retry: Retry | None = None,
+        valkey_connect_func: ConnectCallbackT | None = None,
+        encoder_class: type[Encoder] = ...,
+        credential_provider: CredentialProvider | None = None,
+    ) -> None: ...
+    @property
+    def keyfile(self) -> str | None: ...
+    @property
+    def certfile(self) -> str | None: ...
+    @property
+    def cert_reqs(self) -> ssl.VerifyMode: ...
+    @property
+    def ca_certs(self) -> str | None: ...
+    @property
+    def ca_data(self) -> str | None: ...
+    @property
+    def check_hostname(self) -> bool: ...
+
+class ValkeySSLContext:
+    keyfile: str | None
+    certfile: str | None
+    cert_reqs: ssl.VerifyMode
+    ca_certs: str | None
+    ca_data: str | None
+    check_hostname: bool
+    context: ssl.SSLContext | None
+    def __init__(
+        self,
+        keyfile: str | None = None,
+        certfile: str | None = None,
+        cert_reqs: _SSLVerifyMode | None = None,
+        ca_certs: str | None = None,
+        ca_data: str | None = None,
+        check_hostname: bool = False,
+    ) -> None: ...
+    def get(self) -> ssl.SSLContext: ...
+
+class UnixDomainSocketConnection(Connection):
+    path: str
+    def __init__(
+        self,
+        *,
+        path: str = "",
+        # **kwargs forwarded to AbstractConnection.
+        db: str | int = 0,
+        password: str | None = None,
+        socket_timeout: float | None = None,
+        socket_connect_timeout: float | None = None,
+        retry_on_timeout: bool = False,
+        retry_on_error: list[type[ValkeyError]] | _Sentinel = ...,
+        encoding: str = "utf-8",
+        encoding_errors: str = "strict",
+        decode_responses: bool = False,
+        parser_class: type[BaseParser] = ...,
+        socket_read_size: int = 65536,
+        health_check_interval: float = 0,
+        client_name: str | None = None,
+        username: str | None = None,
+        retry: Retry | None = None,
+        valkey_connect_func: ConnectCallbackT | None = None,
+        encoder_class: type[Encoder] = ...,
+        credential_provider: CredentialProvider | None = None,
+    ) -> None: ...
+    def repr_pieces(self) -> list[tuple[str, Any]]: ...
+
+FALSE_STRINGS: Final[tuple[str, ...]]
+
+def to_bool(value: object) -> bool | None: ...
+
+URL_QUERY_ARGUMENT_PARSERS: MappingProxyType[str, Callable[[str], Any]]
+
+class ConnectKwargs(TypedDict):
+    username: str
+    password: str
+    connection_class: type[AbstractConnection]
+    host: str
+    port: int
+    db: int
+    path: str
+
+def parse_url(url: str) -> ConnectKwargs: ...
+
+_ConnectionT = TypeVar("_ConnectionT", bound=AbstractConnection)
+
+class ConnectionPool(Generic[_ConnectionT]):
+    # kwargs accepts all arguments from the connection class chosen for
+    # the given URL, except those encoded in the URL itself.
+    @classmethod
+    def from_url(cls, url: str, **kwargs: Any) -> Self: ...
+
+    connection_class: type[_ConnectionT]
+    connection_kwargs: Mapping[str, Any]
+    max_connections: int
+    encoder_class: type[Encoder]
+    pid: int
+
+    @overload
+    def __init__(
+        self: ConnectionPool[_ConnectionT],  # pyright: ignore[reportInvalidTypeVarUse]  #11780
+        connection_class: type[_ConnectionT],
+        max_connections: int | None = None,
+        # **kwargs are passed to the constructed connection instances.
+        **connection_kwargs: Any,
+    ) -> None: ...
+    @overload
+    def __init__(self: ConnectionPool[Connection], *, max_connections: int | None = None, **connection_kwargs: Any) -> None: ...
+    def reset(self) -> None: ...
+    async def get_connection(self, command_name: Unused, *keys: Unused, **options: Unused) -> _ConnectionT: ...
+    def get_encoder(self) -> Encoder: ...
+    def make_connection(self) -> _ConnectionT: ...
+    async def release(self, connection: AbstractConnection) -> None: ...
+    def owns_connection(self, connection: AbstractConnection) -> bool: ...
+    async def disconnect(self, inuse_connections: bool = True) -> None: ...
+    def set_retry(self, retry: Retry) -> None: ...
+
+class BlockingConnectionPool(ConnectionPool[_ConnectionT]):
+    queue_class: type[asyncio.Queue[_ConnectionT | None]]
+    timeout: int | None
+    pool: asyncio.Queue[_ConnectionT | None]
+
+    @overload
+    def __init__(
+        self: BlockingConnectionPool[_ConnectionT],  # pyright: ignore[reportInvalidTypeVarUse]  #11780
+        max_connections: int,
+        timeout: int | None,
+        connection_class: type[_ConnectionT],
+        queue_class: type[asyncio.Queue[_ConnectionT | None]] = ...,
+        # **kwargs are passed to the constructed connection instances.
+        **connection_kwargs: Any,
+    ) -> None: ...
+    @overload
+    def __init__(
+        self: BlockingConnectionPool[_ConnectionT],  # pyright: ignore[reportInvalidTypeVarUse]  #11780
+        max_connections: int = 50,
+        timeout: int | None = 20,
+        *,
+        connection_class: type[_ConnectionT],
+        queue_class: type[asyncio.Queue[_ConnectionT | None]] = ...,
+        # **kwargs are passed to the constructed connection instances.
+        **connection_kwargs: Any,
+    ) -> None: ...
+    @overload
+    def __init__(
+        self: BlockingConnectionPool[Connection],
+        max_connections: int = 50,
+        timeout: int | None = 20,
+        *,
+        queue_class: type[asyncio.Queue[Connection | None]] = ...,
+        # **kwargs are passed to the constructed connection instances.
+        **connection_kwargs: Any,
+    ) -> None: ...
diff --git a/valkey/asyncio/lock.pyi b/valkey/asyncio/lock.pyi
new file mode 100644
index 00000000..018591c7
--- /dev/null
+++ b/valkey/asyncio/lock.pyi
@@ -0,0 +1,51 @@
+import threading
+from collections.abc import Awaitable
+from types import SimpleNamespace, TracebackType
+from typing import Any, ClassVar
+from typing_extensions import Self
+
+from valkey.asyncio import Valkey
+from valkey.commands.core import AsyncScript
+
+class Lock:
+    lua_release: ClassVar[AsyncScript | None]
+    lua_extend: ClassVar[AsyncScript | None]
+    lua_reacquire: ClassVar[AsyncScript | None]
+    LUA_RELEASE_SCRIPT: ClassVar[str]
+    LUA_EXTEND_SCRIPT: ClassVar[str]
+    LUA_REACQUIRE_SCRIPT: ClassVar[str]
+    valkey: Valkey[Any]
+    name: str | bytes | memoryview
+    timeout: float | None
+    sleep: float
+    blocking: bool
+    blocking_timeout: float | None
+    thread_local: bool
+    local: threading.local | SimpleNamespace
+    def __init__(
+        self,
+        valkey: Valkey[Any],
+        name: str | bytes | memoryview,
+        timeout: float | None = None,
+        sleep: float = 0.1,
+        blocking: bool = True,
+        blocking_timeout: float | None = None,
+        thread_local: bool = True,
+    ) -> None: ...
+    def register_scripts(self) -> None: ...
+    async def __aenter__(self) -> Self: ...
+    async def __aexit__(
+        self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None
+    ) -> None: ...
+    async def acquire(
+        self, blocking: bool | None = None, blocking_timeout: float | None = None, token: str | bytes | None = None
+    ) -> bool: ...
+    async def do_acquire(self, token: str | bytes) -> bool: ...
+    async def locked(self) -> bool: ...
+    async def owned(self) -> bool: ...
+    def release(self) -> Awaitable[None]: ...
+    async def do_release(self, expected_token: bytes) -> None: ...
+    def extend(self, additional_time: float, replace_ttl: bool = False) -> Awaitable[bool]: ...
+    async def do_extend(self, additional_time: float, replace_ttl: bool) -> bool: ...
+    def reacquire(self) -> Awaitable[bool]: ...
+    async def do_reacquire(self) -> bool: ...
diff --git a/valkey/asyncio/parser.pyi b/valkey/asyncio/parser.pyi
new file mode 100644
index 00000000..fe5139a8
--- /dev/null
+++ b/valkey/asyncio/parser.pyi
@@ -0,0 +1,9 @@
+from _typeshed import Incomplete
+from typing import Any
+
+# TODO: define and use:
+# from valkey.asyncio.cluster import ClusterNode
+
+class CommandsParser:
+    async def initialize(self, node: Incomplete | None = None) -> None: ...  # TODO: ClusterNode
+    async def get_keys(self, *args: Any) -> tuple[str, ...] | None: ...
diff --git a/valkey/asyncio/retry.pyi b/valkey/asyncio/retry.pyi
new file mode 100644
index 00000000..0970df7b
--- /dev/null
+++ b/valkey/asyncio/retry.pyi
@@ -0,0 +1,12 @@
+from collections.abc import Awaitable, Callable, Iterable
+from typing import TypeVar
+
+from valkey.backoff import AbstractBackoff
+from valkey.exceptions import ValkeyError
+
+_T = TypeVar("_T")
+
+class Retry:
+    def __init__(self, backoff: AbstractBackoff, retries: int, supported_errors: tuple[type[ValkeyError], ...] = ...) -> None: ...
+    def update_supported_errors(self, specified_errors: Iterable[type[ValkeyError]]) -> None: ...
+    async def call_with_retry(self, do: Callable[[], Awaitable[_T]], fail: Callable[[ValkeyError], Awaitable[object]]) -> _T: ...
diff --git a/valkey/asyncio/sentinel.pyi b/valkey/asyncio/sentinel.pyi
new file mode 100644
index 00000000..1fa9e5fa
--- /dev/null
+++ b/valkey/asyncio/sentinel.pyi
@@ -0,0 +1,162 @@
+from collections.abc import AsyncIterator, Iterable, Mapping
+from typing import Any, Literal, TypedDict, TypeVar, overload
+
+from valkey.asyncio.client import Valkey
+from valkey.asyncio.connection import (
+    BaseParser,
+    ConnectCallbackT,
+    Connection,
+    ConnectionPool,
+    Encoder,
+    SSLConnection,
+    _ConnectionT,
+    _Sentinel,
+)
+from valkey.asyncio.retry import Retry
+from valkey.commands import AsyncSentinelCommands
+from valkey.credentials import CredentialProvider
+from valkey.exceptions import ConnectionError, ValkeyError
+
+_ValkeyT = TypeVar("_ValkeyT", bound=Valkey[Any])
+
+class MasterNotFoundError(ConnectionError): ...
+class SlaveNotFoundError(ConnectionError): ...
+
+class SentinelManagedConnection(Connection):
+    connection_pool: ConnectionPool[Any] | None
+    def __init__(
+        self,
+        *,
+        connection_pool: ConnectionPool[Any] | None,
+        # **kwargs forwarded to Connection.
+        host: str = "localhost",
+        port: str | int = 6379,
+        socket_keepalive: bool = False,
+        socket_keepalive_options: Mapping[int, int | bytes] | None = None,
+        socket_type: int = 0,
+        db: str | int = 0,
+        password: str | None = None,
+        socket_timeout: float | None = None,
+        socket_connect_timeout: float | None = None,
+        retry_on_timeout: bool = False,
+        retry_on_error: list[type[ValkeyError]] | _Sentinel = ...,
+        encoding: str = "utf-8",
+        encoding_errors: str = "strict",
+        decode_responses: bool = False,
+        parser_class: type[BaseParser] = ...,
+        socket_read_size: int = 65536,
+        health_check_interval: float = 0,
+        client_name: str | None = None,
+        username: str | None = None,
+        retry: Retry | None = None,
+        valkey_connect_func: ConnectCallbackT | None = None,
+        encoder_class: type[Encoder] = ...,
+        credential_provider: CredentialProvider | None = None,
+    ) -> None: ...
+    async def connect_to(self, address: tuple[str, int]) -> None: ...
+    async def connect(self) -> None: ...
+
+class SentinelManagedSSLConnection(SentinelManagedConnection, SSLConnection): ...
+
+class SentinelConnectionPool(ConnectionPool[_ConnectionT]):
+    is_master: bool
+    check_connection: bool
+    service_name: str
+    sentinel_manager: Sentinel
+    master_address: tuple[str, int] | None
+    slave_rr_counter: int | None
+
+    def __init__(
+        self,
+        service_name: str,
+        sentinel_manager: Sentinel,
+        *,
+        ssl: bool = False,
+        connection_class: type[SentinelManagedConnection] = ...,
+        is_master: bool = True,
+        check_connection: bool = False,
+        # **kwargs ultimately forwarded to construction Connection instances.
+        host: str = "localhost",
+        port: str | int = 6379,
+        socket_keepalive: bool = False,
+        socket_keepalive_options: Mapping[int, int | bytes] | None = None,
+        socket_type: int = 0,
+        db: str | int = 0,
+        password: str | None = None,
+        socket_timeout: float | None = None,
+        socket_connect_timeout: float | None = None,
+        retry_on_timeout: bool = False,
+        retry_on_error: list[type[ValkeyError]] | _Sentinel = ...,
+        encoding: str = "utf-8",
+        encoding_errors: str = "strict",
+        decode_responses: bool = False,
+        parser_class: type[BaseParser] = ...,
+        socket_read_size: int = 65536,
+        health_check_interval: float = 0,
+        client_name: str | None = None,
+        username: str | None = None,
+        retry: Retry | None = None,
+        valkey_connect_func: ConnectCallbackT | None = None,
+        encoder_class: type[Encoder] = ...,
+        credential_provider: CredentialProvider | None = None,
+    ) -> None: ...
+    async def get_master_address(self) -> tuple[str, int]: ...
+    async def rotate_slaves(self) -> AsyncIterator[tuple[str, int]]: ...
+
+_State = TypedDict(
+    "_State", {"ip": str, "port": int, "is_master": bool, "is_sdown": bool, "is_odown": bool, "num-other-sentinels": int}
+)
+
+class Sentinel(AsyncSentinelCommands):
+    sentinel_kwargs: Mapping[str, Any]
+    sentinels: list[Valkey[Any]]
+    min_other_sentinels: int
+    connection_kwargs: Mapping[str, Any]
+    def __init__(
+        self,
+        sentinels: Iterable[tuple[str, int]],
+        min_other_sentinels: int = 0,
+        sentinel_kwargs: Mapping[str, Any] | None = None,
+        **connection_kwargs: Any,
+    ) -> None: ...
+    async def execute_command(self, *args: Any, once: bool = False, **kwargs: Any) -> Literal[True]: ...
+    def check_master_state(self, state: _State, service_name: str) -> bool: ...
+    async def discover_master(self, service_name: str) -> tuple[str, int]: ...
+    def filter_slaves(self, slaves: Iterable[_State]) -> list[tuple[str, int]]: ...
+    async def discover_slaves(self, service_name: str) -> list[tuple[str, int]]: ...
+    @overload
+    def master_for(
+        self,
+        service_name: str,
+        valkey_class: type[_ValkeyT],
+        connection_pool_class: type[SentinelConnectionPool[Any]] = ...,
+        # Forwarded to the connection pool constructor.
+        **kwargs: Any,
+    ) -> _ValkeyT: ...
+    @overload
+    def master_for(
+        self,
+        service_name: str,
+        *,
+        connection_pool_class: type[SentinelConnectionPool[Any]] = ...,
+        # Forwarded to the connection pool constructor.
+        **kwargs: Any,
+    ) -> Valkey[Any]: ...
+    @overload
+    def slave_for(
+        self,
+        service_name: str,
+        valkey_class: type[_ValkeyT],
+        connection_pool_class: type[SentinelConnectionPool[Any]] = ...,
+        # Forwarded to the connection pool constructor.
+        **kwargs: Any,
+    ) -> _ValkeyT: ...
+    @overload
+    def slave_for(
+        self,
+        service_name: str,
+        *,
+        connection_pool_class: type[SentinelConnectionPool[Any]] = ...,
+        # Forwarded to the connection pool constructor.
+        **kwargs: Any,
+    ) -> Valkey[Any]: ...
diff --git a/valkey/asyncio/utils.pyi b/valkey/asyncio/utils.pyi
new file mode 100644
index 00000000..cd3b14df
--- /dev/null
+++ b/valkey/asyncio/utils.pyi
@@ -0,0 +1,15 @@
+from types import TracebackType
+from typing import Any, Generic
+
+from valkey.asyncio.client import Pipeline, Valkey
+from valkey.client import _StrType
+
+def from_url(url: str, **kwargs: Any) -> Valkey[Any]: ...
+
+class pipeline(Generic[_StrType]):
+    p: Pipeline[_StrType]
+    def __init__(self, valkey_obj: Valkey[_StrType]) -> None: ...
+    async def __aenter__(self) -> Pipeline[_StrType]: ...
+    async def __aexit__(
+        self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None
+    ) -> None: ...
diff --git a/valkey/backoff.pyi b/valkey/backoff.pyi
new file mode 100644
index 00000000..40230a13
--- /dev/null
+++ b/valkey/backoff.pyi
@@ -0,0 +1,31 @@
+from abc import ABC, abstractmethod
+
+class AbstractBackoff(ABC):
+    def reset(self) -> None: ...
+    @abstractmethod
+    def compute(self, failures: int) -> float: ...
+
+class ConstantBackoff(AbstractBackoff):
+    def __init__(self, backoff: int) -> None: ...
+    def compute(self, failures: int) -> float: ...
+
+class NoBackoff(ConstantBackoff):
+    def __init__(self) -> None: ...
+
+class ExponentialBackoff(AbstractBackoff):
+    def __init__(self, cap: float = 0.512, base: float = 0.008) -> None: ...
+    def compute(self, failures: int) -> float: ...
+
+class FullJitterBackoff(AbstractBackoff):
+    def __init__(self, cap: float = 0.512, base: float = 0.008) -> None: ...
+    def compute(self, failures: int) -> float: ...
+
+class EqualJitterBackoff(AbstractBackoff):
+    def __init__(self, cap: float = 0.512, base: float = 0.008) -> None: ...
+    def compute(self, failures: int) -> float: ...
+
+class DecorrelatedJitterBackoff(AbstractBackoff):
+    def __init__(self, cap: float = 0.512, base: float = 0.008) -> None: ...
+    def compute(self, failures: int) -> float: ...
+
+def default_backoff() -> EqualJitterBackoff: ...
diff --git a/valkey/client.pyi b/valkey/client.pyi
new file mode 100644
index 00000000..d55b234b
--- /dev/null
+++ b/valkey/client.pyi
@@ -0,0 +1,806 @@
+import threading
+from _typeshed import Incomplete, SupportsItems, Unused
+from collections.abc import Callable, Iterable, Iterator, Mapping, Sequence
+from datetime import datetime, timedelta
+from re import Pattern
+from types import TracebackType
+from typing import Any, ClassVar, Literal, TypeVar, overload
+from typing_extensions import Self, TypeAlias
+
+from valkey import ValkeyError
+
+from .commands import CoreCommands, ValkeyModuleCommands, SentinelCommands
+from .connection import ConnectionPool, _ConnectFunc, _ConnectionPoolOptions
+from .credentials import CredentialProvider
+from .lock import Lock
+from .retry import Retry
+from .typing import ChannelT, EncodableT, KeyT, PatternT
+
+_Value: TypeAlias = bytes | float | int | str  # anything encodable as a command value
+_Key: TypeAlias = str | bytes  # anything usable as a key name
+
+# Lib returns str or bytes depending on value of decode_responses
+_StrType = TypeVar("_StrType", bound=str | bytes)
+
+_VT = TypeVar("_VT")
+_T = TypeVar("_T")
+
+# Keyword arguments that are passed to Valkey.parse_response().
+_ParseResponseOptions: TypeAlias = Any
+# Keyword arguments that are passed to Valkey.execute_command().
+_CommandOptions: TypeAlias = _ConnectionPoolOptions | _ParseResponseOptions
+
+SYM_EMPTY: bytes  # protocol constants re-exported for runtime parity
+EMPTY_RESPONSE: str
+NEVER_DECODE: str
+
+class CaseInsensitiveDict(dict[_StrType, _VT]):  # dict used for command-name lookups; key case is normalized at runtime
+    def __init__(self, data: SupportsItems[_StrType, _VT]) -> None: ...
+    def update(self, data: SupportsItems[_StrType, _VT]) -> None: ...  # type: ignore[override]
+    @overload
+    def get(self, k: _StrType, default: None = None) -> _VT | None: ...
+    @overload
+    def get(self, k: _StrType, default: _VT | _T) -> _VT | _T: ...
+    # Overrides many other methods too, but without changing signature
+
+def list_or_args(keys, args): ...  # internal response/argument helpers, deliberately left untyped (mirrors upstream stubs)
+def timestamp_to_datetime(response): ...
+def string_keys_to_dict(key_string, callback): ...
+def parse_debug_object(response): ...
+def parse_object(response, infotype): ...
+def parse_info(response): ...
+
+SENTINEL_STATE_TYPES: dict[str, type[int]]  # sentinel state field name -> conversion callable
+
+def parse_sentinel_state(item): ...  # response-callback helpers, untyped by design (mirrors upstream stubs)
+def parse_sentinel_master(response): ...
+def parse_sentinel_masters(response): ...
+def parse_sentinel_slaves_and_sentinels(response): ...
+def parse_sentinel_get_master(response): ...
+def pairs_to_dict(response, decode_keys: bool = False, decode_string_values: bool = False): ...
+def pairs_to_dict_typed(response, type_info): ...
+def zset_score_pairs(response, **options): ...
+def sort_return_tuples(response, **options): ...
+def int_or_none(response): ...
+def float_or_none(response): ...
+def bool_ok(response): ...
+def parse_client_list(response, **options): ...
+def parse_config_get(response, **options): ...
+def parse_scan(response, **options): ...
+def parse_hscan(response, **options): ...
+def parse_zscan(response, **options): ...
+def parse_slowlog_get(response, **options): ...
+
+_LockType = TypeVar("_LockType")  # concrete lock class returned by Valkey.lock(lock_class=...)
+
+class AbstractValkey:  # base class holding the default response-callback table
+    RESPONSE_CALLBACKS: dict[str, Any]
+
+class Valkey(AbstractValkey, ValkeyModuleCommands, CoreCommands[_StrType], SentinelCommands):  # _StrType (str vs bytes) is fixed by decode_responses
+    @overload
+    @classmethod
+    def from_url(  # decode_responses=True -> responses decoded to str
+        cls,
+        url: str,
+        *,
+        host: str | None = ...,
+        port: int | None = ...,
+        db: int | None = ...,
+        password: str | None = ...,
+        socket_timeout: float | None = ...,
+        socket_connect_timeout: float | None = ...,
+        socket_keepalive: bool | None = ...,
+        socket_keepalive_options: Mapping[str, int | str] | None = ...,
+        connection_pool: ConnectionPool | None = ...,
+        unix_socket_path: str | None = ...,
+        encoding: str = ...,
+        encoding_errors: str = ...,
+        charset: str | None = ...,
+        errors: str | None = ...,
+        decode_responses: Literal[True],
+        retry_on_timeout: bool = ...,
+        retry_on_error: list[type[ValkeyError]] | None = ...,
+        ssl: bool = ...,
+        ssl_keyfile: str | None = ...,
+        ssl_certfile: str | None = ...,
+        ssl_cert_reqs: str | int | None = ...,
+        ssl_ca_certs: str | None = ...,
+        ssl_check_hostname: bool = ...,
+        max_connections: int | None = ...,
+        single_connection_client: bool = ...,
+        health_check_interval: float = ...,
+        client_name: str | None = ...,
+        username: str | None = ...,
+        retry: Retry | None = ...,
+    ) -> Valkey[str]: ...
+    @overload
+    @classmethod
+    def from_url(  # decode_responses omitted/False -> raw bytes responses
+        cls,
+        url: str,
+        *,
+        host: str | None = ...,
+        port: int | None = ...,
+        db: int | None = ...,
+        password: str | None = ...,
+        socket_timeout: float | None = ...,
+        socket_connect_timeout: float | None = ...,
+        socket_keepalive: bool | None = ...,
+        socket_keepalive_options: Mapping[str, int | str] | None = ...,
+        connection_pool: ConnectionPool | None = ...,
+        unix_socket_path: str | None = ...,
+        encoding: str = ...,
+        encoding_errors: str = ...,
+        charset: str | None = ...,
+        errors: str | None = ...,
+        decode_responses: Literal[False] = False,
+        retry_on_timeout: bool = ...,
+        retry_on_error: list[type[ValkeyError]] | None = ...,
+        ssl: bool = ...,
+        ssl_keyfile: str | None = ...,
+        ssl_certfile: str | None = ...,
+        ssl_cert_reqs: str | int | None = ...,
+        ssl_ca_certs: str | None = ...,
+        ssl_check_hostname: bool = ...,
+        max_connections: int | None = ...,
+        single_connection_client: bool = ...,
+        health_check_interval: float = ...,
+        client_name: str | None = ...,
+        username: str | None = ...,
+        retry: Retry | None = ...,
+    ) -> Valkey[bytes]: ...
+    connection_pool: Any
+    connection: Any
+    response_callbacks: Any
+    @overload
+    def __init__(  # all-positional form with decode_responses=True
+        self: Valkey[str],
+        host: str,
+        port: int,
+        db: int,
+        password: str | None,
+        socket_timeout: float | None,
+        socket_connect_timeout: float | None,
+        socket_keepalive: bool | None,
+        socket_keepalive_options: Mapping[str, int | str] | None,
+        connection_pool: ConnectionPool | None,
+        unix_socket_path: str | None,
+        encoding: str,
+        encoding_errors: str,
+        charset: str | None,
+        errors: str | None,
+        decode_responses: Literal[True],
+        retry_on_timeout: bool = False,
+        retry_on_error: list[type[ValkeyError]] | None = None,
+        ssl: bool = False,
+        ssl_keyfile: str | None = None,
+        ssl_certfile: str | None = None,
+        ssl_cert_reqs: str | int | None = "required",
+        ssl_ca_certs: str | None = None,
+        ssl_ca_path: Incomplete | None = None,
+        ssl_ca_data: Incomplete | None = None,
+        ssl_check_hostname: bool = False,
+        ssl_password: Incomplete | None = None,
+        ssl_validate_ocsp: bool = False,
+        ssl_validate_ocsp_stapled: bool = False,  # added in 4.1.1
+        ssl_ocsp_context: Incomplete | None = None,  # added in 4.1.1
+        ssl_ocsp_expected_cert: Incomplete | None = None,  # added in 4.1.1
+        max_connections: int | None = None,
+        single_connection_client: bool = False,
+        health_check_interval: float = 0,
+        client_name: str | None = None,
+        lib_name: str | None = None,
+        lib_version: str | None = None,
+        username: str | None = None,
+        retry: Retry | None = None,
+        valkey_connect_func: _ConnectFunc | None = None,
+        credential_provider: CredentialProvider | None = None,
+    ) -> None: ...
+    @overload
+    def __init__(  # keyword form with decode_responses=True
+        self: Valkey[str],
+        host: str = "localhost",
+        port: int = 6379,
+        db: int = 0,
+        password: str | None = None,
+        socket_timeout: float | None = None,
+        socket_connect_timeout: float | None = None,
+        socket_keepalive: bool | None = None,
+        socket_keepalive_options: Mapping[str, int | str] | None = None,
+        connection_pool: ConnectionPool | None = None,
+        unix_socket_path: str | None = None,
+        encoding: str = "utf-8",
+        encoding_errors: str = "strict",
+        charset: str | None = None,
+        errors: str | None = None,
+        *,
+        decode_responses: Literal[True],
+        retry_on_timeout: bool = False,
+        retry_on_error: list[type[ValkeyError]] | None = None,
+        ssl: bool = False,
+        ssl_keyfile: str | None = None,
+        ssl_certfile: str | None = None,
+        ssl_cert_reqs: str | int | None = "required",
+        ssl_ca_certs: str | None = None,
+        ssl_ca_data: Incomplete | None = None,  # NOTE(review): ssl_ca_path (present in positional overload) is missing here -- confirm against runtime
+        ssl_check_hostname: bool = False,
+        ssl_password: Incomplete | None = None,
+        ssl_validate_ocsp: bool = False,
+        ssl_validate_ocsp_stapled: bool = False,  # added in 4.1.1
+        ssl_ocsp_context: Incomplete | None = None,  # added in 4.1.1
+        ssl_ocsp_expected_cert: Incomplete | None = None,  # added in 4.1.1
+        max_connections: int | None = None,
+        single_connection_client: bool = False,
+        health_check_interval: float = 0,
+        client_name: str | None = None,
+        lib_name: str | None = None,
+        lib_version: str | None = None,
+        username: str | None = None,
+        retry: Retry | None = None,
+        valkey_connect_func: _ConnectFunc | None = None,
+        credential_provider: CredentialProvider | None = None,
+    ) -> None: ...
+    @overload
+    def __init__(  # default form: decode_responses=False -> Valkey[bytes]
+        self: Valkey[bytes],
+        host: str = "localhost",
+        port: int = 6379,
+        db: int = 0,
+        password: str | None = None,
+        socket_timeout: float | None = None,
+        socket_connect_timeout: float | None = None,
+        socket_keepalive: bool | None = None,
+        socket_keepalive_options: Mapping[str, int | str] | None = None,
+        connection_pool: ConnectionPool | None = None,
+        unix_socket_path: str | None = None,
+        encoding: str = "utf-8",
+        encoding_errors: str = "strict",
+        charset: str | None = None,
+        errors: str | None = None,
+        decode_responses: Literal[False] = False,
+        retry_on_timeout: bool = False,
+        retry_on_error: list[type[ValkeyError]] | None = None,
+        ssl: bool = False,
+        ssl_keyfile: str | None = None,
+        ssl_certfile: str | None = None,
+        ssl_cert_reqs: str | int | None = "required",
+        ssl_ca_certs: str | None = None,
+        ssl_ca_data: Incomplete | None = None,  # NOTE(review): ssl_ca_path (present in positional overload) is missing here -- confirm against runtime
+        ssl_check_hostname: bool = False,
+        ssl_password: Incomplete | None = None,
+        ssl_validate_ocsp: bool = False,
+        ssl_validate_ocsp_stapled: bool = False,  # added in 4.1.1
+        ssl_ocsp_context: Incomplete | None = None,  # added in 4.1.1
+        ssl_ocsp_expected_cert: Incomplete | None = None,  # added in 4.1.1
+        max_connections: int | None = None,
+        single_connection_client: bool = False,
+        health_check_interval: float = 0,
+        client_name: str | None = None,
+        lib_name: str | None = None,
+        lib_version: str | None = None,
+        username: str | None = None,
+        retry: Retry | None = None,
+        valkey_connect_func: _ConnectFunc | None = None,
+        credential_provider: CredentialProvider | None = None,
+    ) -> None: ...
+    def get_encoder(self): ...
+    def get_connection_kwargs(self): ...
+    def set_response_callback(self, command, callback): ...
+    def pipeline(self, transaction: bool = True, shard_hint: Any = None) -> Pipeline[_StrType]: ...
+    def transaction(self, func, *watches, **kwargs): ...
+    @overload
+    def lock(  # default Lock class
+        self,
+        name: _Key,
+        timeout: float | None = None,
+        sleep: float = 0.1,
+        blocking: bool = True,
+        blocking_timeout: float | None = None,
+        lock_class: None = None,
+        thread_local: bool = True,
+    ) -> Lock: ...
+    @overload
+    def lock(  # custom lock class, passed positionally
+        self,
+        name: _Key,
+        timeout: float | None,
+        sleep: float,
+        blocking: bool,
+        blocking_timeout: float | None,
+        lock_class: type[_LockType],
+        thread_local: bool = True,
+    ) -> _LockType: ...
+    @overload
+    def lock(  # custom lock class, passed by keyword
+        self,
+        name: _Key,
+        timeout: float | None = None,
+        sleep: float = 0.1,
+        blocking: bool = True,
+        blocking_timeout: float | None = None,
+        *,
+        lock_class: type[_LockType],
+        thread_local: bool = True,
+    ) -> _LockType: ...
+    def pubsub(self, *, shard_hint: Any = ..., ignore_subscribe_messages: bool = ...) -> PubSub: ...
+    def execute_command(self, *args, **options: _CommandOptions): ...
+    def parse_response(self, connection, command_name, **options: _ParseResponseOptions): ...
+    def monitor(self) -> Monitor: ...
+    def __enter__(self) -> Valkey[_StrType]: ...
+    def __exit__(
+        self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None
+    ) -> None: ...
+    def __del__(self) -> None: ...
+    def close(self) -> None: ...
+    def client(self) -> Valkey[_StrType]: ...  # new client on a single dedicated connection
+
+StrictValkey = Valkey  # historical alias kept for API compatibility
+
+class PubSub:  # subscribe/publish interface; usable as a context manager
+    PUBLISH_MESSAGE_TYPES: ClassVar[tuple[str, str]]
+    UNSUBSCRIBE_MESSAGE_TYPES: ClassVar[tuple[str, str]]
+    HEALTH_CHECK_MESSAGE: ClassVar[str]
+    connection_pool: Any
+    shard_hint: Any
+    ignore_subscribe_messages: Any
+    connection: Any
+    subscribed_event: threading.Event  # set while at least one subscription is active
+    encoder: Any
+    health_check_response_b: bytes
+    health_check_response: list[str] | list[bytes]
+    def __init__(
+        self,
+        connection_pool,
+        shard_hint: Incomplete | None = None,
+        ignore_subscribe_messages: bool = False,
+        encoder: Incomplete | None = None,
+    ) -> None: ...
+    def __enter__(self) -> Self: ...
+    def __exit__(
+        self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None
+    ) -> None: ...
+    def __del__(self): ...
+    channels: Any  # channel name -> handler (or None)
+    patterns: Any  # pattern -> handler (or None)
+    def reset(self): ...
+    def close(self) -> None: ...
+    def on_connect(self, connection): ...
+    @property
+    def subscribed(self): ...
+    def execute_command(self, *args): ...
+    def clean_health_check_responses(self) -> None: ...
+    def parse_response(self, block: bool = True, timeout: float = 0): ...
+    def is_health_check_response(self, response) -> bool: ...
+    def check_health(self) -> None: ...
+    def psubscribe(self, *args: _Key, **kwargs: Callable[[Any], None]): ...  # kwargs map pattern name -> message handler
+    def punsubscribe(self, *args: _Key) -> None: ...
+    def subscribe(self, *args: _Key, **kwargs: Callable[[Any], None]) -> None: ...  # kwargs map channel name -> message handler
+    def unsubscribe(self, *args: _Key) -> None: ...
+    def listen(self): ...
+    def get_message(self, ignore_subscribe_messages: bool = False, timeout: float = 0.0) -> dict[str, Any] | None: ...
+    def handle_message(self, response, ignore_subscribe_messages: bool = False) -> dict[str, Any] | None: ...
+    def run_in_thread(self, sleep_time: float = 0, daemon: bool = False, exception_handler: Incomplete | None = None): ...
+    def ping(self, message: _Value | None = None) -> None: ...
+
+class PubSubWorkerThread(threading.Thread):  # background thread driving PubSub.get_message (see run_in_thread)
+    daemon: Any
+    pubsub: Any
+    sleep_time: Any
+    exception_handler: Any
+    def __init__(self, pubsub, sleep_time, daemon: bool = False, exception_handler: Incomplete | None = None) -> None: ...
+    def run(self) -> None: ...
+    def stop(self) -> None: ...
+
+class Pipeline(Valkey[_StrType]):  # buffers commands; every command method returns self for chaining
+    UNWATCH_COMMANDS: Any
+    connection_pool: Any
+    connection: Any
+    response_callbacks: Any
+    transaction: bool  # True -> wrap queued commands in MULTI/EXEC
+    shard_hint: Any
+    watching: bool  # True while WATCH is in effect
+
+    command_stack: Any  # queued (args, options) pairs awaiting execute()
+    scripts: Any
+    explicit_transaction: Any
+    def __init__(self, connection_pool, response_callbacks, transaction, shard_hint) -> None: ...
+    def __enter__(self) -> Pipeline[_StrType]: ...
+    def __exit__(
+        self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None
+    ) -> None: ...
+    def __del__(self) -> None: ...
+    def __len__(self) -> int: ...  # number of queued commands
+    def __bool__(self) -> bool: ...
+    def discard(self) -> None: ...
+    def reset(self) -> None: ...
+    def multi(self) -> None: ...
+    def execute_command(self, *args, **options): ...
+    def immediate_execute_command(self, *args, **options): ...
+    def pipeline_execute_command(self, *args, **options): ...
+    def raise_first_error(self, commands, response): ...
+    def annotate_exception(self, exception, number, command): ...
+    def parse_response(self, connection, command_name, **options): ...
+    def load_scripts(self): ...
+    def execute(self, raise_on_error: bool = True) -> list[Any]: ...  # flushes the queue; one result per queued command
+    def watch(self, *names: _Key) -> bool: ...
+    def unwatch(self) -> bool: ...
+    # in the Valkey implementation, the following methods are inherited from client.
+    def set_response_callback(self, command, callback): ...
+    def pipeline(self, transaction: bool = True, shard_hint: Any = None) -> Pipeline[_StrType]: ...
+    def acl_cat(self, category: str | None = None) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def acl_deluser(self, username: str) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def acl_genpass(self, bits: int | None = None) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def acl_getuser(self, username: str) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def acl_list(self) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def acl_load(self) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def acl_setuser(  # type: ignore[override]
+        self,
+        username: str,
+        enabled: bool = False,
+        nopass: bool = False,
+        passwords: Sequence[str] | None = None,
+        hashed_passwords: Sequence[str] | None = None,
+        categories: Sequence[str] | None = None,
+        commands: Sequence[str] | None = None,
+        keys: Sequence[str] | None = None,
+        channels: Iterable[ChannelT] | None = None,
+        selectors: Iterable[tuple[str, KeyT]] | None = None,
+        reset: bool = False,
+        reset_keys: bool = False,
+        reset_channels: bool = False,
+        reset_passwords: bool = False,
+        **kwargs: _CommandOptions,
+    ) -> Pipeline[_StrType]: ...
+    def acl_users(self) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def acl_whoami(self) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def bgrewriteaof(self) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def bgsave(self, schedule: bool = True) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def client_id(self) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def client_kill(self, address: str) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def client_list(self, _type: str | None = None, client_id: list[str] = []) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def client_getname(self) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def client_setname(self, name: str) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def readwrite(self) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def readonly(self) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def config_get(self, pattern: PatternT = "*", *args: PatternT, **kwargs: _CommandOptions) -> Pipeline[_StrType]: ...
+    def config_set(
+        self, name: KeyT, value: EncodableT, *args: KeyT | EncodableT, **kwargs: _CommandOptions
+    ) -> Pipeline[_StrType]: ...
+    def config_resetstat(self) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def config_rewrite(self) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def dbsize(self) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def debug_object(self, key) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def echo(self, value) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def flushall(self, asynchronous: bool = False) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def flushdb(self, asynchronous: bool = False) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def info(self, section: _Key | None = None, *args: _Key, **kwargs: _CommandOptions) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def lastsave(self) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def object(self, infotype, key) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def ping(self) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def save(self) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def sentinel_get_master_addr_by_name(self, service_name) -> Pipeline[_StrType]: ...
+    def sentinel_master(self, service_name) -> Pipeline[_StrType]: ...
+    def sentinel_masters(self) -> Pipeline[_StrType]: ...
+    def sentinel_monitor(self, name, ip, port, quorum) -> Pipeline[_StrType]: ...
+    def sentinel_remove(self, name) -> Pipeline[_StrType]: ...
+    def sentinel_sentinels(self, service_name) -> Pipeline[_StrType]: ...
+    def sentinel_set(self, name, option, value) -> Pipeline[_StrType]: ...
+    def sentinel_slaves(self, service_name) -> Pipeline[_StrType]: ...
+    def slaveof(self, host=None, port=None) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def slowlog_get(self, num=None) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def slowlog_len(self) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def slowlog_reset(self) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def time(self) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def append(self, key, value) -> Pipeline[_StrType]: ...  # string/bit commands; all re-declared to return the pipeline
+    def bitcount(  # type: ignore[override]
+        self, key: _Key, start: int | None = None, end: int | None = None, mode: str | None = None
+    ) -> Pipeline[_StrType]: ...
+    def bitop(self, operation, dest, *keys) -> Pipeline[_StrType]: ...
+    def bitpos(self, key, bit, start=None, end=None, mode: str | None = None) -> Pipeline[_StrType]: ...
+    def decr(self, name, amount=1) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def delete(self, *names: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def __delitem__(self, _Key) -> None: ...
+    def dump(self, name) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def exists(self, *names: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def __contains__(self, *names: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def expire(  # type: ignore[override]
+        self, name: _Key, time: int | timedelta, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False
+    ) -> Pipeline[_StrType]: ...
+    def expireat(
+        self, name, when, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False
+    ) -> Pipeline[_StrType]: ...
+    def get(self, name: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def __getitem__(self, name) -> Pipeline[_StrType]: ...
+    def getbit(self, name: _Key, offset: int) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def getrange(self, key, start, end) -> Pipeline[_StrType]: ...
+    def getset(self, name, value) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def incr(self, name, amount=1) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def incrby(self, name, amount=1) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def incrbyfloat(self, name, amount=1.0) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def keys(self, pattern: _Key = "*") -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def mget(self, keys: _Key | Iterable[_Key], *args: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def mset(self, mapping: Mapping[_Key, _Value]) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def msetnx(self, mapping: Mapping[_Key, _Value]) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def move(self, name: _Key, db: int) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def persist(self, name: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def pexpire(  # type: ignore[override]
+        self, name: _Key, time: int | timedelta, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False
+    ) -> Pipeline[_StrType]: ...
+    def pexpireat(  # type: ignore[override]
+        self, name: _Key, when: int | datetime, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False
+    ) -> Pipeline[_StrType]: ...
+    def psetex(self, name, time_ms, value) -> Pipeline[_StrType]: ...
+    def pttl(self, name) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def randomkey(self) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def rename(self, src, dst) -> Pipeline[_StrType]: ...
+    def renamenx(self, src, dst) -> Pipeline[_StrType]: ...
+    def restore(
+        self,
+        name,
+        ttl,
+        value,
+        replace: bool = False,
+        absttl: bool = False,
+        idletime: Incomplete | None = None,
+        frequency: Incomplete | None = None,
+    ) -> Pipeline[_StrType]: ...
+    def set(  # type: ignore[override]
+        self,
+        name: _Key,
+        value: _Value,
+        ex: None | int | timedelta = None,
+        px: None | int | timedelta = None,
+        nx: bool = False,
+        xx: bool = False,
+        keepttl: bool = False,
+        get: bool = False,
+        exat: Incomplete | None = None,
+        pxat: Incomplete | None = None,
+    ) -> Pipeline[_StrType]: ...
+    def __setitem__(self, name: _Key, value: _Value) -> None: ...
+    def setbit(self, name: _Key, offset: int, value: int) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def setex(self, name: _Key, time: int | timedelta, value: _Value) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def setnx(self, name, value) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def setrange(self, name, offset, value) -> Pipeline[_StrType]: ...
+    def strlen(self, name) -> Pipeline[_StrType]: ...
+    def substr(self, name, start, end=-1) -> Pipeline[_StrType]: ...
+    def ttl(self, name: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def type(self, name) -> Pipeline[_StrType]: ...
+    def unlink(self, *names: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def blmove(  # type: ignore[override]
+        self,
+        first_list: _Key,
+        second_list: _Key,
+        timeout: float,
+        src: Literal["LEFT", "RIGHT"] = "LEFT",
+        dest: Literal["LEFT", "RIGHT"] = "RIGHT",
+    ) -> Pipeline[_StrType]: ...
+    def blpop(self, keys: _Value | Iterable[_Value], timeout: float = 0) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def brpop(self, keys: _Value | Iterable[_Value], timeout: float = 0) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def brpoplpush(self, src, dst, timeout=0) -> Pipeline[_StrType]: ...
+    def lindex(self, name: _Key, index: int | str) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def linsert(  # type: ignore[override]
+        self, name: _Key, where: Literal["BEFORE", "AFTER", "before", "after"], refvalue: _Value, value: _Value
+    ) -> Pipeline[_StrType]: ...
+    def llen(self, name: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def lmove(  # type: ignore[override]
+        self,
+        first_list: _Key,
+        second_list: _Key,
+        src: Literal["LEFT", "RIGHT"] = "LEFT",
+        dest: Literal["LEFT", "RIGHT"] = "RIGHT",
+    ) -> Pipeline[_StrType]: ...
+    def lpop(self, name, count: int | None = None) -> Pipeline[_StrType]: ...
+    def lpush(self, name: _Value, *values: _Value) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def lpushx(self, name, value) -> Pipeline[_StrType]: ...
+    def lrange(self, name: _Key, start: int, end: int) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def lrem(self, name: _Key, count: int, value: _Value) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def lset(self, name: _Key, index: int, value: _Value) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def ltrim(self, name: _Key, start: int, end: int) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def rpop(self, name, count: int | None = None) -> Pipeline[_StrType]: ...
+    def rpoplpush(self, src, dst) -> Pipeline[_StrType]: ...
+    def rpush(self, name: _Value, *values: _Value) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def rpushx(self, name, value) -> Pipeline[_StrType]: ...
+    def sort(  # type: ignore[override]
+        self,
+        name: _Key,
+        start: int | None = None,
+        num: int | None = None,
+        by: _Key | None = None,
+        get: _Key | Sequence[_Key] | None = None,
+        desc: bool = False,
+        alpha: bool = False,
+        store: _Key | None = None,
+        groups: bool = False,
+    ) -> Pipeline[_StrType]: ...
+    def scan(  # type: ignore[override]
+        self, cursor: int = 0, match: _Key | None = None, count: int | None = None, _type: str | None = None
+    ) -> Pipeline[_StrType]: ...
+    def scan_iter(self, match: _Key | None = None, count: int | None = None, _type: str | None = None) -> Iterator[Any]: ...  # type: ignore[override]
+    def sscan(self, name: _Key, cursor: int = 0, match: _Key | None = None, count: int | None = None) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def sscan_iter(self, name: _Key, match: _Key | None = None, count: int | None = None) -> Iterator[Any]: ...
+    def hscan(self, name: _Key, cursor: int = 0, match: _Key | None = None, count: int | None = None, no_values: bool | None = None) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def hscan_iter(self, name, match: _Key | None = None, count: int | None = None, no_values: bool | None = None) -> Iterator[Any]: ...
+    def zscan_iter(
+        self, name: _Key, match: _Key | None = None, count: int | None = None, score_cast_func: Callable[[_StrType], Any] = ...
+    ) -> Iterator[Any]: ...  # NOTE(review): zscan itself is not redeclared here, unlike sscan/hscan -- confirm intentional
+    def sadd(self, name: _Key, *values: _Value) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def scard(self, name: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def sdiff(self, keys: _Key | Iterable[_Key], *args: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def sdiffstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def sinter(self, keys: _Key | Iterable[_Key], *args: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def sinterstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def sismember(self, name: _Key, value: _Value) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def smembers(self, name: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def smove(self, src: _Key, dst: _Key, value: _Value) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def spop(self, name: _Key, count: int | None = None) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def srandmember(self, name: _Key, number: int | None = None) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def srem(self, name: _Key, *values: _Value) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def sunion(self, keys: _Key | Iterable[_Key], *args: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def sunionstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def xack(self, name, groupname, *ids) -> Pipeline[_StrType]: ...  # stream (X*) commands; untyped params mirror upstream stubs
+    def xadd(
+        self,
+        name,
+        fields,
+        id="*",
+        maxlen=None,
+        approximate: bool = True,
+        nomkstream: bool = False,
+        minid: Incomplete | None = None,
+        limit: int | None = None,
+    ) -> Pipeline[_StrType]: ...
+    def xclaim(
+        self,
+        name,
+        groupname,
+        consumername,
+        min_idle_time,
+        message_ids,
+        idle=None,
+        time=None,
+        retrycount=None,
+        force=False,
+        justid=False,
+    ) -> Pipeline[_StrType]: ...
+    def xdel(self, name, *ids) -> Pipeline[_StrType]: ...
+    def xgroup_create(self, name, groupname, id="$", mkstream=False, entries_read: int | None = None) -> Pipeline[_StrType]: ...
+    def xgroup_delconsumer(self, name, groupname, consumername) -> Pipeline[_StrType]: ...
+    def xgroup_destroy(self, name, groupname) -> Pipeline[_StrType]: ...
+    def xgroup_setid(self, name, groupname, id, entries_read: int | None = None) -> Pipeline[_StrType]: ...
+    def xinfo_consumers(self, name, groupname) -> Pipeline[_StrType]: ...
+    def xinfo_groups(self, name) -> Pipeline[_StrType]: ...
+    def xinfo_stream(self, name, full: bool = False) -> Pipeline[_StrType]: ...
+    def xlen(self, name: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def xpending(self, name, groupname) -> Pipeline[_StrType]: ...
+    def xpending_range(
+        self, name: _Key, groupname, min, max, count: int, consumername: Incomplete | None = None, idle: int | None = None
+    ) -> Pipeline[_StrType]: ...
+    def xrange(self, name, min="-", max="+", count=None) -> Pipeline[_StrType]: ...
+    def xread(self, streams, count=None, block=None) -> Pipeline[_StrType]: ...
+    def xreadgroup(self, groupname, consumername, streams, count=None, block=None, noack=False) -> Pipeline[_StrType]: ...
+    def xrevrange(self, name, max="+", min="-", count=None) -> Pipeline[_StrType]: ...
+    def xtrim(
+        self, name, maxlen: int | None = None, approximate: bool = True, minid: Incomplete | None = None, limit: int | None = None
+    ) -> Pipeline[_StrType]: ...
+    def zadd(  # type: ignore[override]
+        self,
+        name: _Key,
+        mapping: Mapping[_Key, _Value],
+        nx: bool = False,
+        xx: bool = False,
+        ch: bool = False,
+        incr: bool = False,
+        gt: Incomplete | None = False,
+        lt: Incomplete | None = False,
+    ) -> Pipeline[_StrType]: ...
+    def zcard(self, name: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def zcount(self, name: _Key, min: _Value, max: _Value) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def zincrby(self, name: _Key, amount: float, value: _Value) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def zinterstore(  # type: ignore[override]
+        self, dest: _Key, keys: Iterable[_Key], aggregate: Literal["SUM", "MIN", "MAX"] | None = None
+    ) -> Pipeline[_StrType]: ...
+    def zlexcount(self, name: _Key, min: _Value, max: _Value) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def zpopmax(self, name: _Key, count: int | None = None) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def zpopmin(self, name: _Key, count: int | None = None) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def bzpopmax(self, keys: _Key | Iterable[_Key], timeout: float = 0) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def bzpopmin(self, keys: _Key | Iterable[_Key], timeout: float = 0) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def zrange(  # type: ignore[override]
+        self,
+        name: _Key,
+        start: int,
+        end: int,
+        desc: bool = False,
+        withscores: bool = False,
+        score_cast_func: Callable[[_StrType], Any] = ...,
+        byscore: bool = False,
+        bylex: bool = False,
+        offset: int | None = None,
+        num: int | None = None,
+    ) -> Pipeline[_StrType]: ...
+    def zrangebylex(  # type: ignore[override]
+        self, name: _Key, min: _Value, max: _Value, start: int | None = None, num: int | None = None
+    ) -> Pipeline[_StrType]: ...
+    def zrangebyscore(  # type: ignore[override]
+        self,
+        name: _Key,
+        min: _Value,
+        max: _Value,
+        start: int | None = None,
+        num: int | None = None,
+        withscores: bool = False,
+        score_cast_func: Callable[[_StrType], Any] = ...,
+    ) -> Pipeline[_StrType]: ...
+    def zrank(self, name: _Key, value: _Value, withscore: bool = False) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def zrem(self, name: _Key, *values: _Value) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def zremrangebylex(self, name: _Key, min: _Value, max: _Value) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def zremrangebyrank(self, name: _Key, min: _Value, max: _Value) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def zremrangebyscore(self, name: _Key, min: _Value, max: _Value) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def zrevrange(  # type: ignore[override]
+        self, name: _Key, start: int, end: int, withscores: bool = False, score_cast_func: Callable[[_StrType], Any] = ...
+    ) -> Pipeline[_StrType]: ...
+    def zrevrangebyscore(  # type: ignore[override]
+        self,
+        name: _Key,
+        max: _Value,
+        min: _Value,
+        start: int | None = None,
+        num: int | None = None,
+        withscores: bool = False,
+        score_cast_func: Callable[[_StrType], Any] = ...,
+    ) -> Pipeline[_StrType]: ...
+    def zrevrangebylex(  # type: ignore[override]
+        self, name: _Key, max: _Value, min: _Value, start: int | None = None, num: int | None = None
+    ) -> Pipeline[_StrType]: ...
+    def zrevrank(self, name: _Key, value: _Value, withscore: bool = False) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def zscore(self, name: _Key, value: _Value) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def zunionstore(  # type: ignore[override]
+        self, dest: _Key, keys: Iterable[_Key], aggregate: Literal["SUM", "MIN", "MAX"] | None = None
+    ) -> Pipeline[_StrType]: ...
+    def pfadd(self, name: _Key, *values: _Value) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def pfcount(self, name: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def pfmerge(self, dest: _Key, *sources: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def hdel(self, name: _Key, *keys: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def hexists(self, name: _Key, key: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def hget(self, name: _Key, key: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def hgetall(self, name: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def hincrby(self, name: _Key, key: _Key, amount: int = 1) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def hincrbyfloat(self, name: _Key, key: _Key, amount: float = 1.0) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def hkeys(self, name: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def hlen(self, name: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    @overload  # type: ignore[override]
+    def hset(
+        self, name: _Key, key: _Key, value: _Value, mapping: Mapping[_Key, _Value] | None = None, items: Incomplete | None = None
+    ) -> Pipeline[_StrType]: ...
+    @overload
+    def hset(
+        self, name: _Key, key: None, value: None, mapping: Mapping[_Key, _Value], items: Incomplete | None = None
+    ) -> Pipeline[_StrType]: ...
+    @overload
+    def hset(self, name: _Key, *, mapping: Mapping[_Key, _Value], items: Incomplete | None = None) -> Pipeline[_StrType]: ...
+    def hsetnx(self, name: _Key, key: _Key, value: _Value) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def hmset(self, name: _Key, mapping: Mapping[_Key, _Value]) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def hmget(self, name: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def hvals(self, name: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def publish(self, channel: _Key, message: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def eval(self, script, numkeys, *keys_and_args) -> Pipeline[_StrType]: ...
+    def evalsha(self, sha, numkeys, *keys_and_args) -> Pipeline[_StrType]: ...
+    def script_exists(self, *args) -> Pipeline[_StrType]: ...
+    def script_flush(self, sync_type: Incomplete | None = None) -> Pipeline[_StrType]: ...
+    def script_kill(self) -> Pipeline[_StrType]: ...
+    def script_load(self, script) -> Pipeline[_StrType]: ...
+    def pubsub_channels(self, pattern: _Key = "*") -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def pubsub_numsub(self, *args: _Key) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def pubsub_numpat(self) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def monitor(self) -> Monitor: ...
+    def cluster(self, cluster_arg: str, *args: Any) -> Pipeline[_StrType]: ...  # type: ignore[override]
+    def client(self) -> Any: ...
+
+class Monitor:
+    command_re: Pattern[str]
+    monitor_re: Pattern[str]
+    def __init__(self, connection_pool) -> None: ...
+    def __enter__(self) -> Self: ...
+    def __exit__(self, *args: Unused) -> None: ...
+    def next_command(self) -> dict[str, Any]: ...
+    def listen(self) -> Iterable[dict[str, Any]]: ...
diff --git a/valkey/cluster.pyi b/valkey/cluster.pyi
new file mode 100644
index 00000000..f6bb7b6d
--- /dev/null
+++ b/valkey/cluster.pyi
@@ -0,0 +1,265 @@
+from _typeshed import Incomplete, Unused
+from collections.abc import Callable, Iterable, Sequence
+from threading import Lock
+from types import TracebackType
+from typing import Any, ClassVar, Literal, NoReturn, Protocol
+from typing_extensions import Self
+
+from valkey.client import CaseInsensitiveDict, PubSub, Valkey, _ParseResponseOptions
+from valkey.commands import CommandsParser, ValkeyClusterCommands
+from valkey.commands.core import _StrType
+from valkey.connection import BaseParser, Connection, ConnectionPool, Encoder, _ConnectionPoolOptions, _Encodable
+from valkey.exceptions import MovedError, ValkeyError
+from valkey.retry import Retry
+from valkey.typing import EncodableT
+
+def get_node_name(host: str, port: str | int) -> str: ...
+def get_connection(valkey_node: Valkey[Any], *args, **options: _ConnectionPoolOptions) -> Connection: ...
+def parse_scan_result(command: Unused, res, **options): ...
+def parse_pubsub_numsub(command: Unused, res, **options: Unused): ...
+def parse_cluster_slots(resp, **options) -> dict[tuple[int, int], dict[str, Any]]: ...
+def parse_cluster_myshardid(resp: bytes, **options: Unused) -> str: ...
+
+PRIMARY: str
+REPLICA: str
+SLOT_ID: str
+VALKEY_ALLOWED_KEYS: tuple[str, ...]
+KWARGS_DISABLED_KEYS: tuple[str, ...]
+PIPELINE_BLOCKED_COMMANDS: tuple[str, ...]
+
+def cleanup_kwargs(**kwargs: Any) -> dict[str, Any]: ...
+
+# It uses `DefaultParser` in real life, but it is a dynamic base class.
+class ClusterParser(BaseParser): ...
+
+class AbstractValkeyCluster:
+    ValkeyClusterRequestTTL: ClassVar[int]
+    PRIMARIES: ClassVar[str]
+    REPLICAS: ClassVar[str]
+    ALL_NODES: ClassVar[str]
+    RANDOM: ClassVar[str]
+    DEFAULT_NODE: ClassVar[str]
+    NODE_FLAGS: ClassVar[set[str]]
+    COMMAND_FLAGS: ClassVar[dict[str, str]]
+    CLUSTER_COMMANDS_RESPONSE_CALLBACKS: ClassVar[dict[str, Any]]
+    RESULT_CALLBACKS: ClassVar[dict[str, Callable[[Incomplete, Incomplete], Incomplete]]]
+    ERRORS_ALLOW_RETRY: ClassVar[tuple[type[ValkeyError], ...]]
+
+class ValkeyCluster(AbstractValkeyCluster, ValkeyClusterCommands[_StrType]):
+    user_on_connect_func: Callable[[Connection], object] | None
+    encoder: Encoder
+    cluster_error_retry_attempts: int
+    command_flags: dict[str, str]
+    node_flags: set[str]
+    read_from_replicas: bool
+    reinitialize_counter: int
+    reinitialize_steps: int
+    nodes_manager: NodesManager
+    cluster_response_callbacks: CaseInsensitiveDict[str, Callable[..., Incomplete]]
+    result_callbacks: CaseInsensitiveDict[str, Callable[[Incomplete, Incomplete], Incomplete]]
+    commands_parser: CommandsParser
+    def __init__(  # TODO: make @overloads, either `url` or `host:port` can be passed
+        self,
+        host: str | None = None,
+        port: int | None = 6379,
+        startup_nodes: list[ClusterNode] | None = None,
+        cluster_error_retry_attempts: int = 3,
+        retry: Retry | None = None,
+        require_full_coverage: bool = False,
+        reinitialize_steps: int = 5,
+        read_from_replicas: bool = False,
+        dynamic_startup_nodes: bool = True,
+        url: str | None = None,
+        address_remap: Callable[[str, int], tuple[str, int]] | None = None,
+        **kwargs,
+    ) -> None: ...
+    def __enter__(self) -> Self: ...
+    def __exit__(
+        self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None
+    ) -> None: ...
+    def __del__(self) -> None: ...
+    def disconnect_connection_pools(self) -> None: ...
+    @classmethod
+    def from_url(cls, url: str, **kwargs) -> Self: ...
+    def on_connect(self, connection: Connection) -> None: ...
+    def get_valkey_connection(self, node: ClusterNode) -> Valkey[Any]: ...
+    def get_node(
+        self, host: str | None = None, port: str | int | None = None, node_name: str | None = None
+    ) -> ClusterNode | None: ...
+    def get_primaries(self) -> list[ClusterNode]: ...
+    def get_replicas(self) -> list[ClusterNode]: ...
+    def get_random_node(self) -> ClusterNode: ...
+    def get_nodes(self) -> list[ClusterNode]: ...
+    def get_node_from_key(self, key: _Encodable, replica: bool = False) -> ClusterNode | None: ...
+    def get_default_node(self) -> ClusterNode | None: ...
+    def set_default_node(self, node: ClusterNode | None) -> bool: ...
+    def monitor(self, target_node: Incomplete | None = None): ...
+    def pubsub(
+        self, node: Incomplete | None = None, host: Incomplete | None = None, port: Incomplete | None = None, **kwargs
+    ): ...
+    def pipeline(self, transaction: Incomplete | None = None, shard_hint: Incomplete | None = None): ...
+    def lock(
+        self,
+        name: str,
+        timeout: float | None = None,
+        sleep: float = 0.1,
+        blocking: bool = True,
+        blocking_timeout: float | None = None,
+        lock_class: type[Incomplete] | None = None,
+        thread_local: bool = True,
+    ): ...
+    def keyslot(self, key: _Encodable) -> int: ...
+    def determine_slot(self, *args): ...
+    def get_encoder(self) -> Encoder: ...
+    def get_connection_kwargs(self) -> dict[str, Any]: ...
+    def execute_command(self, *args, **kwargs): ...
+    def close(self) -> None: ...
+
+class ClusterNode:
+    host: str
+    port: int
+    name: str
+    server_type: str | None
+    valkey_connection: Valkey[Incomplete] | None
+    def __init__(
+        self, host: str, port: int, server_type: str | None = None, valkey_connection: Valkey[Incomplete] | None = None
+    ) -> None: ...
+    def __eq__(self, obj: object) -> bool: ...
+    def __del__(self) -> None: ...
+
+class LoadBalancer:
+    primary_to_idx: dict[str, int]
+    start_index: int
+    def __init__(self, start_index: int = 0) -> None: ...
+    def get_server_index(self, primary: str, list_size: int) -> int: ...
+    def reset(self) -> None: ...
+
+class NodesManager:
+    nodes_cache: dict[str, ClusterNode]
+    slots_cache: dict[str, list[ClusterNode]]
+    startup_nodes: dict[str, ClusterNode]
+    default_node: ClusterNode | None
+    from_url: bool
+    connection_pool_class: type[ConnectionPool]
+    connection_kwargs: dict[str, Incomplete]  # TODO: could be a TypedDict
+    read_load_balancer: LoadBalancer
+    address_remap: Callable[[str, int], tuple[str, int]] | None
+    def __init__(
+        self,
+        startup_nodes: Iterable[ClusterNode],
+        from_url: bool = False,
+        require_full_coverage: bool = False,
+        lock: Lock | None = None,
+        dynamic_startup_nodes: bool = True,
+        connection_pool_class: type[ConnectionPool] = ...,
+        address_remap: Callable[[str, int], tuple[str, int]] | None = None,
+        **kwargs,  # TODO: same type as connection_kwargs
+    ) -> None: ...
+    def get_node(
+        self, host: str | None = None, port: int | str | None = None, node_name: str | None = None
+    ) -> ClusterNode | None: ...
+    def update_moved_exception(self, exception: MovedError) -> None: ...
+    def get_node_from_slot(self, slot: str, read_from_replicas: bool = False, server_type: str | None = None) -> ClusterNode: ...
+    def get_nodes_by_server_type(self, server_type: str) -> list[ClusterNode]: ...
+    def populate_startup_nodes(self, nodes: Iterable[ClusterNode]) -> None: ...
+    def check_slots_coverage(self, slots_cache: dict[str, list[ClusterNode]]) -> bool: ...
+    def create_valkey_connections(self, nodes: Iterable[ClusterNode]) -> None: ...
+    def create_valkey_node(self, host: str, port: int | str, **kwargs: Any) -> Valkey[Incomplete]: ...
+    def initialize(self) -> None: ...
+    def close(self) -> None: ...
+    def reset(self) -> None: ...
+    def remap_host_port(self, host: str, port: int) -> tuple[str, int]: ...
+
+class ClusterPubSub(PubSub):
+    node: ClusterNode | None
+    cluster: ValkeyCluster[Any]
+    def __init__(
+        self,
+        valkey_cluster: ValkeyCluster[Any],
+        node: ClusterNode | None = None,
+        host: str | None = None,
+        port: int | None = None,
+        **kwargs,
+    ) -> None: ...
+    def set_pubsub_node(
+        self, cluster: ValkeyCluster[Any], node: ClusterNode | None = None, host: str | None = None, port: int | None = None
+    ) -> None: ...
+    def get_pubsub_node(self) -> ClusterNode | None: ...
+    def execute_command(self, *args, **kwargs) -> None: ...
+    def get_valkey_connection(self) -> Valkey[Any] | None: ...
+
+class ClusterPipeline(ValkeyCluster[_StrType]):
+    command_stack: list[Incomplete]
+    nodes_manager: Incomplete
+    refresh_table_asap: bool
+    result_callbacks: Incomplete
+    startup_nodes: Incomplete
+    read_from_replicas: bool
+    command_flags: Incomplete
+    cluster_response_callbacks: Incomplete
+    cluster_error_retry_attempts: int
+    reinitialize_counter: int
+    reinitialize_steps: int
+    encoder: Encoder
+    commands_parser: Incomplete
+    def __init__(
+        self,
+        nodes_manager,
+        commands_parser,
+        result_callbacks: Incomplete | None = None,
+        cluster_response_callbacks: Incomplete | None = None,
+        startup_nodes: Incomplete | None = None,
+        read_from_replicas: bool = False,
+        cluster_error_retry_attempts: int = 3,
+        reinitialize_steps: int = 5,
+        lock: Lock | None = None,
+        **kwargs,
+    ) -> None: ...
+    def __len__(self) -> int: ...
+    def __bool__(self) -> Literal[True]: ...
+    def execute_command(self, *args, **kwargs): ...
+    def pipeline_execute_command(self, *args, **options): ...
+    def raise_first_error(self, stack) -> None: ...
+    def annotate_exception(self, exception, number, command) -> None: ...
+    def execute(self, raise_on_error: bool = True): ...
+    scripts: set[Any]  # is only set in `reset()`
+    watching: bool  # is only set in `reset()`
+    explicit_transaction: bool  # is only set in `reset()`
+    def reset(self) -> None: ...
+    def send_cluster_commands(self, stack, raise_on_error: bool = True, allow_redirections: bool = True): ...
+    def eval(self) -> None: ...
+    def multi(self) -> None: ...
+    def immediate_execute_command(self, *args, **options) -> None: ...
+    def load_scripts(self) -> None: ...
+    def watch(self, *names) -> None: ...
+    def unwatch(self) -> None: ...
+    def script_load_for_pipeline(self, *args, **kwargs) -> None: ...
+    def delete(self, *names): ...
+
+def block_pipeline_command(name: str) -> Callable[..., NoReturn]: ...
+
+class PipelineCommand:
+    args: Sequence[EncodableT]
+    options: _ParseResponseOptions
+    position: int | None
+    result: Any | Exception | None
+    node: Incomplete | None
+    asking: bool
+    def __init__(
+        self, args: Sequence[EncodableT], options: _ParseResponseOptions | None = None, position: int | None = None
+    ) -> None: ...
+
+class _ParseResponseCallback(Protocol):
+    def __call__(self, connection: Connection, command: EncodableT, /, **kwargs) -> Any: ...
+
+class NodeCommands:
+    parse_response: _ParseResponseCallback
+    connection_pool: ConnectionPool
+    connection: Connection
+    commands: list[PipelineCommand]
+    def __init__(
+        self, parse_response: _ParseResponseCallback, connection_pool: ConnectionPool, connection: Connection
+    ) -> None: ...
+    def append(self, c: PipelineCommand) -> None: ...
+    def write(self) -> None: ...
+    def read(self) -> None: ...
diff --git a/valkey/commands/__init__.pyi b/valkey/commands/__init__.pyi
new file mode 100644
index 00000000..1abccc40
--- /dev/null
+++ b/valkey/commands/__init__.pyi
@@ -0,0 +1,17 @@
+from .cluster import ValkeyClusterCommands as ValkeyClusterCommands
+from .core import AsyncCoreCommands as AsyncCoreCommands, CoreCommands as CoreCommands
+from .helpers import list_or_args as list_or_args
+from .parser import CommandsParser as CommandsParser
+from .sentinel import AsyncSentinelCommands as AsyncSentinelCommands, SentinelCommands as SentinelCommands
+from .valkeymodules import ValkeyModuleCommands as ValkeyModuleCommands
+
+__all__ = [
+    "ValkeyClusterCommands",
+    "CommandsParser",
+    "AsyncCoreCommands",
+    "CoreCommands",
+    "list_or_args",
+    "ValkeyModuleCommands",
+    "AsyncSentinelCommands",
+    "SentinelCommands",
+]
diff --git a/valkey/commands/bf/__init__.pyi b/valkey/commands/bf/__init__.pyi
new file mode 100644
index 00000000..d5ef70ee
--- /dev/null
+++ b/valkey/commands/bf/__init__.pyi
@@ -0,0 +1,58 @@
+from typing import Any
+
+from .commands import *
+from .info import BFInfo as BFInfo, CFInfo as CFInfo, CMSInfo as CMSInfo, TDigestInfo as TDigestInfo, TopKInfo as TopKInfo
+
+class AbstractBloom:
+    @staticmethod
+    def append_items(params, items) -> None: ...
+    @staticmethod
+    def append_error(params, error) -> None: ...
+    @staticmethod
+    def append_capacity(params, capacity) -> None: ...
+    @staticmethod
+    def append_expansion(params, expansion) -> None: ...
+    @staticmethod
+    def append_no_scale(params, noScale) -> None: ...
+    @staticmethod
+    def append_weights(params, weights) -> None: ...
+    @staticmethod
+    def append_no_create(params, noCreate) -> None: ...
+    @staticmethod
+    def append_items_and_increments(params, items, increments) -> None: ...
+    @staticmethod
+    def append_values_and_weights(params, items, weights) -> None: ...
+    @staticmethod
+    def append_max_iterations(params, max_iterations) -> None: ...
+    @staticmethod
+    def append_bucket_size(params, bucket_size) -> None: ...
+
+class CMSBloom(CMSCommands, AbstractBloom):
+    client: Any
+    commandmixin: Any
+    execute_command: Any
+    def __init__(self, client, **kwargs) -> None: ...
+
+class TOPKBloom(TOPKCommands, AbstractBloom):
+    client: Any
+    commandmixin: Any
+    execute_command: Any
+    def __init__(self, client, **kwargs) -> None: ...
+
+class CFBloom(CFCommands, AbstractBloom):
+    client: Any
+    commandmixin: Any
+    execute_command: Any
+    def __init__(self, client, **kwargs) -> None: ...
+
+class TDigestBloom(TDigestCommands, AbstractBloom):
+    client: Any
+    commandmixin: Any
+    execute_command: Any
+    def __init__(self, client, **kwargs) -> None: ...
+
+class BFBloom(BFCommands, AbstractBloom):
+    client: Any
+    commandmixin: Any
+    execute_command: Any
+    def __init__(self, client, **kwargs) -> None: ...
diff --git a/valkey/commands/bf/commands.pyi b/valkey/commands/bf/commands.pyi
new file mode 100644
index 00000000..99a296fd
--- /dev/null
+++ b/valkey/commands/bf/commands.pyi
@@ -0,0 +1,112 @@
+from _typeshed import Incomplete
+
+BF_RESERVE: str
+BF_ADD: str
+BF_MADD: str
+BF_INSERT: str
+BF_EXISTS: str
+BF_MEXISTS: str
+BF_SCANDUMP: str
+BF_LOADCHUNK: str
+BF_INFO: str
+CF_RESERVE: str
+CF_ADD: str
+CF_ADDNX: str
+CF_INSERT: str
+CF_INSERTNX: str
+CF_EXISTS: str
+CF_DEL: str
+CF_COUNT: str
+CF_SCANDUMP: str
+CF_LOADCHUNK: str
+CF_INFO: str
+CMS_INITBYDIM: str
+CMS_INITBYPROB: str
+CMS_INCRBY: str
+CMS_QUERY: str
+CMS_MERGE: str
+CMS_INFO: str
+TOPK_RESERVE: str
+TOPK_ADD: str
+TOPK_INCRBY: str
+TOPK_QUERY: str
+TOPK_COUNT: str
+TOPK_LIST: str
+TOPK_INFO: str
+TDIGEST_CREATE: str
+TDIGEST_RESET: str
+TDIGEST_ADD: str
+TDIGEST_MERGE: str
+TDIGEST_CDF: str
+TDIGEST_QUANTILE: str
+TDIGEST_MIN: str
+TDIGEST_MAX: str
+TDIGEST_INFO: str
+
+class BFCommands:
+    def create(self, key, errorRate, capacity, expansion: Incomplete | None = None, noScale: Incomplete | None = None): ...
+    def add(self, key, item): ...
+    def madd(self, key, *items): ...
+    def insert(
+        self,
+        key,
+        items,
+        capacity: Incomplete | None = None,
+        error: Incomplete | None = None,
+        noCreate: Incomplete | None = None,
+        expansion: Incomplete | None = None,
+        noScale: Incomplete | None = None,
+    ): ...
+    def exists(self, key, item): ...
+    def mexists(self, key, *items): ...
+    def scandump(self, key, iter): ...
+    def loadchunk(self, key, iter, data): ...
+    def info(self, key): ...
+
+class CFCommands:
+    def create(
+        self,
+        key,
+        capacity,
+        expansion: Incomplete | None = None,
+        bucket_size: Incomplete | None = None,
+        max_iterations: Incomplete | None = None,
+    ): ...
+    def add(self, key, item): ...
+    def addnx(self, key, item): ...
+    def insert(self, key, items, capacity: Incomplete | None = None, nocreate: Incomplete | None = None): ...
+    def insertnx(self, key, items, capacity: Incomplete | None = None, nocreate: Incomplete | None = None): ...
+    def exists(self, key, item): ...
+    def delete(self, key, item): ...
+    def count(self, key, item): ...
+    def scandump(self, key, iter): ...
+    def loadchunk(self, key, iter, data): ...
+    def info(self, key): ...
+
+class TOPKCommands:
+    def reserve(self, key, k, width, depth, decay): ...
+    def add(self, key, *items): ...
+    def incrby(self, key, items, increments): ...
+    def query(self, key, *items): ...
+    def count(self, key, *items): ...
+    def list(self, key, withcount: bool = False): ...
+    def info(self, key): ...
+
+class TDigestCommands:
+    def create(self, key, compression: int = 100): ...
+    def reset(self, key): ...
+    def add(self, key, values): ...
+    def merge(self, destination_key, num_keys, *keys, compression: int | None = None, override: bool = False): ...
+    def min(self, key): ...
+    def max(self, key): ...
+    def quantile(self, key, quantile, *quantiles): ...
+    def cdf(self, key, value, *values): ...
+    def info(self, key): ...
+
+class CMSCommands:
+    def initbydim(self, key, width, depth): ...
+    def initbyprob(self, key, error, probability): ...
+    def incrby(self, key, items, increments): ...
+    def query(self, key, *items): ...
+    def merge(self, destKey, numKeys, srcKeys, weights=[]): ...
+    def info(self, key): ...
diff --git a/valkey/commands/bf/info.pyi b/valkey/commands/bf/info.pyi
new file mode 100644
index 00000000..54d1cf04
--- /dev/null
+++ b/valkey/commands/bf/info.pyi
@@ -0,0 +1,43 @@
+from typing import Any
+
+class BFInfo:
+    capacity: Any
+    size: Any
+    filterNum: Any
+    insertedNum: Any
+    expansionRate: Any
+    def __init__(self, args) -> None: ...
+
+class CFInfo:
+    size: Any
+    bucketNum: Any
+    filterNum: Any
+    insertedNum: Any
+    deletedNum: Any
+    bucketSize: Any
+    expansionRate: Any
+    maxIteration: Any
+    def __init__(self, args) -> None: ...
+
+class CMSInfo:
+    width: Any
+    depth: Any
+    count: Any
+    def __init__(self, args) -> None: ...
+
+class TopKInfo:
+    k: Any
+    width: Any
+    depth: Any
+    decay: Any
+    def __init__(self, args) -> None: ...
+
+class TDigestInfo:
+    compression: Any
+    capacity: Any
+    mergedNodes: Any
+    unmergedNodes: Any
+    mergedWeight: Any
+    unmergedWeight: Any
+    totalCompressions: Any
+    def __init__(self, args) -> None: ...
diff --git a/valkey/commands/cluster.pyi b/valkey/commands/cluster.pyi
new file mode 100644
index 00000000..2654a73f
--- /dev/null
+++ b/valkey/commands/cluster.pyi
@@ -0,0 +1,60 @@
+from _typeshed import Incomplete
+from typing import NoReturn
+
+from .core import ACLCommands, DataAccessCommands, ManagementCommands, PubSubCommands, _StrType
+
+class ClusterMultiKeyCommands:
+    def mget_nonatomic(self, keys, *args): ...
+    def mset_nonatomic(self, mapping): ...
+    def exists(self, *keys): ...
+    def delete(self, *keys): ...
+    def touch(self, *keys): ...
+    def unlink(self, *keys): ...
+
+class ClusterManagementCommands(ManagementCommands):
+    def slaveof(self, *args, **kwargs) -> None: ...
+    def replicaof(self, *args, **kwargs) -> None: ...
+    def swapdb(self, *args, **kwargs) -> None: ...
+
+class ClusterDataAccessCommands(DataAccessCommands[_StrType]):
+    def stralgo(
+        self,
+        algo,
+        value1,
+        value2,
+        specific_argument: str = "strings",
+        len: bool = False,
+        idx: bool = False,
+        minmatchlen: Incomplete | None = None,
+        withmatchlen: bool = False,
+        **kwargs,
+    ): ...
+
+class ValkeyClusterCommands(
+    ClusterMultiKeyCommands, ClusterManagementCommands, ACLCommands[_StrType], PubSubCommands, ClusterDataAccessCommands[_StrType]
+):
+    def cluster_addslots(self, target_node, *slots): ...
+    def cluster_countkeysinslot(self, slot_id): ...
+    def cluster_count_failure_report(self, node_id): ...
+    def cluster_delslots(self, *slots): ...
+    def cluster_failover(self, target_node, option: Incomplete | None = None): ...
+    def cluster_info(self, target_nodes: Incomplete | None = None): ...
+    def cluster_keyslot(self, key): ...
+    def cluster_meet(self, host, port, target_nodes: Incomplete | None = None): ...
+    def cluster_nodes(self): ...
+    def cluster_replicate(self, target_nodes, node_id): ...
+    def cluster_reset(self, soft: bool = True, target_nodes: Incomplete | None = None): ...
+    def cluster_save_config(self, target_nodes: Incomplete | None = None): ...
+    def cluster_get_keys_in_slot(self, slot, num_keys): ...
+    def cluster_set_config_epoch(self, epoch, target_nodes: Incomplete | None = None): ...
+    def cluster_setslot(self, target_node, node_id, slot_id, state): ...
+    def cluster_setslot_stable(self, slot_id): ...
+    def cluster_replicas(self, node_id, target_nodes: Incomplete | None = None): ...
+    def cluster_slots(self, target_nodes: Incomplete | None = None): ...
+    def cluster_myshardid(self, target_nodes: Incomplete | None = None): ...
+    def cluster_links(self, target_node): ...
+    def cluster_flushslots(self, target_nodes: Incomplete | None = None) -> NoReturn: ...
+    def cluster_bumpepoch(self, target_nodes: Incomplete | None = None) -> NoReturn: ...
+    read_from_replicas: bool
+    def readonly(self, target_nodes: Incomplete | None = None): ...
+    def readwrite(self, target_nodes: Incomplete | None = None): ...
diff --git a/valkey/commands/core.pyi b/valkey/commands/core.pyi
new file mode 100644
index 00000000..d69b6710
--- /dev/null
+++ b/valkey/commands/core.pyi
@@ -0,0 +1,1771 @@
+import builtins
+from _typeshed import Incomplete, SupportsItems
+from collections.abc import AsyncIterator, Awaitable, Callable, Iterable, Iterator, Mapping, Sequence
+from datetime import datetime, timedelta
+from typing import Any, Generic, Literal, TypeVar, overload
+
+from ..asyncio.client import Valkey as AsyncValkey
+from ..client import _CommandOptions, _Key, _Value
+from ..typing import ChannelT, EncodableT, KeyT, PatternT, ScriptTextT, StreamIdT
+
+_ScoreCastFuncReturn = TypeVar("_ScoreCastFuncReturn")
+_StrType = TypeVar("_StrType", bound=str | bytes)
+
+class ACLCommands(Generic[_StrType]):
+    def acl_cat(self, category: str | None = None, **kwargs: _CommandOptions) -> list[str]: ...
+    def acl_deluser(self, *username: str, **kwargs: _CommandOptions) -> int: ...
+    def acl_genpass(self, bits: int | None = None, **kwargs: _CommandOptions) -> str: ...
+    def acl_getuser(self, username: str, **kwargs: _CommandOptions) -> Any | None: ...
+    def acl_help(self, **kwargs: _CommandOptions): ...
+    def acl_list(self, **kwargs: _CommandOptions) -> list[str]: ...
+    def acl_log(self, count: int | None = None, **kwargs: _CommandOptions): ...
+    def acl_log_reset(self, **kwargs: _CommandOptions): ...
+    def acl_load(self, **kwargs: _CommandOptions) -> bool: ...
+    def acl_save(self, **kwargs: _CommandOptions): ...
+    def acl_setuser(
+        self,
+        username: str,
+        enabled: bool = False,
+        nopass: bool = False,
+        passwords: Sequence[str] | None = None,
+        hashed_passwords: Sequence[str] | None = None,
+        categories: Sequence[str] | None = None,
+        commands: Sequence[str] | None = None,
+        keys: Sequence[str] | None = None,
+        channels: Iterable[ChannelT] | None = None,
+        selectors: Iterable[tuple[str, KeyT]] | None = None,
+        reset: bool = False,
+        reset_keys: bool = False,
+        reset_channels: bool = False,
+        reset_passwords: bool = False,
+        **kwargs: _CommandOptions,
+    ) -> bool: ...
+    def acl_users(self, **kwargs: _CommandOptions) -> list[str]: ...
+    def acl_whoami(self, **kwargs: _CommandOptions) -> str: ...
+
+class AsyncACLCommands(Generic[_StrType]):
+    async def acl_cat(self, category: str | None = None, **kwargs: _CommandOptions) -> list[str]: ...
+    async def acl_deluser(self, *username: str, **kwargs: _CommandOptions) -> int: ...
+    async def acl_genpass(self, bits: int | None = None, **kwargs: _CommandOptions) -> str: ...
+    async def acl_getuser(self, username: str, **kwargs: _CommandOptions) -> Any | None: ...
+    async def acl_help(self, **kwargs: _CommandOptions): ...
+    async def acl_list(self, **kwargs: _CommandOptions) -> list[str]: ...
+    async def acl_log(self, count: int | None = None, **kwargs: _CommandOptions): ...
+    async def acl_log_reset(self, **kwargs: _CommandOptions): ...
+    async def acl_load(self, **kwargs: _CommandOptions) -> bool: ...
+    async def acl_save(self, **kwargs: _CommandOptions): ...
+    async def acl_setuser(
+        self,
+        username: str,
+        enabled: bool = False,
+        nopass: bool = False,
+        passwords: Sequence[str] | None = None,
+        hashed_passwords: Sequence[str] | None = None,
+        categories: Sequence[str] | None = None,
+        commands: Sequence[str] | None = None,
+        keys: Sequence[str] | None = None,
+        channels: Iterable[ChannelT] | None = None,
+        selectors: Iterable[tuple[str, KeyT]] | None = None,
+        reset: bool = False,
+        reset_keys: bool = False,
+        reset_channels: bool = False,
+        reset_passwords: bool = False,
+        **kwargs: _CommandOptions,
+    ) -> bool: ...
+    async def acl_users(self, **kwargs: _CommandOptions) -> list[str]: ...
+    async def acl_whoami(self, **kwargs: _CommandOptions) -> str: ...
+
+class ManagementCommands:
+    def bgrewriteaof(self, **kwargs: _CommandOptions): ...
+    def bgsave(self, schedule: bool = True, **kwargs: _CommandOptions): ...
+    def role(self): ...
+    def client_kill(self, address: str, **kwargs: _CommandOptions) -> bool: ...
+    def client_kill_filter(
+        self,
+        _id: Incomplete | None = None,
+        _type: Incomplete | None = None,
+        addr: Incomplete | None = None,
+        skipme: Incomplete | None = None,
+        laddr: Incomplete | None = None,
+        user: Incomplete | None = None,
+        **kwargs: _CommandOptions,
+    ): ...
+    def client_info(self, **kwargs: _CommandOptions): ...
+    def client_list(
+        self, _type: str | None = None, client_id: list[str] = [], **kwargs: _CommandOptions
+    ) -> list[dict[str, str]]: ...
+    def client_getname(self, **kwargs: _CommandOptions) -> str | None: ...
+    def client_getredir(self, **kwargs: _CommandOptions): ...
+    def client_reply(self, reply, **kwargs: _CommandOptions): ...
+    def client_id(self, **kwargs: _CommandOptions) -> int: ...
+    def client_tracking_on(
+        self,
+        clientid: Incomplete | None = None,
+        prefix=[],
+        bcast: bool = False,
+        optin: bool = False,
+        optout: bool = False,
+        noloop: bool = False,
+    ): ...
+    def client_tracking_off(
+        self,
+        clientid: Incomplete | None = None,
+        prefix=[],
+        bcast: bool = False,
+        optin: bool = False,
+        optout: bool = False,
+        noloop: bool = False,
+    ): ...
+    def client_tracking(
+        self,
+        on: bool = True,
+        clientid: Incomplete | None = None,
+        prefix=[],
+        bcast: bool = False,
+        optin: bool = False,
+        optout: bool = False,
+        noloop: bool = False,
+        **kwargs: _CommandOptions,
+    ): ...
+    def client_trackinginfo(self, **kwargs: _CommandOptions): ...
+    def client_setname(self, name: str, **kwargs: _CommandOptions) -> bool: ...
+    def client_setinfo(self, attr: str, value: str, **kwargs: _CommandOptions) -> bool: ...
+    def client_unblock(self, client_id, error: bool = False, **kwargs: _CommandOptions): ...
+    def client_pause(self, timeout, all: bool = True, **kwargs: _CommandOptions): ...
+    def client_unpause(self, **kwargs: _CommandOptions): ...
+    def client_no_evict(self, mode: str): ...
+    def client_no_touch(self, mode: str): ...
+    def command(self, **kwargs: _CommandOptions): ...
+    def command_info(self, **kwargs: _CommandOptions): ...
+    def command_count(self, **kwargs: _CommandOptions): ...
+    def config_get(self, pattern: PatternT = "*", *args: PatternT, **kwargs: _CommandOptions): ...
+    def config_set(self, name: KeyT, value: EncodableT, *args: KeyT | EncodableT, **kwargs: _CommandOptions): ...
+    def config_resetstat(self, **kwargs: _CommandOptions): ...
+    def config_rewrite(self, **kwargs: _CommandOptions): ...
+    def dbsize(self, **kwargs: _CommandOptions) -> int: ...
+    def debug_object(self, key, **kwargs: _CommandOptions): ...
+    def debug_segfault(self, **kwargs: _CommandOptions): ...
+    def echo(self, value: _Value, **kwargs: _CommandOptions) -> bytes: ...
+    def flushall(self, asynchronous: bool = False, **kwargs: _CommandOptions) -> bool: ...
+    def flushdb(self, asynchronous: bool = False, **kwargs: _CommandOptions) -> bool: ...
+    def sync(self): ...
+    def psync(self, replicationid, offset): ...
+    def swapdb(self, first, second, **kwargs: _CommandOptions): ...
+    def select(self, index, **kwargs: _CommandOptions): ...
+    def info(self, section: _Key | None = None, *args: _Key, **kwargs: _CommandOptions) -> Mapping[str, Any]: ...
+    def lastsave(self, **kwargs: _CommandOptions): ...
+    def latency_doctor(self): ...
+    def latency_graph(self): ...
+    def lolwut(self, *version_numbers: _Value, **kwargs: _CommandOptions) -> bytes: ...
+    def reset(self) -> None: ...
+    def migrate(
+        self,
+        host,
+        port,
+        keys,
+        destination_db,
+        timeout,
+        copy: bool = False,
+        replace: bool = False,
+        auth: Incomplete | None = None,
+        **kwargs: _CommandOptions,
+    ): ...
+    def object(self, infotype, key, **kwargs: _CommandOptions): ...
+    def memory_doctor(self, **kwargs: _CommandOptions): ...
+    def memory_help(self, **kwargs: _CommandOptions): ...
+    def memory_stats(self, **kwargs: _CommandOptions) -> dict[str, Any]: ...
+    def memory_malloc_stats(self, **kwargs: _CommandOptions): ...
+    def memory_usage(self, key, samples: Incomplete | None = None, **kwargs: _CommandOptions): ...
+    def memory_purge(self, **kwargs: _CommandOptions): ...
+    def latency_histogram(self, *args): ...
+    def latency_history(self, event: str): ...
+    def latency_latest(self): ...
+    def latency_reset(self, *events: str) -> bool: ...
+    def ping(self, **kwargs: _CommandOptions) -> bool: ...
+    def quit(self, **kwargs: _CommandOptions): ...
+    def replicaof(self, *args, **kwargs: _CommandOptions): ...
+    def save(self, **kwargs: _CommandOptions) -> bool: ...
+    def shutdown(
+        self,
+        save: bool = False,
+        nosave: bool = False,
+        now: bool = False,
+        force: bool = False,
+        abort: bool = False,
+        **kwargs: _CommandOptions,
+    ) -> None: ...
+    def slaveof(self, host: Incomplete | None = None, port: Incomplete | None = None, **kwargs: _CommandOptions): ...
+    def slowlog_get(self, num: Incomplete | None = None, **kwargs: _CommandOptions): ...
+    def slowlog_len(self, **kwargs: _CommandOptions): ...
+    def slowlog_reset(self, **kwargs: _CommandOptions): ...
+    def time(self, **kwargs: _CommandOptions): ...
+    def wait(self, num_replicas, timeout, **kwargs: _CommandOptions): ...
+
+class AsyncManagementCommands:
+    async def bgrewriteaof(self, **kwargs: _CommandOptions): ...
+    async def bgsave(self, schedule: bool = True, **kwargs: _CommandOptions): ...
+    async def role(self): ...
+    async def client_kill(self, address: str, **kwargs: _CommandOptions) -> bool: ...
+    async def client_kill_filter(
+        self,
+        _id: Incomplete | None = None,
+        _type: Incomplete | None = None,
+        addr: Incomplete | None = None,
+        skipme: Incomplete | None = None,
+        laddr: Incomplete | None = None,
+        user: Incomplete | None = None,
+        **kwargs: _CommandOptions,
+    ): ...
+    async def client_info(self, **kwargs: _CommandOptions): ...
+    async def client_list(
+        self, _type: str | None = None, client_id: list[str] = [], **kwargs: _CommandOptions
+    ) -> list[dict[str, str]]: ...
+    async def client_getname(self, **kwargs: _CommandOptions) -> str | None: ...
+    async def client_getredir(self, **kwargs: _CommandOptions): ...
+    async def client_reply(self, reply, **kwargs: _CommandOptions): ...
+    async def client_id(self, **kwargs: _CommandOptions) -> int: ...
+    async def client_tracking_on(
+        self,
+        clientid: Incomplete | None = None,
+        prefix=[],
+        bcast: bool = False,
+        optin: bool = False,
+        optout: bool = False,
+        noloop: bool = False,
+    ): ...
+    async def client_tracking_off(
+        self,
+        clientid: Incomplete | None = None,
+        prefix=[],
+        bcast: bool = False,
+        optin: bool = False,
+        optout: bool = False,
+        noloop: bool = False,
+    ): ...
+    async def client_tracking(
+        self,
+        on: bool = True,
+        clientid: Incomplete | None = None,
+        prefix=[],
+        bcast: bool = False,
+        optin: bool = False,
+        optout: bool = False,
+        noloop: bool = False,
+        **kwargs: _CommandOptions,
+    ): ...
+    async def client_trackinginfo(self, **kwargs: _CommandOptions): ...
+    async def client_setname(self, name: str, **kwargs: _CommandOptions) -> bool: ...
+    async def client_setinfo(self, attr: str, value: str, **kwargs: _CommandOptions) -> bool: ...
+    async def client_unblock(self, client_id, error: bool = False, **kwargs: _CommandOptions): ...
+    async def client_no_evict(self, mode: str): ...
+    async def client_no_touch(self, mode: str): ...
+    async def client_pause(self, timeout, all: bool = True, **kwargs: _CommandOptions): ...
+    async def client_unpause(self, **kwargs: _CommandOptions): ...
+    async def command(self, **kwargs: _CommandOptions): ...
+    async def command_info(self, **kwargs: _CommandOptions): ...
+    async def command_count(self, **kwargs: _CommandOptions): ...
+    async def config_get(self, pattern: PatternT = "*", *args: PatternT, **kwargs: _CommandOptions): ...
+    async def config_set(self, name: KeyT, value: EncodableT, *args: KeyT | EncodableT, **kwargs: _CommandOptions): ...
+    async def config_resetstat(self, **kwargs: _CommandOptions): ...
+    async def config_rewrite(self, **kwargs: _CommandOptions): ...
+    async def dbsize(self, **kwargs: _CommandOptions) -> int: ...
+    async def debug_object(self, key, **kwargs: _CommandOptions): ...
+    async def debug_segfault(self, **kwargs: _CommandOptions): ...
+    async def echo(self, value: _Value, **kwargs: _CommandOptions) -> bytes: ...
+    async def flushall(self, asynchronous: bool = False, **kwargs: _CommandOptions) -> bool: ...
+    async def flushdb(self, asynchronous: bool = False, **kwargs: _CommandOptions) -> bool: ...
+    async def sync(self): ...
+    async def psync(self, replicationid, offset): ...
+    async def swapdb(self, first, second, **kwargs: _CommandOptions): ...
+    async def select(self, index, **kwargs: _CommandOptions): ...
+    async def info(self, section: _Key | None = None, *args: _Key, **kwargs: _CommandOptions) -> Mapping[str, Any]: ...
+    async def lastsave(self, **kwargs: _CommandOptions): ...
+    async def latency_doctor(self): ...
+    async def latency_graph(self): ...
+    async def lolwut(self, *version_numbers: _Value, **kwargs: _CommandOptions) -> bytes: ...
+    async def reset(self) -> None: ...
+    async def migrate(
+        self,
+        host,
+        port,
+        keys,
+        destination_db,
+        timeout,
+        copy: bool = False,
+        replace: bool = False,
+        auth: Incomplete | None = None,
+        **kwargs: _CommandOptions,
+    ): ...
+    async def object(self, infotype, key, **kwargs: _CommandOptions): ...
+    async def memory_doctor(self, **kwargs: _CommandOptions): ...
+    async def memory_help(self, **kwargs: _CommandOptions): ...
+    async def memory_stats(self, **kwargs: _CommandOptions) -> dict[str, Any]: ...
+    async def memory_malloc_stats(self, **kwargs: _CommandOptions): ...
+    async def memory_usage(self, key, samples: Incomplete | None = None, **kwargs: _CommandOptions): ...
+    async def memory_purge(self, **kwargs: _CommandOptions): ...
+    async def latency_histogram(self, *args): ...
+    async def latency_history(self, event: str): ...
+    async def latency_latest(self): ...
+    async def latency_reset(self, *events: str) -> bool: ...
+    async def ping(self, **kwargs: _CommandOptions) -> bool: ...
+    async def quit(self, **kwargs: _CommandOptions): ...
+    async def replicaof(self, *args, **kwargs: _CommandOptions): ...
+    async def save(self, **kwargs: _CommandOptions) -> bool: ...
+    async def shutdown(
+        self,
+        save: bool = False,
+        nosave: bool = False,
+        now: bool = False,
+        force: bool = False,
+        abort: bool = False,
+        **kwargs: _CommandOptions,
+    ) -> None: ...
+    async def slaveof(self, host: Incomplete | None = None, port: Incomplete | None = None, **kwargs: _CommandOptions): ...
+    async def slowlog_get(self, num: Incomplete | None = None, **kwargs: _CommandOptions): ...
+    async def slowlog_len(self, **kwargs: _CommandOptions): ...
+    async def slowlog_reset(self, **kwargs: _CommandOptions): ...
+    async def time(self, **kwargs: _CommandOptions): ...
+    async def wait(self, num_replicas, timeout, **kwargs: _CommandOptions): ...
+
+class BasicKeyCommands(Generic[_StrType]):
+    def append(self, key, value): ...
+    def bitcount(self, key: _Key, start: int | None = None, end: int | None = None, mode: str | None = None) -> int: ...
+    def bitfield(self, key, default_overflow: Incomplete | None = None): ...
+    def bitfield_ro(self, key, encoding: str, offset: int, items: list[tuple[str, int]] | None = None): ...
+    def bitop(self, operation, dest, *keys): ...
+    def bitpos(self, key: _Key, bit: int, start: int | None = None, end: int | None = None, mode: str | None = None): ...
+    def copy(self, source, destination, destination_db: Incomplete | None = None, replace: bool = False): ...
+    def decr(self, name, amount: int = 1) -> int: ...
+    def decrby(self, name, amount: int = 1) -> int: ...
+    def delete(self, *names: _Key) -> int: ...
+    def __delitem__(self, name: _Key) -> None: ...
+    def dump(self, name: _Key) -> _StrType | None: ...
+    def exists(self, *names: _Key) -> int: ...
+    __contains__ = exists
+    def expire(
+        self, name: _Key, time: int | timedelta, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False
+    ) -> bool: ...
+    def expireat(self, name, when, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False): ...
+    def get(self, name: _Key) -> _StrType | None: ...
+    def getdel(self, name: _Key) -> _StrType | None: ...
+    def getex(
+        self,
+        name,
+        ex: Incomplete | None = None,
+        px: Incomplete | None = None,
+        exat: Incomplete | None = None,
+        pxat: Incomplete | None = None,
+        persist: bool = False,
+    ): ...
+    def __getitem__(self, name: str): ...
+    def getbit(self, name: _Key, offset: int) -> int: ...
+    def getrange(self, key, start, end): ...
+    def getset(self, name, value) -> _StrType | None: ...
+    def incr(self, name: _Key, amount: int = 1) -> int: ...
+    def incrby(self, name: _Key, amount: int = 1) -> int: ...
+    def incrbyfloat(self, name: _Key, amount: float = 1.0) -> float: ...
+    def keys(self, pattern: _Key = "*", **kwargs: _CommandOptions) -> list[_StrType]: ...
+    def lmove(
+        self,
+        first_list: _Key,
+        second_list: _Key,
+        src: Literal["LEFT", "RIGHT"] = "LEFT",
+        dest: Literal["LEFT", "RIGHT"] = "RIGHT",
+    ) -> _Value: ...
+    def blmove(
+        self,
+        first_list: _Key,
+        second_list: _Key,
+        timeout: float,
+        src: Literal["LEFT", "RIGHT"] = "LEFT",
+        dest: Literal["LEFT", "RIGHT"] = "RIGHT",
+    ) -> _Value | None: ...
+    def mget(self, keys: _Key | Iterable[_Key], *args: _Key) -> list[_StrType | None]: ...
+    def mset(self, mapping: Mapping[_Key, _Value]) -> Literal[True]: ...
+    def msetnx(self, mapping: Mapping[_Key, _Value]) -> bool: ...
+    def move(self, name: _Key, db: int) -> bool: ...
+    def persist(self, name: _Key) -> bool: ...
+    def pexpire(
+        self, name: _Key, time: int | timedelta, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False
+    ) -> Literal[1, 0]: ...
+    def pexpireat(
+        self, name: _Key, when: int | datetime, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False
+    ) -> Literal[1, 0]: ...
+    def psetex(self, name, time_ms, value): ...
+    def pttl(self, name: _Key) -> int: ...
+    def hrandfield(self, key, count: Incomplete | None = None, withvalues: bool = False): ...
+    def randomkey(self, **kwargs: _CommandOptions): ...
+    def rename(self, src, dst): ...
+    def renamenx(self, src, dst): ...
+    def restore(
+        self,
+        name,
+        ttl,
+        value,
+        replace: bool = False,
+        absttl: bool = False,
+        idletime: Incomplete | None = None,
+        frequency: Incomplete | None = None,
+    ): ...
+    def set(
+        self,
+        name: _Key,
+        value: _Value,
+        ex: None | float | timedelta = None,
+        px: None | float | timedelta = None,
+        nx: bool = False,
+        xx: bool = False,
+        keepttl: bool = False,
+        get: bool = False,
+        exat: Incomplete | None = None,
+        pxat: Incomplete | None = None,
+    ) -> bool | None: ...
+    def __setitem__(self, name, value) -> None: ...
+    def setbit(self, name: _Key, offset: int, value: int) -> int: ...
+    def setex(self, name: _Key, time: int | timedelta, value: _Value) -> bool: ...
+    def setnx(self, name: _Key, value: _Value) -> bool: ...
+    def setrange(self, name, offset, value): ...
+    def stralgo(
+        self,
+        algo,
+        value1,
+        value2,
+        specific_argument: str = "strings",
+        len: bool = False,
+        idx: bool = False,
+        minmatchlen: Incomplete | None = None,
+        withmatchlen: bool = False,
+        **kwargs: _CommandOptions,
+    ): ...
+    def strlen(self, name): ...
+    def substr(self, name, start, end: int = -1): ...
+    def touch(self, *args): ...
+    def ttl(self, name: _Key) -> int: ...
+    def type(self, name): ...
+    def watch(self, *names): ...
+    def unwatch(self): ...
+    def unlink(self, *names: _Key) -> int: ...
+
+class AsyncBasicKeyCommands(Generic[_StrType]):
+    async def append(self, key, value): ...
+    async def bitcount(self, key: _Key, start: int | None = None, end: int | None = None, mode: str | None = None) -> int: ...
+    def bitfield(self, key, default_overflow: Incomplete | None = None): ...
+    async def bitfield_ro(self, key, encoding: str, offset: int, items: list[tuple[str, int]] | None = None): ...
+    async def bitop(self, operation, dest, *keys): ...
+    async def bitpos(self, key: _Key, bit: int, start: int | None = None, end: int | None = None, mode: str | None = None): ...
+    async def copy(self, source, destination, destination_db: Incomplete | None = None, replace: bool = False): ...
+    async def decr(self, name, amount: int = 1) -> int: ...
+    async def decrby(self, name, amount: int = 1) -> int: ...
+    async def delete(self, *names: _Key) -> int: ...
+    async def dump(self, name: _Key) -> _StrType | None: ...
+    async def exists(self, *names: _Key) -> int: ...
+    async def expire(
+        self, name: _Key, time: int | timedelta, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False
+    ) -> bool: ...
+    async def expireat(self, name, when, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False): ...
+    async def get(self, name: _Key) -> _StrType | None: ...
+    async def getdel(self, name: _Key) -> _StrType | None: ...
+    async def getex(
+        self,
+        name,
+        ex: Incomplete | None = None,
+        px: Incomplete | None = None,
+        exat: Incomplete | None = None,
+        pxat: Incomplete | None = None,
+        persist: bool = False,
+    ): ...
+    async def getbit(self, name: _Key, offset: int) -> int: ...
+    async def getrange(self, key, start, end): ...
+    async def getset(self, name, value) -> _StrType | None: ...
+    async def incr(self, name: _Key, amount: int = 1) -> int: ...
+    async def incrby(self, name: _Key, amount: int = 1) -> int: ...
+    async def incrbyfloat(self, name: _Key, amount: float = 1.0) -> float: ...
+    async def keys(self, pattern: _Key = "*", **kwargs: _CommandOptions) -> list[_StrType]: ...
+    async def lmove(
+        self,
+        first_list: _Key,
+        second_list: _Key,
+        src: Literal["LEFT", "RIGHT"] = "LEFT",
+        dest: Literal["LEFT", "RIGHT"] = "RIGHT",
+    ) -> _Value: ...
+    async def blmove(
+        self,
+        first_list: _Key,
+        second_list: _Key,
+        timeout: float,
+        src: Literal["LEFT", "RIGHT"] = "LEFT",
+        dest: Literal["LEFT", "RIGHT"] = "RIGHT",
+    ) -> _Value | None: ...
+    async def mget(self, keys: _Key | Iterable[_Key], *args: _Key) -> list[_StrType | None]: ...
+    async def mset(self, mapping: Mapping[_Key, _Value]) -> Literal[True]: ...
+    async def msetnx(self, mapping: Mapping[_Key, _Value]) -> bool: ...
+    async def move(self, name: _Key, db: int) -> bool: ...
+    async def persist(self, name: _Key) -> bool: ...
+    async def pexpire(
+        self, name: _Key, time: int | timedelta, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False
+    ) -> Literal[1, 0]: ...
+    async def pexpireat(
+        self, name: _Key, when: int | datetime, nx: bool = False, xx: bool = False, gt: bool = False, lt: bool = False
+    ) -> Literal[1, 0]: ...
+    async def psetex(self, name, time_ms, value): ...
+    async def pttl(self, name: _Key) -> int: ...
+    async def hrandfield(self, key, count: Incomplete | None = None, withvalues: bool = False): ...
+    async def randomkey(self, **kwargs: _CommandOptions): ...
+    async def rename(self, src, dst): ...
+    async def renamenx(self, src, dst): ...
+    async def restore(
+        self,
+        name,
+        ttl,
+        value,
+        replace: bool = False,
+        absttl: bool = False,
+        idletime: Incomplete | None = None,
+        frequency: Incomplete | None = None,
+    ): ...
+    async def set(
+        self,
+        name: _Key,
+        value: _Value,
+        ex: None | float | timedelta = None,
+        px: None | float | timedelta = None,
+        nx: bool = False,
+        xx: bool = False,
+        keepttl: bool = False,
+        get: bool = False,
+        exat: Incomplete | None = None,
+        pxat: Incomplete | None = None,
+    ) -> bool | None: ...
+    async def setbit(self, name: _Key, offset: int, value: int) -> int: ...
+    async def setex(self, name: _Key, time: int | timedelta, value: _Value) -> bool: ...
+    async def setnx(self, name: _Key, value: _Value) -> bool: ...
+    async def setrange(self, name, offset, value): ...
+    async def stralgo(
+        self,
+        algo,
+        value1,
+        value2,
+        specific_argument: str = "strings",
+        len: bool = False,
+        idx: bool = False,
+        minmatchlen: Incomplete | None = None,
+        withmatchlen: bool = False,
+        **kwargs: _CommandOptions,
+    ): ...
+    async def strlen(self, name): ...
+    async def substr(self, name, start, end: int = -1): ...
+    async def touch(self, *args): ...
+    async def ttl(self, name: _Key) -> int: ...
+    async def type(self, name): ...
+    async def watch(self, *names): ...
+    async def unwatch(self): ...
+    async def unlink(self, *names: _Key) -> int: ...
+    def __getitem__(self, name: str): ...
+    def __setitem__(self, name, value) -> None: ...
+    def __delitem__(self, name: _Key) -> None: ...
+    def __contains__(self, name: _Key) -> None: ...
+
+class ListCommands(Generic[_StrType]):
+    @overload
+    def blpop(self, keys: _Value | Iterable[_Value], timeout: Literal[0] | None = 0) -> tuple[_StrType, _StrType]: ...
+    @overload
+    def blpop(self, keys: _Value | Iterable[_Value], timeout: float) -> tuple[_StrType, _StrType] | None: ...
+    @overload
+    def brpop(self, keys: _Value | Iterable[_Value], timeout: Literal[0] | None = 0) -> tuple[_StrType, _StrType]: ...
+    @overload
+    def brpop(self, keys: _Value | Iterable[_Value], timeout: float) -> tuple[_StrType, _StrType] | None: ...
+    def brpoplpush(self, src, dst, timeout: int | None = 0): ...
+    def lindex(self, name: _Key, index: int | str) -> _StrType | None: ...
+    def linsert(
+        self, name: _Key, where: Literal["BEFORE", "AFTER", "before", "after"], refvalue: _Value, value: _Value
+    ) -> int: ...
+    def llen(self, name: _Key) -> int: ...
+    def lpop(self, name, count: int | None = None): ...
+    def lpush(self, name: _Value, *values: _Value) -> int: ...
+    def lpushx(self, name, value): ...
+    def lrange(self, name: _Key, start: int, end: int) -> list[_StrType]: ...
+    def lrem(self, name: _Key, count: int, value: _Value) -> int: ...
+    def lset(self, name: _Key, index: int, value: _Value) -> bool: ...
+    def ltrim(self, name: _Key, start: int, end: int) -> bool: ...
+    def rpop(self, name, count: int | None = None): ...
+    def rpoplpush(self, src, dst): ...
+    def rpush(self, name: _Value, *values: _Value) -> int: ...
+    def rpushx(self, name, value): ...
+    def lpos(
+        self, name, value, rank: Incomplete | None = None, count: Incomplete | None = None, maxlen: Incomplete | None = None
+    ): ...
+    @overload
+    def sort(
+        self,
+        name: _Key,
+        start: int | None = None,
+        num: int | None = None,
+        by: _Key | None = None,
+        get: _Key | Sequence[_Key] | None = None,
+        desc: bool = False,
+        alpha: bool = False,
+        store: None = None,
+        groups: bool = False,
+    ) -> list[_StrType]: ...
+    @overload
+    def sort(
+        self,
+        name: _Key,
+        start: int | None = None,
+        num: int | None = None,
+        by: _Key | None = None,
+        get: _Key | Sequence[_Key] | None = None,
+        desc: bool = False,
+        alpha: bool = False,
+        *,
+        store: _Key,
+        groups: bool = False,
+    ) -> int: ...
+    @overload
+    def sort(
+        self,
+        name: _Key,
+        start: int | None,
+        num: int | None,
+        by: _Key | None,
+        get: _Key | Sequence[_Key] | None,
+        desc: bool,
+        alpha: bool,
+        store: _Key,
+        groups: bool = False,
+    ) -> int: ...
+
+class AsyncListCommands(Generic[_StrType]):
+    @overload
+    async def blpop(self, keys: _Value | Iterable[_Value], timeout: Literal[0] | None = 0) -> tuple[_StrType, _StrType]: ...
+    @overload
+    async def blpop(self, keys: _Value | Iterable[_Value], timeout: float) -> tuple[_StrType, _StrType] | None: ...
+    @overload
+    async def brpop(self, keys: _Value | Iterable[_Value], timeout: Literal[0] | None = 0) -> tuple[_StrType, _StrType]: ...
+    @overload
+    async def brpop(self, keys: _Value | Iterable[_Value], timeout: float) -> tuple[_StrType, _StrType] | None: ...
+    async def brpoplpush(self, src, dst, timeout: int | None = 0): ...
+    async def lindex(self, name: _Key, index: int | str) -> _StrType | None: ...
+    async def linsert(
+        self, name: _Key, where: Literal["BEFORE", "AFTER", "before", "after"], refvalue: _Value, value: _Value
+    ) -> int: ...
+    async def llen(self, name: _Key) -> int: ...
+    async def lpop(self, name, count: int | None = None): ...
+    async def lpush(self, name: _Value, *values: _Value) -> int: ...
+    async def lpushx(self, name, value): ...
+    async def lrange(self, name: _Key, start: int, end: int) -> list[_StrType]: ...
+    async def lrem(self, name: _Key, count: int, value: _Value) -> int: ...
+    async def lset(self, name: _Key, index: int, value: _Value) -> bool: ...
+    async def ltrim(self, name: _Key, start: int, end: int) -> bool: ...
+    async def rpop(self, name, count: int | None = None): ...
+    async def rpoplpush(self, src, dst): ...
+    async def rpush(self, name: _Value, *values: _Value) -> int: ...
+    async def rpushx(self, name, value): ...
+    async def lpos(
+        self, name, value, rank: Incomplete | None = None, count: Incomplete | None = None, maxlen: Incomplete | None = None
+    ): ...
+    @overload
+    async def sort(
+        self,
+        name: _Key,
+        start: int | None = None,
+        num: int | None = None,
+        by: _Key | None = None,
+        get: _Key | Sequence[_Key] | None = None,
+        desc: bool = False,
+        alpha: bool = False,
+        store: None = None,
+        groups: bool = False,
+    ) -> list[_StrType]: ...
+    @overload
+    async def sort(
+        self,
+        name: _Key,
+        start: int | None = None,
+        num: int | None = None,
+        by: _Key | None = None,
+        get: _Key | Sequence[_Key] | None = None,
+        desc: bool = False,
+        alpha: bool = False,
+        *,
+        store: _Key,
+        groups: bool = False,
+    ) -> int: ...
+    @overload
+    async def sort(
+        self,
+        name: _Key,
+        start: int | None,
+        num: int | None,
+        by: _Key | None,
+        get: _Key | Sequence[_Key] | None,
+        desc: bool,
+        alpha: bool,
+        store: _Key,
+        groups: bool = False,
+    ) -> int: ...
+
+class ScanCommands(Generic[_StrType]):
+    """Stubs for the cursor-based SCAN family of commands (synchronous client).
+
+    ``_StrType`` is the client's decoded reply type (``str`` or ``bytes``,
+    selected by ``decode_responses``).  Each ``*scan`` method returns the
+    next cursor plus one page of results; each ``*scan_iter`` variant hides
+    the cursor and lazily yields individual items.
+    """
+    def scan(
+        self,
+        cursor: int = 0,
+        match: _Key | None = None,
+        count: int | None = None,
+        _type: str | None = None,
+        **kwargs: _CommandOptions,
+    ) -> tuple[int, list[_StrType]]: ...
+    def scan_iter(
+        self, match: _Key | None = None, count: int | None = None, _type: str | None = None, **kwargs: _CommandOptions
+    ) -> Iterator[_StrType]: ...
+    def sscan(
+        self, name: _Key, cursor: int = 0, match: _Key | None = None, count: int | None = None
+    ) -> tuple[int, list[_StrType]]: ...
+    def sscan_iter(self, name: _Key, match: _Key | None = None, count: int | None = None) -> Iterator[_StrType]: ...
+    def hscan(
+        self, name: _Key, cursor: int = 0, match: _Key | None = None, count: int | None = None,
+        no_values: bool | None = None,
+    ) -> tuple[int, dict[_StrType, _StrType]]: ...
+    def hscan_iter(
+        self, name: _Key, match: _Key | None = None, count: int | None = None,
+        no_values: bool | None = None,
+    ) -> Iterator[tuple[_StrType, _StrType]]: ...
+    # zscan/zscan_iter overloads: scores default to float; supplying
+    # score_cast_func (keyword-only in the middle overload, fully positional
+    # in the last) switches the score type to the callable's return type.
+    @overload
+    def zscan(
+        self, name: _Key, cursor: int = 0, match: _Key | None = None, count: int | None = None
+    ) -> tuple[int, list[tuple[_StrType, float]]]: ...
+    @overload
+    def zscan(
+        self,
+        name: _Key,
+        cursor: int = 0,
+        match: _Key | None = None,
+        count: int | None = None,
+        *,
+        score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn],
+    ) -> tuple[int, list[tuple[_StrType, _ScoreCastFuncReturn]]]: ...
+    @overload
+    def zscan(
+        self,
+        name: _Key,
+        cursor: int,
+        match: _Key | None,
+        count: int | None,
+        score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn],
+    ) -> tuple[int, list[tuple[_StrType, _ScoreCastFuncReturn]]]: ...
+    @overload
+    def zscan_iter(self, name: _Key, match: _Key | None = None, count: int | None = None) -> Iterator[tuple[_StrType, float]]: ...
+    @overload
+    def zscan_iter(
+        self,
+        name: _Key,
+        match: _Key | None = None,
+        count: int | None = None,
+        *,
+        score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn],
+    ) -> Iterator[tuple[_StrType, _ScoreCastFuncReturn]]: ...
+    @overload
+    def zscan_iter(
+        self, name: _Key, match: _Key | None, count: int | None, score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn]
+    ) -> Iterator[tuple[_StrType, _ScoreCastFuncReturn]]: ...
+
+class AsyncScanCommands(Generic[_StrType]):
+    """Stubs for the cursor-based SCAN family of commands (asyncio client).
+
+    Mirrors the synchronous SCAN stubs: the ``*scan`` coroutines return a
+    cursor plus one page of results, while the ``*scan_iter`` methods are
+    plain (non-async) functions returning an ``AsyncIterator`` that drives
+    the cursor internally.
+    """
+    async def scan(
+        self,
+        cursor: int = 0,
+        match: _Key | None = None,
+        count: int | None = None,
+        _type: str | None = None,
+        **kwargs: _CommandOptions,
+    ) -> tuple[int, list[_StrType]]: ...
+    def scan_iter(
+        self, match: _Key | None = None, count: int | None = None, _type: str | None = None, **kwargs: _CommandOptions
+    ) -> AsyncIterator[_StrType]: ...
+    async def sscan(
+        self, name: _Key, cursor: int = 0, match: _Key | None = None, count: int | None = None
+    ) -> tuple[int, list[_StrType]]: ...
+    def sscan_iter(self, name: _Key, match: _Key | None = None, count: int | None = None) -> AsyncIterator[_StrType]: ...
+    async def hscan(
+        self, name: _Key, cursor: int = 0, match: _Key | None = None, count: int | None = None,
+        no_values: bool | None = None,
+    ) -> tuple[int, dict[_StrType, _StrType]]: ...
+    def hscan_iter(
+        self, name: _Key, match: _Key | None = None, count: int | None = None,
+        no_values: bool | None = None,
+    ) -> AsyncIterator[tuple[_StrType, _StrType]]: ...
+    # zscan/zscan_iter overloads: scores default to float; score_cast_func
+    # (keyword-only or fully positional) switches the score type.
+    @overload
+    async def zscan(
+        self, name: _Key, cursor: int = 0, match: _Key | None = None, count: int | None = None
+    ) -> tuple[int, list[tuple[_StrType, float]]]: ...
+    @overload
+    async def zscan(
+        self,
+        name: _Key,
+        cursor: int = 0,
+        match: _Key | None = None,
+        count: int | None = None,
+        *,
+        score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn],
+    ) -> tuple[int, list[tuple[_StrType, _ScoreCastFuncReturn]]]: ...
+    @overload
+    async def zscan(
+        self,
+        name: _Key,
+        cursor: int,
+        match: _Key | None,
+        count: int | None,
+        score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn],
+    ) -> tuple[int, list[tuple[_StrType, _ScoreCastFuncReturn]]]: ...
+    @overload
+    def zscan_iter(
+        self, name: _Key, match: _Key | None = None, count: int | None = None
+    ) -> AsyncIterator[tuple[_StrType, float]]: ...
+    @overload
+    def zscan_iter(
+        self,
+        name: _Key,
+        match: _Key | None = None,
+        count: int | None = None,
+        *,
+        score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn],
+    ) -> AsyncIterator[tuple[_StrType, _ScoreCastFuncReturn]]: ...
+    @overload
+    def zscan_iter(
+        self, name: _Key, match: _Key | None, count: int | None, score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn]
+    ) -> AsyncIterator[tuple[_StrType, _ScoreCastFuncReturn]]: ...
+
+class SetCommands(Generic[_StrType]):
+    """Stubs for set (S*) commands on the synchronous client.
+
+    NOTE(review): sdiff/sinter/sunion/spop/srandmember are typed with the
+    input-side ``_Value`` while smembers returns ``list[_StrType]`` — confirm
+    this asymmetry is intentional for the valkey adaptation.
+    """
+    def sadd(self, name: _Key, *values: _Value) -> int: ...
+    def scard(self, name: _Key) -> int: ...
+    def sdiff(self, keys: _Key | Iterable[_Key], *args: _Key) -> builtins.set[_Value]: ...
+    def sdiffstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> int: ...
+    def sinter(self, keys: _Key | Iterable[_Key], *args: _Key) -> builtins.set[_Value]: ...
+    def sinterstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> int: ...
+    def sismember(self, name: _Key, value: _Value) -> bool: ...
+    def smembers(self, name: _Key) -> builtins.list[_StrType]: ...
+    def smismember(self, name, values, *args): ...
+    def smove(self, src: _Key, dst: _Key, value: _Value) -> bool: ...
+    # spop/srandmember: no count -> a single element (or None); an int count
+    # -> a list of elements.
+    @overload
+    def spop(self, name: _Key, count: None = None) -> _Value | None: ...
+    @overload
+    def spop(self, name: _Key, count: int) -> list[_Value]: ...
+    @overload
+    def srandmember(self, name: _Key, number: None = None) -> _Value | None: ...
+    @overload
+    def srandmember(self, name: _Key, number: int) -> list[_Value]: ...
+    def srem(self, name: _Key, *values: _Value) -> int: ...
+    def sunion(self, keys: _Key | Iterable[_Key], *args: _Key) -> builtins.set[_Value]: ...
+    def sunionstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> int: ...
+
+class AsyncSetCommands(Generic[_StrType]):
+    """Stubs for set (S*) commands on the asyncio client; mirrors the
+    synchronous set-command stubs method-for-method.
+    """
+    async def sadd(self, name: _Key, *values: _Value) -> int: ...
+    async def scard(self, name: _Key) -> int: ...
+    async def sdiff(self, keys: _Key | Iterable[_Key], *args: _Key) -> builtins.set[_Value]: ...
+    async def sdiffstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> int: ...
+    async def sinter(self, keys: _Key | Iterable[_Key], *args: _Key) -> builtins.set[_Value]: ...
+    async def sinterstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> int: ...
+    async def sismember(self, name: _Key, value: _Value) -> bool: ...
+    async def smembers(self, name: _Key) -> builtins.list[_StrType]: ...
+    async def smismember(self, name, values, *args): ...
+    async def smove(self, src: _Key, dst: _Key, value: _Value) -> bool: ...
+    # spop/srandmember: no count -> single element (or None); int count -> list.
+    @overload
+    async def spop(self, name: _Key, count: None = None) -> _Value | None: ...
+    @overload
+    async def spop(self, name: _Key, count: int) -> list[_Value]: ...
+    @overload
+    async def srandmember(self, name: _Key, number: None = None) -> _Value | None: ...
+    @overload
+    async def srandmember(self, name: _Key, number: int) -> list[_Value]: ...
+    async def srem(self, name: _Key, *values: _Value) -> int: ...
+    async def sunion(self, keys: _Key | Iterable[_Key], *args: _Key) -> builtins.set[_Value]: ...
+    async def sunionstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> int: ...
+
+class StreamCommands:
+    """Stubs for stream (X*) commands on the synchronous client.
+
+    Most parameters and return types are still untyped / ``Incomplete`` —
+    these signatures pin argument names and defaults only, pending a more
+    precise typing pass.
+    """
+    def xack(self, name, groupname, *ids): ...
+    def xadd(
+        self,
+        name: KeyT,
+        # Only accepts dict objects, but for variance reasons we use a looser annotation
+        fields: SupportsItems[bytes | memoryview | str | float, Any],
+        id: str | int | bytes | memoryview = "*",
+        maxlen=None,
+        approximate: bool = True,
+        nomkstream: bool = False,
+        minid: Incomplete | None = None,
+        limit: Incomplete | None = None,
+    ): ...
+    def xautoclaim(
+        self,
+        name,
+        groupname,
+        consumername,
+        min_idle_time,
+        start_id: StreamIdT = "0-0",
+        count: Incomplete | None = None,
+        justid: bool = False,
+    ): ...
+    def xclaim(
+        self,
+        name,
+        groupname,
+        consumername,
+        min_idle_time,
+        message_ids,
+        idle=None,
+        time=None,
+        retrycount=None,
+        force=False,
+        justid=False,
+    ): ...
+    def xdel(self, name: KeyT, *ids: str | int | bytes | memoryview): ...
+    def xgroup_create(self, name, groupname, id: str | int = "$", mkstream: bool = False, entries_read: int | None = None): ...
+    def xgroup_delconsumer(self, name, groupname, consumername): ...
+    def xgroup_destroy(self, name, groupname): ...
+    def xgroup_createconsumer(self, name, groupname, consumername): ...
+    def xgroup_setid(self, name, groupname, id, entries_read: int | None = None): ...
+    def xinfo_consumers(self, name, groupname): ...
+    def xinfo_groups(self, name): ...
+    def xinfo_stream(self, name, full: bool = False): ...
+    def xlen(self, name: _Key) -> int: ...
+    def xpending(self, name, groupname): ...
+    def xpending_range(
+        self, name: _Key, groupname, min, max, count: int, consumername: Incomplete | None = None, idle: int | None = None
+    ): ...
+    def xrange(self, name, min: str = "-", max: str = "+", count: Incomplete | None = None): ...
+    def xread(self, streams, count: Incomplete | None = None, block: Incomplete | None = None): ...
+    def xreadgroup(
+        self,
+        groupname,
+        consumername,
+        streams,
+        count: Incomplete | None = None,
+        block: Incomplete | None = None,
+        noack: bool = False,
+    ): ...
+    def xrevrange(self, name, max: str = "+", min: str = "-", count: Incomplete | None = None): ...
+    def xtrim(
+        self, name, maxlen: int | None = None, approximate: bool = True, minid: Incomplete | None = None, limit: int | None = None
+    ): ...
+
+class AsyncStreamCommands:
+    """Stubs for stream (X*) commands on the asyncio client; mirrors the
+    synchronous stream stubs, with the same largely-untyped signatures.
+    """
+    async def xack(self, name, groupname, *ids): ...
+    async def xadd(
+        self,
+        name: KeyT,
+        # Only accepts dict objects, but for variance reasons we use a looser annotation
+        fields: SupportsItems[bytes | memoryview | str | float, Any],
+        id: str | int | bytes | memoryview = "*",
+        maxlen=None,
+        approximate: bool = True,
+        nomkstream: bool = False,
+        minid: Incomplete | None = None,
+        limit: Incomplete | None = None,
+    ): ...
+    async def xautoclaim(
+        self,
+        name,
+        groupname,
+        consumername,
+        min_idle_time,
+        start_id: StreamIdT = "0-0",
+        count: Incomplete | None = None,
+        justid: bool = False,
+    ): ...
+    async def xclaim(
+        self,
+        name,
+        groupname,
+        consumername,
+        min_idle_time,
+        message_ids,
+        idle=None,
+        time=None,
+        retrycount=None,
+        force=False,
+        justid=False,
+    ): ...
+    async def xdel(self, name: KeyT, *ids: str | int | bytes | memoryview): ...
+    async def xgroup_create(self, name, groupname, id: str | int = "$", mkstream: bool = False, entries_read: int | None = None): ...
+    async def xgroup_delconsumer(self, name, groupname, consumername): ...
+    async def xgroup_destroy(self, name, groupname): ...
+    async def xgroup_createconsumer(self, name, groupname, consumername): ...
+    async def xgroup_setid(self, name, groupname, id, entries_read: int | None = None): ...
+    async def xinfo_consumers(self, name, groupname): ...
+    async def xinfo_groups(self, name): ...
+    async def xinfo_stream(self, name, full: bool = False): ...
+    async def xlen(self, name: _Key) -> int: ...
+    async def xpending(self, name, groupname): ...
+    async def xpending_range(
+        self, name: _Key, groupname, min, max, count: int, consumername: Incomplete | None = None, idle: int | None = None
+    ): ...
+    async def xrange(self, name, min: str = "-", max: str = "+", count: Incomplete | None = None): ...
+    async def xread(self, streams, count: Incomplete | None = None, block: Incomplete | None = None): ...
+    async def xreadgroup(
+        self,
+        groupname,
+        consumername,
+        streams,
+        count: Incomplete | None = None,
+        block: Incomplete | None = None,
+        noack: bool = False,
+    ): ...
+    async def xrevrange(self, name, max: str = "+", min: str = "-", count: Incomplete | None = None): ...
+    async def xtrim(
+        self, name, maxlen: int | None = None, approximate: bool = True, minid: Incomplete | None = None, limit: int | None = None
+    ): ...
+
+class SortedSetCommands(Generic[_StrType]):
+    """Stubs for sorted-set (Z*) commands on the synchronous client.
+
+    The with-scores commands are heavily overloaded: ``withscores=True``
+    yields ``(member, score)`` tuples, where the score type is ``float``
+    unless a ``score_cast_func`` is given, in which case it is that
+    callable's return type.  Without ``withscores`` the result is a plain
+    ``list[_StrType]``.
+    """
+    def zadd(
+        self,
+        name: _Key,
+        mapping: Mapping[_Key, _Value],
+        nx: bool = False,
+        xx: bool = False,
+        ch: bool = False,
+        incr: bool = False,
+        gt: Incomplete | None = False,
+        lt: Incomplete | None = False,
+    ) -> int: ...
+    def zcard(self, name: _Key) -> int: ...
+    def zcount(self, name: _Key, min: _Value, max: _Value) -> int: ...
+    def zdiff(self, keys, withscores: bool = False): ...
+    def zdiffstore(self, dest, keys): ...
+    def zincrby(self, name: _Key, amount: float, value: _Value) -> float: ...
+    def zinter(self, keys, aggregate: Incomplete | None = None, withscores: bool = False): ...
+    def zinterstore(self, dest: _Key, keys: Iterable[_Key], aggregate: Literal["SUM", "MIN", "MAX"] | None = None) -> int: ...
+    def zlexcount(self, name: _Key, min: _Value, max: _Value) -> int: ...
+    def zpopmax(self, name: _Key, count: int | None = None) -> list[tuple[_StrType, float]]: ...
+    def zpopmin(self, name: _Key, count: int | None = None) -> list[tuple[_StrType, float]]: ...
+    def zrandmember(self, key, count: Incomplete | None = None, withscores: bool = False): ...
+    # bzpopmax/bzpopmin: timeout 0 blocks indefinitely, so the result is
+    # never None; any other timeout may expire and return None.
+    @overload
+    def bzpopmax(self, keys: _Key | Iterable[_Key], timeout: Literal[0] = 0) -> tuple[_StrType, _StrType, float]: ...
+    @overload
+    def bzpopmax(self, keys: _Key | Iterable[_Key], timeout: float) -> tuple[_StrType, _StrType, float] | None: ...
+    @overload
+    def bzpopmin(self, keys: _Key | Iterable[_Key], timeout: Literal[0] = 0) -> tuple[_StrType, _StrType, float]: ...
+    @overload
+    def bzpopmin(self, keys: _Key | Iterable[_Key], timeout: float) -> tuple[_StrType, _StrType, float] | None: ...
+    @overload
+    def zrange(  # type: ignore[overload-overlap]
+        self,
+        name: _Key,
+        start: int,
+        end: int,
+        desc: bool,
+        withscores: Literal[True],
+        score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn],
+        byscore: bool = False,
+        bylex: bool = False,
+        offset: int | None = None,
+        num: int | None = None,
+    ) -> list[tuple[_StrType, _ScoreCastFuncReturn]]: ...
+    @overload
+    def zrange(  # type: ignore[overload-overlap]
+        self,
+        name: _Key,
+        start: int,
+        end: int,
+        desc: bool,
+        withscores: Literal[True],
+        score_cast_func: Callable[[_StrType], float] = ...,
+        byscore: bool = False,
+        bylex: bool = False,
+        offset: int | None = None,
+        num: int | None = None,
+    ) -> list[tuple[_StrType, float]]: ...
+    @overload
+    def zrange(  # type: ignore[overload-overlap]
+        self,
+        name: _Key,
+        start: int,
+        end: int,
+        *,
+        withscores: Literal[True],
+        score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn],
+        byscore: bool = False,
+        bylex: bool = False,
+        offset: int | None = None,
+        num: int | None = None,
+    ) -> list[tuple[_StrType, _ScoreCastFuncReturn]]: ...
+    @overload
+    def zrange(  # type: ignore[overload-overlap]
+        self,
+        name: _Key,
+        start: int,
+        end: int,
+        *,
+        withscores: Literal[True],
+        score_cast_func: Callable[[_StrType], float] = ...,
+        byscore: bool = False,
+        bylex: bool = False,
+        offset: int | None = None,
+        num: int | None = None,
+    ) -> list[tuple[_StrType, float]]: ...
+    @overload
+    def zrange(
+        self,
+        name: _Key,
+        start: int,
+        end: int,
+        desc: bool = False,
+        withscores: bool = False,
+        score_cast_func: Callable[[_StrType], Any] = ...,
+        byscore: bool = False,
+        bylex: bool = False,
+        offset: int | None = None,
+        num: int | None = None,
+    ) -> list[_StrType]: ...
+    @overload
+    def zrevrange(
+        self,
+        name: _Key,
+        start: int,
+        end: int,
+        withscores: Literal[True],
+        score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn],
+    ) -> list[tuple[_StrType, _ScoreCastFuncReturn]]: ...
+    @overload
+    def zrevrange(  # type: ignore[overload-overlap]
+        self, name: _Key, start: int, end: int, withscores: Literal[True]
+    ) -> list[tuple[_StrType, float]]: ...
+    @overload
+    def zrevrange(
+        self, name: _Key, start: int, end: int, withscores: bool = False, score_cast_func: Callable[[Any], Any] = ...
+    ) -> list[_StrType]: ...
+    def zrangestore(
+        self,
+        dest,
+        name,
+        start,
+        end,
+        byscore: bool = False,
+        bylex: bool = False,
+        desc: bool = False,
+        offset: Incomplete | None = None,
+        num: Incomplete | None = None,
+    ): ...
+    def zrangebylex(
+        self, name: _Key, min: _Value, max: _Value, start: int | None = None, num: int | None = None
+    ) -> list[_StrType]: ...
+    def zrevrangebylex(
+        self, name: _Key, max: _Value, min: _Value, start: int | None = None, num: int | None = None
+    ) -> list[_StrType]: ...
+    @overload
+    def zrangebyscore(  # type: ignore[overload-overlap]
+        self,
+        name: _Key,
+        min: _Value,
+        max: _Value,
+        start: int | None = None,
+        num: int | None = None,
+        *,
+        withscores: Literal[True],
+        score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn],
+    ) -> list[tuple[_StrType, _ScoreCastFuncReturn]]: ...
+    @overload
+    def zrangebyscore(  # type: ignore[overload-overlap]
+        self, name: _Key, min: _Value, max: _Value, start: int | None = None, num: int | None = None, *, withscores: Literal[True]
+    ) -> list[tuple[_StrType, float]]: ...
+    @overload
+    def zrangebyscore(
+        self,
+        name: _Key,
+        min: _Value,
+        max: _Value,
+        start: int | None = None,
+        num: int | None = None,
+        withscores: bool = False,
+        score_cast_func: Callable[[_StrType], Any] = ...,
+    ) -> list[_StrType]: ...
+    @overload
+    def zrevrangebyscore(  # type: ignore[overload-overlap]
+        self,
+        name: _Key,
+        max: _Value,
+        min: _Value,
+        start: int | None = None,
+        num: int | None = None,
+        *,
+        withscores: Literal[True],
+        score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn],
+    ) -> list[tuple[_StrType, _ScoreCastFuncReturn]]: ...
+    @overload
+    def zrevrangebyscore(  # type: ignore[overload-overlap]
+        self, name: _Key, max: _Value, min: _Value, start: int | None = None, num: int | None = None, *, withscores: Literal[True]
+    ) -> list[tuple[_StrType, float]]: ...
+    @overload
+    def zrevrangebyscore(
+        self,
+        name: _Key,
+        max: _Value,
+        min: _Value,
+        start: int | None = None,
+        num: int | None = None,
+        withscores: bool = False,
+        score_cast_func: Callable[[_StrType], Any] = ...,
+    ) -> list[_StrType]: ...
+    def zrank(self, name: _Key, value: _Value, withscore: bool = False) -> int | None: ...
+    def zrem(self, name: _Key, *values: _Value) -> int: ...
+    def zremrangebylex(self, name: _Key, min: _Value, max: _Value) -> int: ...
+    def zremrangebyrank(self, name: _Key, min: int, max: int) -> int: ...
+    def zremrangebyscore(self, name: _Key, min: _Value, max: _Value) -> int: ...
+    def zrevrank(self, name: _Key, value: _Value, withscore: bool = False) -> int | None: ...
+    def zscore(self, name: _Key, value: _Value) -> float | None: ...
+    def zunion(self, keys, aggregate: Incomplete | None = None, withscores: bool = False): ...
+    def zunionstore(self, dest: _Key, keys: Iterable[_Key], aggregate: Literal["SUM", "MIN", "MAX"] | None = None) -> int: ...
+    def zmscore(self, key, members): ...
+
+class AsyncSortedSetCommands(Generic[_StrType]):
+    """Stubs for sorted-set (Z*) commands on the asyncio client.
+
+    Mirrors the synchronous sorted-set stubs: ``withscores=True`` yields
+    ``(member, score)`` tuples whose score type is ``float`` unless a
+    ``score_cast_func`` overrides it; otherwise a plain ``list[_StrType]``.
+    """
+    async def zadd(
+        self,
+        name: _Key,
+        mapping: Mapping[_Key, _Value],
+        nx: bool = False,
+        xx: bool = False,
+        ch: bool = False,
+        incr: bool = False,
+        gt: Incomplete | None = False,
+        lt: Incomplete | None = False,
+    ) -> int: ...
+    async def zcard(self, name: _Key) -> int: ...
+    async def zcount(self, name: _Key, min: _Value, max: _Value) -> int: ...
+    async def zdiff(self, keys, withscores: bool = False): ...
+    async def zdiffstore(self, dest, keys): ...
+    async def zincrby(self, name: _Key, amount: float, value: _Value) -> float: ...
+    async def zinter(self, keys, aggregate: Incomplete | None = None, withscores: bool = False): ...
+    async def zinterstore(
+        self, dest: _Key, keys: Iterable[_Key], aggregate: Literal["SUM", "MIN", "MAX"] | None = None
+    ) -> int: ...
+    async def zlexcount(self, name: _Key, min: _Value, max: _Value) -> int: ...
+    async def zpopmax(self, name: _Key, count: int | None = None) -> list[tuple[_StrType, float]]: ...
+    async def zpopmin(self, name: _Key, count: int | None = None) -> list[tuple[_StrType, float]]: ...
+    async def zrandmember(self, key, count: Incomplete | None = None, withscores: bool = False): ...
+    # bzpopmax/bzpopmin: timeout 0 blocks indefinitely (never None); any
+    # other timeout may expire and return None.
+    @overload
+    async def bzpopmax(self, keys: _Key | Iterable[_Key], timeout: Literal[0] = 0) -> tuple[_StrType, _StrType, float]: ...
+    @overload
+    async def bzpopmax(self, keys: _Key | Iterable[_Key], timeout: float) -> tuple[_StrType, _StrType, float] | None: ...
+    @overload
+    async def bzpopmin(self, keys: _Key | Iterable[_Key], timeout: Literal[0] = 0) -> tuple[_StrType, _StrType, float]: ...
+    @overload
+    async def bzpopmin(self, keys: _Key | Iterable[_Key], timeout: float) -> tuple[_StrType, _StrType, float] | None: ...
+    @overload
+    async def zrange(  # type: ignore[overload-overlap]
+        self,
+        name: _Key,
+        start: int,
+        end: int,
+        desc: bool,
+        withscores: Literal[True],
+        score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn],
+        byscore: bool = False,
+        bylex: bool = False,
+        offset: int | None = None,
+        num: int | None = None,
+    ) -> list[tuple[_StrType, _ScoreCastFuncReturn]]: ...
+    @overload
+    async def zrange(  # type: ignore[overload-overlap]
+        self,
+        name: _Key,
+        start: int,
+        end: int,
+        desc: bool,
+        withscores: Literal[True],
+        score_cast_func: Callable[[_StrType], float] = ...,
+        byscore: bool = False,
+        bylex: bool = False,
+        offset: int | None = None,
+        num: int | None = None,
+    ) -> list[tuple[_StrType, float]]: ...
+    @overload
+    async def zrange(  # type: ignore[overload-overlap]
+        self,
+        name: _Key,
+        start: int,
+        end: int,
+        desc: bool = False,
+        *,
+        withscores: Literal[True],
+        score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn],
+        byscore: bool = False,
+        bylex: bool = False,
+        offset: int | None = None,
+        num: int | None = None,
+    ) -> list[tuple[_StrType, _ScoreCastFuncReturn]]: ...
+    @overload
+    async def zrange(  # type: ignore[overload-overlap]
+        self,
+        name: _Key,
+        start: int,
+        end: int,
+        desc: bool = False,
+        *,
+        withscores: Literal[True],
+        score_cast_func: Callable[[_StrType], float] = ...,
+        byscore: bool = False,
+        bylex: bool = False,
+        offset: int | None = None,
+        num: int | None = None,
+    ) -> list[tuple[_StrType, float]]: ...
+    @overload
+    async def zrange(
+        self,
+        name: _Key,
+        start: int,
+        end: int,
+        desc: bool = False,
+        withscores: bool = False,
+        score_cast_func: Callable[[_StrType], Any] = ...,
+        byscore: bool = False,
+        bylex: bool = False,
+        offset: int | None = None,
+        num: int | None = None,
+    ) -> list[_StrType]: ...
+    @overload
+    async def zrevrange(
+        self,
+        name: _Key,
+        start: int,
+        end: int,
+        withscores: Literal[True],
+        score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn],
+    ) -> list[tuple[_StrType, _ScoreCastFuncReturn]]: ...
+    @overload
+    async def zrevrange(  # type: ignore[overload-overlap]
+        self, name: _Key, start: int, end: int, withscores: Literal[True]
+    ) -> list[tuple[_StrType, float]]: ...
+    @overload
+    async def zrevrange(
+        self, name: _Key, start: int, end: int, withscores: bool = False, score_cast_func: Callable[[Any], Any] = ...
+    ) -> list[_StrType]: ...
+    async def zrangestore(
+        self,
+        dest,
+        name,
+        start,
+        end,
+        byscore: bool = False,
+        bylex: bool = False,
+        desc: bool = False,
+        offset: Incomplete | None = None,
+        num: Incomplete | None = None,
+    ): ...
+    async def zrangebylex(
+        self, name: _Key, min: _Value, max: _Value, start: int | None = None, num: int | None = None
+    ) -> list[_StrType]: ...
+    async def zrevrangebylex(
+        self, name: _Key, max: _Value, min: _Value, start: int | None = None, num: int | None = None
+    ) -> list[_StrType]: ...
+    @overload
+    async def zrangebyscore(  # type: ignore[overload-overlap]
+        self,
+        name: _Key,
+        min: _Value,
+        max: _Value,
+        start: int | None = None,
+        num: int | None = None,
+        *,
+        withscores: Literal[True],
+        score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn],
+    ) -> list[tuple[_StrType, _ScoreCastFuncReturn]]: ...
+    @overload
+    async def zrangebyscore(  # type: ignore[overload-overlap]
+        self, name: _Key, min: _Value, max: _Value, start: int | None = None, num: int | None = None, *, withscores: Literal[True]
+    ) -> list[tuple[_StrType, float]]: ...
+    @overload
+    async def zrangebyscore(
+        self,
+        name: _Key,
+        min: _Value,
+        max: _Value,
+        start: int | None = None,
+        num: int | None = None,
+        withscores: bool = False,
+        score_cast_func: Callable[[_StrType], Any] = ...,
+    ) -> list[_StrType]: ...
+    @overload
+    async def zrevrangebyscore(  # type: ignore[overload-overlap]
+        self,
+        name: _Key,
+        max: _Value,
+        min: _Value,
+        start: int | None = None,
+        num: int | None = None,
+        *,
+        withscores: Literal[True],
+        score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn],
+    ) -> list[tuple[_StrType, _ScoreCastFuncReturn]]: ...
+    @overload
+    async def zrevrangebyscore(  # type: ignore[overload-overlap]
+        self, name: _Key, max: _Value, min: _Value, start: int | None = None, num: int | None = None, *, withscores: Literal[True]
+    ) -> list[tuple[_StrType, float]]: ...
+    @overload
+    async def zrevrangebyscore(
+        self,
+        name: _Key,
+        max: _Value,
+        min: _Value,
+        start: int | None = None,
+        num: int | None = None,
+        withscores: bool = False,
+        score_cast_func: Callable[[_StrType], Any] = ...,
+    ) -> list[_StrType]: ...
+    async def zrank(self, name: _Key, value: _Value, withscore: bool = False) -> int | None: ...
+    async def zrem(self, name: _Key, *values: _Value) -> int: ...
+    async def zremrangebylex(self, name: _Key, min: _Value, max: _Value) -> int: ...
+    async def zremrangebyrank(self, name: _Key, min: int, max: int) -> int: ...
+    async def zremrangebyscore(self, name: _Key, min: _Value, max: _Value) -> int: ...
+    async def zrevrank(self, name: _Key, value: _Value, withscore: bool = False) -> int | None: ...
+    async def zscore(self, name: _Key, value: _Value) -> float | None: ...
+    async def zunion(self, keys, aggregate: Incomplete | None = None, withscores: bool = False): ...
+    async def zunionstore(
+        self, dest: _Key, keys: Iterable[_Key], aggregate: Literal["SUM", "MIN", "MAX"] | None = None
+    ) -> int: ...
+    async def zmscore(self, key, members): ...
+
+class HyperlogCommands:
+    def pfadd(self, name: _Key, *values: _Value) -> int: ...
+    def pfcount(self, name: _Key) -> int: ...
+    def pfmerge(self, dest: _Key, *sources: _Key) -> bool: ...
+
+class AsyncHyperlogCommands:
+    async def pfadd(self, name: _Key, *values: _Value) -> int: ...
+    async def pfcount(self, name: _Key) -> int: ...
+    async def pfmerge(self, dest: _Key, *sources: _Key) -> bool: ...
+
+class HashCommands(Generic[_StrType]):
+    """Stubs for hash (H*) commands on the synchronous client."""
+    def hdel(self, name: _Key, *keys: _Key) -> int: ...
+    def hexists(self, name: _Key, key: _Key) -> bool: ...
+    def hget(self, name: _Key, key: _Key) -> _StrType | None: ...
+    def hgetall(self, name: _Key) -> dict[_StrType, _StrType]: ...
+    def hincrby(self, name: _Key, key: _Key, amount: int = 1) -> int: ...
+    def hincrbyfloat(self, name: _Key, key: _Key, amount: float = 1.0) -> float: ...
+    def hkeys(self, name: _Key) -> list[_StrType]: ...
+    def hlen(self, name: _Key) -> int: ...
+    # hset overloads: either a key/value pair (optionally plus a mapping),
+    # explicit key=None/value=None with a required mapping, or a
+    # keyword-only mapping with no key/value at all.
+    @overload
+    def hset(
+        self, name: _Key, key: _Key, value: _Value, mapping: Mapping[_Key, _Value] | None = None, items: Incomplete | None = None
+    ) -> int: ...
+    @overload
+    def hset(
+        self, name: _Key, key: None, value: None, mapping: Mapping[_Key, _Value], items: Incomplete | None = None
+    ) -> int: ...
+    @overload
+    def hset(self, name: _Key, *, mapping: Mapping[_Key, _Value], items: Incomplete | None = None) -> int: ...
+    def hsetnx(self, name: _Key, key: _Key, value: _Value) -> int: ...
+    def hmset(self, name: _Key, mapping: Mapping[_Key, _Value]) -> bool: ...
+    def hmget(self, name: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> list[_StrType | None]: ...
+    def hvals(self, name: _Key) -> list[_StrType]: ...
+    def hstrlen(self, name, key): ...
+
+class AsyncHashCommands(Generic[_StrType]):
+    """Stubs for hash (H*) commands on the asyncio client; mirrors the
+    synchronous hash stubs method-for-method.
+    """
+    async def hdel(self, name: _Key, *keys: _Key) -> int: ...
+    async def hexists(self, name: _Key, key: _Key) -> bool: ...
+    async def hget(self, name: _Key, key: _Key) -> _StrType | None: ...
+    async def hgetall(self, name: _Key) -> dict[_StrType, _StrType]: ...
+    async def hincrby(self, name: _Key, key: _Key, amount: int = 1) -> int: ...
+    async def hincrbyfloat(self, name: _Key, key: _Key, amount: float = 1.0) -> float: ...
+    async def hkeys(self, name: _Key) -> list[_StrType]: ...
+    async def hlen(self, name: _Key) -> int: ...
+    # hset overloads: key/value pair (optionally plus mapping), explicit
+    # None key/value with required mapping, or keyword-only mapping.
+    @overload
+    async def hset(
+        self, name: _Key, key: _Key, value: _Value, mapping: Mapping[_Key, _Value] | None = None, items: Incomplete | None = None
+    ) -> int: ...
+    @overload
+    async def hset(
+        self, name: _Key, key: None, value: None, mapping: Mapping[_Key, _Value], items: Incomplete | None = None
+    ) -> int: ...
+    @overload
+    async def hset(self, name: _Key, *, mapping: Mapping[_Key, _Value], items: Incomplete | None = None) -> int: ...
+    async def hsetnx(self, name: _Key, key: _Key, value: _Value) -> int: ...
+    async def hmset(self, name: _Key, mapping: Mapping[_Key, _Value]) -> bool: ...
+    async def hmget(self, name: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> list[_StrType | None]: ...
+    async def hvals(self, name: _Key) -> list[_StrType]: ...
+    async def hstrlen(self, name, key): ...
+
+class AsyncScript:
+    def __init__(self, registered_client: AsyncValkey[Any], script: ScriptTextT) -> None: ...
+    async def __call__(
+        self, keys: Sequence[KeyT] | None = None, args: Iterable[EncodableT] | None = None, client: AsyncValkey[Any] | None = None
+    ): ...
+
+class PubSubCommands:
+    def publish(self, channel: _Key, message: _Key, **kwargs: _CommandOptions) -> int: ...
+    def pubsub_channels(self, pattern: _Key = "*", **kwargs: _CommandOptions) -> list[str]: ...
+    def pubsub_numpat(self, **kwargs: _CommandOptions) -> int: ...
+    def pubsub_numsub(self, *args: _Key, **kwargs: _CommandOptions) -> list[tuple[str, int]]: ...
+
+class AsyncPubSubCommands:
+    async def publish(self, channel: _Key, message: _Key, **kwargs: _CommandOptions) -> int: ...
+    async def pubsub_channels(self, pattern: _Key = "*", **kwargs: _CommandOptions) -> list[str]: ...
+    async def pubsub_numpat(self, **kwargs: _CommandOptions) -> int: ...
+    async def pubsub_numsub(self, *args: _Key, **kwargs: _CommandOptions) -> list[tuple[str, int]]: ...
+
+class ScriptCommands(Generic[_StrType]):
+    def eval(self, script, numkeys, *keys_and_args): ...
+    def evalsha(self, sha, numkeys, *keys_and_args): ...
+    def script_exists(self, *args): ...
+    def script_debug(self, *args): ...
+    def script_flush(self, sync_type: Incomplete | None = None): ...
+    def script_kill(self): ...
+    def script_load(self, script): ...
+    def register_script(self, script: str | _StrType) -> Script: ...
+
+class AsyncScriptCommands(Generic[_StrType]):
+    async def eval(self, script, numkeys, *keys_and_args): ...
+    async def evalsha(self, sha, numkeys, *keys_and_args): ...
+    async def script_exists(self, *args): ...
+    async def script_debug(self, *args): ...
+    async def script_flush(self, sync_type: Incomplete | None = None): ...
+    async def script_kill(self): ...
+    async def script_load(self, script): ...
+    def register_script(self, script: ScriptTextT) -> AsyncScript: ...
+
+class GeoCommands:
+    def geoadd(self, name, values, nx: bool = False, xx: bool = False, ch: bool = False): ...
+    def geodist(self, name, place1, place2, unit: Incomplete | None = None): ...
+    def geohash(self, name, *values): ...
+    def geopos(self, name, *values): ...
+    def georadius(
+        self,
+        name,
+        longitude,
+        latitude,
+        radius,
+        unit: Incomplete | None = None,
+        withdist: bool = False,
+        withcoord: bool = False,
+        withhash: bool = False,
+        count: Incomplete | None = None,
+        sort: Incomplete | None = None,
+        store: Incomplete | None = None,
+        store_dist: Incomplete | None = None,
+        any: bool = False,
+    ): ...
+    def georadiusbymember(
+        self,
+        name,
+        member,
+        radius,
+        unit: Incomplete | None = None,
+        withdist: bool = False,
+        withcoord: bool = False,
+        withhash: bool = False,
+        count: Incomplete | None = None,
+        sort: Incomplete | None = None,
+        store: Incomplete | None = None,
+        store_dist: Incomplete | None = None,
+        any: bool = False,
+    ): ...
+    def geosearch(
+        self,
+        name,
+        member: Incomplete | None = None,
+        longitude: Incomplete | None = None,
+        latitude: Incomplete | None = None,
+        unit: str = "m",
+        radius: Incomplete | None = None,
+        width: Incomplete | None = None,
+        height: Incomplete | None = None,
+        sort: Incomplete | None = None,
+        count: Incomplete | None = None,
+        any: bool = False,
+        withcoord: bool = False,
+        withdist: bool = False,
+        withhash: bool = False,
+    ): ...
+    def geosearchstore(
+        self,
+        dest,
+        name,
+        member: Incomplete | None = None,
+        longitude: Incomplete | None = None,
+        latitude: Incomplete | None = None,
+        unit: str = "m",
+        radius: Incomplete | None = None,
+        width: Incomplete | None = None,
+        height: Incomplete | None = None,
+        sort: Incomplete | None = None,
+        count: Incomplete | None = None,
+        any: bool = False,
+        storedist: bool = False,
+    ): ...
+
+class AsyncGeoCommands:
+    async def geoadd(self, name, values, nx: bool = False, xx: bool = False, ch: bool = False): ...
+    async def geodist(self, name, place1, place2, unit: Incomplete | None = None): ...
+    async def geohash(self, name, *values): ...
+    async def geopos(self, name, *values): ...
+    async def georadius(
+        self,
+        name,
+        longitude,
+        latitude,
+        radius,
+        unit: Incomplete | None = None,
+        withdist: bool = False,
+        withcoord: bool = False,
+        withhash: bool = False,
+        count: Incomplete | None = None,
+        sort: Incomplete | None = None,
+        store: Incomplete | None = None,
+        store_dist: Incomplete | None = None,
+        any: bool = False,
+    ): ...
+    async def georadiusbymember(
+        self,
+        name,
+        member,
+        radius,
+        unit: Incomplete | None = None,
+        withdist: bool = False,
+        withcoord: bool = False,
+        withhash: bool = False,
+        count: Incomplete | None = None,
+        sort: Incomplete | None = None,
+        store: Incomplete | None = None,
+        store_dist: Incomplete | None = None,
+        any: bool = False,
+    ): ...
+    async def geosearch(
+        self,
+        name,
+        member: Incomplete | None = None,
+        longitude: Incomplete | None = None,
+        latitude: Incomplete | None = None,
+        unit: str = "m",
+        radius: Incomplete | None = None,
+        width: Incomplete | None = None,
+        height: Incomplete | None = None,
+        sort: Incomplete | None = None,
+        count: Incomplete | None = None,
+        any: bool = False,
+        withcoord: bool = False,
+        withdist: bool = False,
+        withhash: bool = False,
+    ): ...
+    async def geosearchstore(
+        self,
+        dest,
+        name,
+        member: Incomplete | None = None,
+        longitude: Incomplete | None = None,
+        latitude: Incomplete | None = None,
+        unit: str = "m",
+        radius: Incomplete | None = None,
+        width: Incomplete | None = None,
+        height: Incomplete | None = None,
+        sort: Incomplete | None = None,
+        count: Incomplete | None = None,
+        any: bool = False,
+        storedist: bool = False,
+    ): ...
+
+class ModuleCommands:
+    def module_load(self, path, *args): ...
+    def module_loadex(self, path: str, options: list[str] | None = None, args: list[str] | None = None): ...
+    def module_unload(self, name): ...
+    def module_list(self): ...
+    def command_info(self): ...
+    def command_count(self): ...
+    def command_list(self, module: str | None = None, category: str | None = None, pattern: str | None = None): ...
+    def command_getkeysandflags(self, *args: str): ...
+    def command_getkeys(self, *args): ...
+    def command(self): ...
+
+class Script:
+    def __init__(self, registered_client, script) -> None: ...
+    def __call__(self, keys=[], args=[], client: Incomplete | None = None): ...
+
+class BitFieldOperation:
+    def __init__(self, client, key, default_overflow: Incomplete | None = None): ...
+    def reset(self) -> None: ...
+    def overflow(self, overflow): ...
+    def incrby(self, fmt, offset, increment, overflow: Incomplete | None = None): ...
+    def get(self, fmt, offset): ...
+    def set(self, fmt, offset, value): ...
+    @property
+    def command(self): ...
+    def execute(self): ...
+
+class AsyncModuleCommands(ModuleCommands):
+    async def module_loadex(self, path: str, options: list[str] | None = None, args: list[str] | None = None): ...
+    async def command_info(self) -> None: ...
+    async def command_list(self, module: str | None = None, category: str | None = None, pattern: str | None = None): ...
+    async def command_getkeysandflags(self, *args: str): ...
+
+class ClusterCommands:
+    def cluster(self, cluster_arg: str, *args, **kwargs: _CommandOptions): ...
+    def readwrite(self, **kwargs: _CommandOptions) -> bool: ...
+    def readonly(self, **kwargs: _CommandOptions) -> bool: ...
+
+class AsyncClusterCommands:
+    async def cluster(self, cluster_arg: str, *args, **kwargs: _CommandOptions): ...
+    async def readwrite(self, **kwargs: _CommandOptions) -> bool: ...
+    async def readonly(self, **kwargs: _CommandOptions) -> bool: ...
+
+class FunctionCommands:
+    def function_load(self, code: str, replace: bool | None = False) -> Awaitable[str] | str: ...
+    def function_delete(self, library: str) -> Awaitable[str] | str: ...
+    def function_flush(self, mode: str = "SYNC") -> Awaitable[str] | str: ...
+    def function_list(self, library: str | None = "*", withcode: bool | None = False) -> Awaitable[list[Any]] | list[Any]: ...
+    def fcall(self, function, numkeys: int, *keys_and_args: list[Any] | None) -> Awaitable[str] | str: ...
+    def fcall_ro(self, function, numkeys: int, *keys_and_args: list[Any] | None) -> Awaitable[str] | str: ...
+    def function_dump(self) -> Awaitable[str] | str: ...
+    def function_restore(self, payload: str, policy: str | None = "APPEND") -> Awaitable[str] | str: ...
+    def function_kill(self) -> Awaitable[str] | str: ...
+    def function_stats(self) -> Awaitable[list[Any]] | list[Any]: ...
+
+class AsyncFunctionCommands:
+    async def function_load(self, code: str, replace: bool | None = False) -> Awaitable[str] | str: ...
+    async def function_delete(self, library: str) -> Awaitable[str] | str: ...
+    async def function_flush(self, mode: str = "SYNC") -> Awaitable[str] | str: ...
+    async def function_list(
+        self, library: str | None = "*", withcode: bool | None = False
+    ) -> Awaitable[list[Any]] | list[Any]: ...
+    async def fcall(self, function, numkeys: int, *keys_and_args: list[Any] | None) -> Awaitable[str] | str: ...
+    async def fcall_ro(self, function, numkeys: int, *keys_and_args: list[Any] | None) -> Awaitable[str] | str: ...
+    async def function_dump(self) -> Awaitable[str] | str: ...
+    async def function_restore(self, payload: str, policy: str | None = "APPEND") -> Awaitable[str] | str: ...
+    async def function_kill(self) -> Awaitable[str] | str: ...
+    async def function_stats(self) -> Awaitable[list[Any]] | list[Any]: ...
+
+class DataAccessCommands(
+    BasicKeyCommands[_StrType],
+    HyperlogCommands,
+    HashCommands[_StrType],
+    GeoCommands,
+    ListCommands[_StrType],
+    ScanCommands[_StrType],
+    SetCommands[_StrType],
+    StreamCommands,
+    SortedSetCommands[_StrType],
+): ...
+class AsyncDataAccessCommands(
+    AsyncBasicKeyCommands[_StrType],
+    AsyncHyperlogCommands,
+    AsyncHashCommands[_StrType],
+    AsyncGeoCommands,
+    AsyncListCommands[_StrType],
+    AsyncScanCommands[_StrType],
+    AsyncSetCommands[_StrType],
+    AsyncStreamCommands,
+    AsyncSortedSetCommands[_StrType],
+): ...
+class CoreCommands(
+    ACLCommands[_StrType],
+    ClusterCommands,
+    DataAccessCommands[_StrType],
+    ManagementCommands,
+    ModuleCommands,
+    PubSubCommands,
+    ScriptCommands[_StrType],
+): ...
+class AsyncCoreCommands(
+    AsyncACLCommands[_StrType],
+    AsyncClusterCommands,
+    AsyncDataAccessCommands[_StrType],
+    AsyncManagementCommands,
+    AsyncModuleCommands,
+    AsyncPubSubCommands,
+    AsyncScriptCommands[_StrType],
+    AsyncFunctionCommands,
+): ...
diff --git a/valkey/commands/graph/__init__.pyi b/valkey/commands/graph/__init__.pyi
new file mode 100644
index 00000000..222db4ef
--- /dev/null
+++ b/valkey/commands/graph/__init__.pyi
@@ -0,0 +1,45 @@
+from typing import Any
+
+from .commands import GraphCommands as GraphCommands
+from .edge import Edge as Edge
+from .node import Node as Node
+from .path import Path as Path
+
+class Graph(GraphCommands):
+    NAME: Any
+    client: Any
+    execute_command: Any
+    nodes: Any
+    edges: Any
+    version: int
+    def __init__(self, client, name=...) -> None: ...
+    @property
+    def name(self): ...
+    def get_label(self, idx): ...
+    def get_relation(self, idx): ...
+    def get_property(self, idx): ...
+    def add_node(self, node) -> None: ...
+    def add_edge(self, edge) -> None: ...
+    def call_procedure(self, procedure, *args, read_only: bool = False, **kwagrs): ...
+    def labels(self): ...
+    def relationship_types(self): ...
+    def property_keys(self): ...
+
+
+class AsyncGraph(GraphCommands):
+    NAME: Any
+    client: Any
+    execute_command: Any
+    nodes: Any
+    edges: Any
+    version: int
+    def __init__(self, client, name=...) -> None: ...
+    async def get_label(self, idx): ...
+    async def get_relation(self, idx): ...
+    async def get_property(self, idx): ...
+    async def add_node(self, node) -> None: ...
+    async def add_edge(self, edge) -> None: ...
+    async def call_procedure(self, procedure, *args, read_only: bool = False, **kwagrs): ...
+    async def labels(self): ...
+    async def relationship_types(self): ...
+    async def property_keys(self): ...
diff --git a/valkey/commands/graph/commands.pyi b/valkey/commands/graph/commands.pyi
new file mode 100644
index 00000000..b57418dd
--- /dev/null
+++ b/valkey/commands/graph/commands.pyi
@@ -0,0 +1,25 @@
+from _typeshed import Incomplete
+from typing import Any
+
+class GraphCommands:
+    def commit(self): ...
+    version: Any
+    def query(
+        self,
+        q,
+        params: Incomplete | None = None,
+        timeout: Incomplete | None = None,
+        read_only: bool = False,
+        profile: bool = False,
+    ): ...
+    def merge(self, pattern): ...
+    def delete(self): ...
+    nodes: Any
+    edges: Any
+    def flush(self) -> None: ...
+    def explain(self, query, params: Incomplete | None = None): ...
+    def bulk(self, **kwargs) -> None: ...
+    def profile(self, query): ...
+    def slowlog(self): ...
+    def config(self, name, value: Incomplete | None = None, set: bool = False): ...
+    def list_keys(self): ...
diff --git a/valkey/commands/graph/edge.pyi b/valkey/commands/graph/edge.pyi
new file mode 100644
index 00000000..3bd36b65
--- /dev/null
+++ b/valkey/commands/graph/edge.pyi
@@ -0,0 +1,14 @@
+from _typeshed import Incomplete
+from typing import Any
+
+class Edge:
+    id: Any
+    relation: Any
+    properties: Any
+    src_node: Any
+    dest_node: Any
+    def __init__(
+        self, src_node, relation, dest_node, edge_id: Incomplete | None = None, properties: Incomplete | None = None
+    ) -> None: ...
+    def to_string(self): ...
+    def __eq__(self, rhs): ...
diff --git a/valkey/commands/graph/exceptions.pyi b/valkey/commands/graph/exceptions.pyi
new file mode 100644
index 00000000..6069e055
--- /dev/null
+++ b/valkey/commands/graph/exceptions.pyi
@@ -0,0 +1,5 @@
+from typing import Any
+
+class VersionMismatchException(Exception):
+    version: Any
+    def __init__(self, version) -> None: ...
diff --git a/valkey/commands/graph/execution_plan.py b/valkey/commands/graph/execution_plan.py
index 179a80cc..0f07427c 100644
--- a/valkey/commands/graph/execution_plan.py
+++ b/valkey/commands/graph/execution_plan.py
@@ -166,10 +166,10 @@ def _create_operation(args):
             args.pop(0)
             if len(args) > 0 and "Records produced" in args[-1]:
                 records_produced = int(
-                    re.search("Records produced: (\\d+)", args[-1]).group(1)
+                    re.search("Records produced: (\\d+)", args[-1]).group(1)  # type: ignore[union-attr]
                 )
                 execution_time = float(
-                    re.search("Execution time: (\\d+.\\d+) ms", args[-1]).group(1)
+                    re.search("Execution time: (\\d+.\\d+) ms", args[-1]).group(1)  # type: ignore[union-attr]
                 )
                 profile_stats = ProfileStats(records_produced, execution_time)
                 args.pop(-1)
@@ -194,7 +194,7 @@ def _create_operation(args):
                 # if the operation is child of the current operation
                 # add it as child and set as current operation
                 child = _create_operation(current_op.split("|"))
-                current.append_child(child)
+                current.append_child(child)  # type: ignore[union-attr]
                 stack.append(current)
                 current = child
                 level += 1
diff --git a/valkey/commands/graph/node.pyi b/valkey/commands/graph/node.pyi
new file mode 100644
index 00000000..e7a65537
--- /dev/null
+++ b/valkey/commands/graph/node.pyi
@@ -0,0 +1,18 @@
+from _typeshed import Incomplete
+from typing import Any
+
+class Node:
+    id: Any
+    alias: Any
+    label: Any
+    labels: Any
+    properties: Any
+    def __init__(
+        self,
+        node_id: Incomplete | None = None,
+        alias: Incomplete | None = None,
+        label: str | list[str] | None = None,
+        properties: Incomplete | None = None,
+    ) -> None: ...
+    def to_string(self): ...
+    def __eq__(self, rhs): ...
diff --git a/valkey/commands/graph/path.pyi b/valkey/commands/graph/path.pyi
new file mode 100644
index 00000000..69106f89
--- /dev/null
+++ b/valkey/commands/graph/path.pyi
@@ -0,0 +1,18 @@
+from typing import Any
+
+class Path:
+    append_type: Any
+    def __init__(self, nodes, edges) -> None: ...
+    @classmethod
+    def new_empty_path(cls): ...
+    def nodes(self): ...
+    def edges(self): ...
+    def get_node(self, index): ...
+    def get_relationship(self, index): ...
+    def first_node(self): ...
+    def last_node(self): ...
+    def edge_count(self): ...
+    def nodes_count(self): ...
+    def add_node(self, node): ...
+    def add_edge(self, edge): ...
+    def __eq__(self, other): ...
diff --git a/valkey/commands/graph/query_result.pyi b/valkey/commands/graph/query_result.pyi
new file mode 100644
index 00000000..d9f8b514
--- /dev/null
+++ b/valkey/commands/graph/query_result.pyi
@@ -0,0 +1,74 @@
+from typing import Any, ClassVar, Literal
+
+LABELS_ADDED: str
+NODES_CREATED: str
+NODES_DELETED: str
+RELATIONSHIPS_DELETED: str
+PROPERTIES_SET: str
+RELATIONSHIPS_CREATED: str
+INDICES_CREATED: str
+INDICES_DELETED: str
+CACHED_EXECUTION: str
+INTERNAL_EXECUTION_TIME: str
+STATS: Any
+
+class ResultSetColumnTypes:
+    COLUMN_UNKNOWN: ClassVar[Literal[0]]
+    COLUMN_SCALAR: ClassVar[Literal[1]]
+    COLUMN_NODE: ClassVar[Literal[2]]
+    COLUMN_RELATION: ClassVar[Literal[3]]
+
+class ResultSetScalarTypes:
+    VALUE_UNKNOWN: ClassVar[Literal[0]]
+    VALUE_NULL: ClassVar[Literal[1]]
+    VALUE_STRING: ClassVar[Literal[2]]
+    VALUE_INTEGER: ClassVar[Literal[3]]
+    VALUE_BOOLEAN: ClassVar[Literal[4]]
+    VALUE_DOUBLE: ClassVar[Literal[5]]
+    VALUE_ARRAY: ClassVar[Literal[6]]
+    VALUE_EDGE: ClassVar[Literal[7]]
+    VALUE_NODE: ClassVar[Literal[8]]
+    VALUE_PATH: ClassVar[Literal[9]]
+    VALUE_MAP: ClassVar[Literal[10]]
+    VALUE_POINT: ClassVar[Literal[11]]
+
+class QueryResult:
+    graph: Any
+    header: Any
+    result_set: Any
+    def __init__(self, graph, response, profile: bool = False) -> None: ...
+    def parse_results(self, raw_result_set) -> None: ...
+    statistics: Any
+    def parse_statistics(self, raw_statistics) -> None: ...
+    def parse_header(self, raw_result_set): ...
+    def parse_records(self, raw_result_set): ...
+    def parse_entity_properties(self, props): ...
+    def parse_string(self, cell): ...
+    def parse_node(self, cell): ...
+    def parse_edge(self, cell): ...
+    def parse_path(self, cell): ...
+    def parse_map(self, cell): ...
+    def parse_point(self, cell): ...
+    def parse_scalar(self, cell): ...
+    def parse_profile(self, response) -> None: ...
+    def is_empty(self): ...
+    @property
+    def labels_added(self): ...
+    @property
+    def nodes_created(self): ...
+    @property
+    def nodes_deleted(self): ...
+    @property
+    def properties_set(self): ...
+    @property
+    def relationships_created(self): ...
+    @property
+    def relationships_deleted(self): ...
+    @property
+    def indices_created(self): ...
+    @property
+    def indices_deleted(self): ...
+    @property
+    def cached_execution(self): ...
+    @property
+    def run_time_ms(self): ...
diff --git a/valkey/commands/helpers.pyi b/valkey/commands/helpers.pyi
new file mode 100644
index 00000000..b4e5ac7f
--- /dev/null
+++ b/valkey/commands/helpers.pyi
@@ -0,0 +1,10 @@
+def list_or_args(keys, args): ...
+def nativestr(x): ...
+def delist(x): ...
+def parse_to_list(response): ...
+def parse_list_to_dict(response): ...
+def parse_to_dict(response): ...
+def random_string(length: int = 10) -> str: ...
+def quote_string(v): ...
+def decode_dict_keys(obj): ...
+def stringify_param_value(value): ...
diff --git a/valkey/commands/json/__init__.pyi b/valkey/commands/json/__init__.pyi
new file mode 100644
index 00000000..f9e8825b
--- /dev/null
+++ b/valkey/commands/json/__init__.pyi
@@ -0,0 +1,15 @@
+from _typeshed import Incomplete
+from typing import Any
+
+from ...client import Pipeline as ClientPipeline
+from .commands import JSONCommands
+
+class JSON(JSONCommands):
+    MODULE_CALLBACKS: dict[str, Any]
+    client: Any
+    execute_command: Any
+    MODULE_VERSION: Incomplete | None
+    def __init__(self, client, version: Incomplete | None = None, decoder=..., encoder=...) -> None: ...
+    def pipeline(self, transaction: bool = True, shard_hint: Incomplete | None = None) -> Pipeline: ...
+
+class Pipeline(JSONCommands, ClientPipeline[Incomplete]): ...  # type: ignore[misc]
diff --git a/valkey/commands/json/commands.pyi b/valkey/commands/json/commands.pyi
new file mode 100644
index 00000000..38d4d4c6
--- /dev/null
+++ b/valkey/commands/json/commands.pyi
@@ -0,0 +1,32 @@
+from _typeshed import Incomplete
+
+class JSONCommands:
+    def arrappend(self, name: str, path: str | None = ".", *args) -> list[int | None]: ...
+    def arrindex(
+        self, name: str, path: str, scalar: int, start: int | None = None, stop: int | None = None
+    ) -> list[int | None]: ...
+    def arrinsert(self, name: str, path: str, index: int, *args) -> list[int | None]: ...
+    def arrlen(self, name: str, path: str | None = ".") -> list[int | None]: ...
+    def arrpop(self, name: str, path: str | None = ".", index: int | None = -1) -> list[str | None]: ...
+    def arrtrim(self, name: str, path: str, start: int, stop: int) -> list[int | None]: ...
+    def type(self, name: str, path: str | None = ".") -> list[str]: ...
+    def resp(self, name: str, path: str | None = ".") -> list[Incomplete]: ...
+    def objkeys(self, name, path="."): ...
+    def objlen(self, name, path="."): ...
+    def numincrby(self, name, path, number): ...
+    def nummultby(self, name, path, number): ...
+    def clear(self, name, path="."): ...
+    def delete(self, key, path="."): ...
+    forget = delete
+    def get(self, name, *args, no_escape: bool = False): ...
+    def mget(self, keys, path): ...
+    def set(self, name, path, obj, nx: bool = False, xx: bool = False, decode_keys: bool = False): ...
+    def set_file(self, name, path, file_name, nx: bool = False, xx: bool = False, decode_keys: bool = False): ...
+    def set_path(self, json_path, root_folder, nx: bool = False, xx: bool = False, decode_keys: bool = False): ...
+    def strlen(self, name, path: Incomplete | None = None): ...
+    def toggle(self, name, path="."): ...
+    def strappend(self, name, value, path="."): ...
+    def debug(self, subcommand, key: Incomplete | None = None, path="."): ...
+    def jsonget(self, *args, **kwargs): ...
+    def jsonmget(self, *args, **kwargs): ...
+    def jsonset(self, *args, **kwargs): ...
diff --git a/valkey/commands/json/decoders.pyi b/valkey/commands/json/decoders.pyi
new file mode 100644
index 00000000..ccea2438
--- /dev/null
+++ b/valkey/commands/json/decoders.pyi
@@ -0,0 +1,4 @@
+def bulk_of_jsons(d): ...
+def decode_dict_keys(obj): ...
+def unstring(obj): ...
+def decode_list(b): ...
diff --git a/valkey/commands/json/path.pyi b/valkey/commands/json/path.pyi
new file mode 100644
index 00000000..bbc35c4f
--- /dev/null
+++ b/valkey/commands/json/path.pyi
@@ -0,0 +1,5 @@
+class Path:
+    strPath: str
+    @staticmethod
+    def root_path() -> str: ...
+    def __init__(self, path: str) -> None: ...
diff --git a/valkey/commands/parser.pyi b/valkey/commands/parser.pyi
new file mode 100644
index 00000000..f17afa28
--- /dev/null
+++ b/valkey/commands/parser.pyi
@@ -0,0 +1,8 @@
+from valkey.client import AbstractValkey
+from valkey.typing import EncodableT
+
+class CommandsParser:
+    commands: dict[str, str]
+    def __init__(self, valkey_connection: AbstractValkey) -> None: ...
+    def initialize(self, r: AbstractValkey) -> None: ...
+    def get_keys(self, valkey_conn: AbstractValkey, *args: EncodableT) -> list[EncodableT] | None: ...
diff --git a/valkey/commands/redismodules.pyi b/valkey/commands/redismodules.pyi
new file mode 100644
index 00000000..129b2a17
--- /dev/null
+++ b/valkey/commands/redismodules.pyi
@@ -0,0 +1,14 @@
+from .json import JSON
+from .search import Search
+from .timeseries import TimeSeries
+
+class ValkeyModuleCommands:
+    def json(self, encoder=..., decoder=...) -> JSON: ...
+    def ft(self, index_name: str = "idx") -> Search: ...
+    def ts(self) -> TimeSeries: ...
+    def bf(self): ...
+    def cf(self): ...
+    def cms(self): ...
+    def topk(self): ...
+    def tdigest(self): ...
+    def graph(self, index_name: str = "idx"): ...
diff --git a/valkey/commands/search/__init__.pyi b/valkey/commands/search/__init__.pyi
new file mode 100644
index 00000000..4776dfc9
--- /dev/null
+++ b/valkey/commands/search/__init__.pyi
@@ -0,0 +1,40 @@
+from _typeshed import Incomplete
+
+from .commands import SearchCommands
+
+class Search(SearchCommands):
+    class BatchIndexer:
+        def __init__(self, client, chunk_size: int = 1000) -> None: ...
+        def add_document(
+            self,
+            doc_id,
+            nosave: bool = False,
+            score: float = 1.0,
+            payload: Incomplete | None = None,
+            replace: bool = False,
+            partial: bool = False,
+            no_create: bool = False,
+            **fields,
+        ): ...
+        def add_document_hash(self, doc_id, score: float = 1.0, replace: bool = False): ...
+        def commit(self): ...
+
+    def __init__(self, client, index_name: str = "idx") -> None: ...
+
+class AsyncSearch(SearchCommands):
+    class BatchIndexer:
+        def __init__(self, client, chunk_size: int = 1000) -> None: ...
+        async def add_document(
+            self,
+            doc_id,
+            nosave: bool = False,
+            score: float = 1.0,
+            payload: Incomplete | None = None,
+            replace: bool = False,
+            partial: bool = False,
+            no_create: bool = False,
+            **fields,
+        ): ...
+        async def commit(self): ...
+
+    def __init__(self, client, index_name: str = "idx") -> None: ...
diff --git a/valkey/commands/search/aggregation.py b/valkey/commands/search/aggregation.py
index 45172380..6c523216 100644
--- a/valkey/commands/search/aggregation.py
+++ b/valkey/commands/search/aggregation.py
@@ -22,7 +22,7 @@ class Reducer:
     See the `valkeyearch.reducers` module for the actual reducers.
     """
 
-    NAME = None
+    NAME: Union[str, None] = None
 
     def __init__(self, *args: List[str]) -> None:
         self._args = args
diff --git a/valkey/commands/search/aggregation.pyi b/valkey/commands/search/aggregation.pyi
new file mode 100644
index 00000000..f5200332
--- /dev/null
+++ b/valkey/commands/search/aggregation.pyi
@@ -0,0 +1,53 @@
+from typing import Any, ClassVar, Literal, Union
+
+FIELDNAME: Any
+
+class Limit:
+    offset: Any
+    count: Any
+    def __init__(self, offset: int = 0, count: int = 0) -> None: ...
+    def build_args(self): ...
+
+class Reducer:
+    NAME: ClassVar[Union[str, None]]
+    def __init__(self, *args) -> None: ...
+    def alias(self, alias): ...
+    @property
+    def args(self): ...
+
+class SortDirection:
+    DIRSTRING: ClassVar[str | None]
+    field: Any
+    def __init__(self, field) -> None: ...
+
+class Asc(SortDirection):
+    DIRSTRING: ClassVar[Literal["ASC"]]
+
+class Desc(SortDirection):
+    DIRSTRING: ClassVar[Literal["DESC"]]
+
+class AggregateRequest:
+    def __init__(self, query: str = "*") -> None: ...
+    def load(self, *fields): ...
+    def group_by(self, fields, *reducers): ...
+    def apply(self, **kwexpr): ...
+    def limit(self, offset, num): ...
+    def sort_by(self, *fields, **kwargs): ...
+    def filter(self, expressions): ...
+    def with_schema(self): ...
+    def verbatim(self): ...
+    def cursor(self, count: int = 0, max_idle: float = 0.0): ...
+    def build_args(self): ...
+
+class Cursor:
+    cid: Any
+    max_idle: int
+    count: int
+    def __init__(self, cid) -> None: ...
+    def build_args(self): ...
+
+class AggregateResult:
+    rows: Any
+    cursor: Any
+    schema: Any
+    def __init__(self, rows, cursor, schema) -> None: ...
diff --git a/valkey/commands/search/commands.pyi b/valkey/commands/search/commands.pyi
new file mode 100644
index 00000000..f8a2baf3
--- /dev/null
+++ b/valkey/commands/search/commands.pyi
@@ -0,0 +1,111 @@
+from _typeshed import Incomplete
+from collections.abc import Mapping
+from typing import Any, Literal
+from typing_extensions import TypeAlias
+
+from .aggregation import AggregateRequest, AggregateResult, Cursor
+from .query import Query
+from .result import Result
+
+_QueryParams: TypeAlias = Mapping[str, str | float]
+
+NUMERIC: Literal["NUMERIC"]
+
+CREATE_CMD: Literal["FT.CREATE"]
+ALTER_CMD: Literal["FT.ALTER"]
+SEARCH_CMD: Literal["FT.SEARCH"]
+ADD_CMD: Literal["FT.ADD"]
+ADDHASH_CMD: Literal["FT.ADDHASH"]
+DROP_CMD: Literal["FT.DROP"]
+EXPLAIN_CMD: Literal["FT.EXPLAIN"]
+EXPLAINCLI_CMD: Literal["FT.EXPLAINCLI"]
+DEL_CMD: Literal["FT.DEL"]
+AGGREGATE_CMD: Literal["FT.AGGREGATE"]
+PROFILE_CMD: Literal["FT.PROFILE"]
+CURSOR_CMD: Literal["FT.CURSOR"]
+SPELLCHECK_CMD: Literal["FT.SPELLCHECK"]
+DICT_ADD_CMD: Literal["FT.DICTADD"]
+DICT_DEL_CMD: Literal["FT.DICTDEL"]
+DICT_DUMP_CMD: Literal["FT.DICTDUMP"]
+GET_CMD: Literal["FT.GET"]
+MGET_CMD: Literal["FT.MGET"]
+CONFIG_CMD: Literal["FT.CONFIG"]
+TAGVALS_CMD: Literal["FT.TAGVALS"]
+ALIAS_ADD_CMD: Literal["FT.ALIASADD"]
+ALIAS_UPDATE_CMD: Literal["FT.ALIASUPDATE"]
+ALIAS_DEL_CMD: Literal["FT.ALIASDEL"]
+INFO_CMD: Literal["FT.INFO"]
+SUGADD_COMMAND: Literal["FT.SUGADD"]
+SUGDEL_COMMAND: Literal["FT.SUGDEL"]
+SUGLEN_COMMAND: Literal["FT.SUGLEN"]
+SUGGET_COMMAND: Literal["FT.SUGGET"]
+SYNUPDATE_CMD: Literal["FT.SYNUPDATE"]
+SYNDUMP_CMD: Literal["FT.SYNDUMP"]
+
+NOOFFSETS: Literal["NOOFFSETS"]
+NOFIELDS: Literal["NOFIELDS"]
+STOPWORDS: Literal["STOPWORDS"]
+WITHSCORES: Literal["WITHSCORES"]
+FUZZY: Literal["FUZZY"]
+WITHPAYLOADS: Literal["WITHPAYLOADS"]
+
+class SearchCommands:
+    def batch_indexer(self, chunk_size: int = 100): ...
+    def create_index(
+        self,
+        fields,
+        no_term_offsets: bool = False,
+        no_field_flags: bool = False,
+        stopwords: Incomplete | None = None,
+        definition: Incomplete | None = None,
+        max_text_fields: bool = False,  # added in 4.1.1
+        temporary: Incomplete | None = None,  # added in 4.1.1
+        no_highlight: bool = False,  # added in 4.1.1
+        no_term_frequencies: bool = False,  # added in 4.1.1
+        skip_initial_scan: bool = False,  # added in 4.1.1
+    ): ...
+    def alter_schema_add(self, fields): ...
+    def dropindex(self, delete_documents: bool = False): ...
+    def add_document(
+        self,
+        doc_id,
+        nosave: bool = False,
+        score: float = 1.0,
+        payload: Incomplete | None = None,
+        replace: bool = False,
+        partial: bool = False,
+        language: Incomplete | None = None,
+        no_create: bool = False,
+        **fields,
+    ): ...
+    def add_document_hash(self, doc_id, score: float = 1.0, language: Incomplete | None = None, replace: bool = False): ...
+    def delete_document(self, doc_id, conn: Incomplete | None = None, delete_actual_document: bool = False): ...
+    def load_document(self, id): ...
+    def get(self, *ids): ...
+    def info(self): ...
+    def get_params_args(self, query_params: _QueryParams) -> list[Any]: ...
+    def search(self, query: str | Query, query_params: _QueryParams | None = None) -> Result: ...
+    def explain(self, query: str | Query, query_params: _QueryParams | None = None): ...
+    def explain_cli(self, query): ...
+    def aggregate(self, query: AggregateRequest | Cursor, query_params: _QueryParams | None = None) -> AggregateResult: ...
+    def profile(
+        self, query: str | Query | AggregateRequest, limited: bool = False, query_params: Mapping[str, str | float] | None = None
+    ) -> tuple[Incomplete, Incomplete]: ...
+    def spellcheck(
+        self, query, distance: Incomplete | None = None, include: Incomplete | None = None, exclude: Incomplete | None = None
+    ): ...
+    def dict_add(self, name, *terms): ...
+    def dict_del(self, name, *terms): ...
+    def dict_dump(self, name): ...
+    def config_set(self, option: str, value: str) -> bool: ...
+    def config_get(self, option: str) -> dict[str, str]: ...
+    def tagvals(self, tagfield): ...
+    def aliasadd(self, alias): ...
+    def aliasupdate(self, alias): ...
+    def aliasdel(self, alias): ...
+    def sugadd(self, key, *suggestions, **kwargs): ...
+    def suglen(self, key): ...
+    def sugdel(self, key, string): ...
+    def sugget(self, key, prefix, fuzzy: bool = False, num: int = 10, with_scores: bool = False, with_payloads: bool = False): ...
+    def synupdate(self, groupid, skipinitial: bool = False, *terms): ...
+    def syndump(self): ...
diff --git a/valkey/commands/search/field.py b/valkey/commands/search/field.py
index 72907ae4..f9b25b62 100644
--- a/valkey/commands/search/field.py
+++ b/valkey/commands/search/field.py
@@ -1,4 +1,4 @@
-from typing import List
+from typing import List, Union
 
 from valkey import DataError
 
@@ -18,10 +18,10 @@ class Field:
     def __init__(
         self,
         name: str,
-        args: List[str] = None,
+        args: Union[List[str], None] = None,
         sortable: bool = False,
         no_index: bool = False,
-        as_name: str = None,
+        as_name: Union[str, None] = None,
     ):
         if args is None:
             args = []
@@ -63,11 +63,11 @@ def __init__(
         name: str,
         weight: float = 1.0,
         no_stem: bool = False,
-        phonetic_matcher: str = None,
+        phonetic_matcher: Union[str, None] = None,
         withsuffixtrie: bool = False,
         **kwargs,
     ):
-        Field.__init__(self, name, args=[Field.TEXT, Field.WEIGHT, weight], **kwargs)
+        Field.__init__(self, name, args=[Field.TEXT, Field.WEIGHT, weight], **kwargs)  # type: ignore[list-item]
 
         if no_stem:
             Field.append_arg(self, self.NOSTEM)
@@ -148,7 +148,7 @@ class VectorField(Field):
     See https://oss.valkey.com/valkeyearch/Vectors/#vector_fields.
     """
 
-    def __init__(self, name: str, algorithm: str, attributes: dict, **kwargs):
+    def __init__(self, name: str, algorithm: str, attributes: dict, **kwargs):  # type: ignore[type-arg]
         """
         Create Vector Field. Notice that Vector cannot have sortable or no_index tag,
         although it's also a Field.
@@ -180,5 +180,5 @@ def __init__(self, name: str, algorithm: str, attributes: dict, **kwargs):
             attr_li.extend([key, value])
 
         Field.__init__(
-            self, name, args=[Field.VECTOR, algorithm, len(attr_li), *attr_li], **kwargs
+            self, name, args=[Field.VECTOR, algorithm, len(attr_li), *attr_li], **kwargs  # type: ignore[list-item]
         )
diff --git a/valkey/commands/search/query.pyi b/valkey/commands/search/query.pyi
new file mode 100644
index 00000000..eb1846ba
--- /dev/null
+++ b/valkey/commands/search/query.pyi
@@ -0,0 +1,52 @@
+from _typeshed import Incomplete
+from typing import Any
+
+class Query:
+    def __init__(self, query_string) -> None: ...
+    def query_string(self): ...
+    def limit_ids(self, *ids): ...
+    def return_fields(self, *fields): ...
+    def return_field(self, field, as_field: Incomplete | None = None): ...
+    def summarize(
+        self,
+        fields: Incomplete | None = None,
+        context_len: Incomplete | None = None,
+        num_frags: Incomplete | None = None,
+        sep: Incomplete | None = None,
+    ): ...
+    def highlight(self, fields: Incomplete | None = None, tags: Incomplete | None = None): ...
+    def language(self, language): ...
+    def slop(self, slop): ...
+    def in_order(self): ...
+    def scorer(self, scorer): ...
+    def get_args(self): ...
+    def paging(self, offset, num): ...
+    def verbatim(self): ...
+    def no_content(self): ...
+    def no_stopwords(self): ...
+    def with_payloads(self): ...
+    def with_scores(self): ...
+    def limit_fields(self, *fields): ...
+    def add_filter(self, flt): ...
+    def sort_by(self, field, asc: bool = True): ...
+    def expander(self, expander): ...
+
+class Filter:
+    args: Any
+    def __init__(self, keyword, field, *args) -> None: ...
+
+class NumericFilter(Filter):
+    INF: str
+    NEG_INF: str
+    def __init__(self, field, minval, maxval, minExclusive: bool = False, maxExclusive: bool = False) -> None: ...
+
+class GeoFilter(Filter):
+    METERS: str
+    KILOMETERS: str
+    FEET: str
+    MILES: str
+    def __init__(self, field, lon, lat, radius, unit="km") -> None: ...
+
+class SortbyField:
+    args: Any
+    def __init__(self, field, asc: bool = True) -> None: ...
diff --git a/valkey/commands/search/querystring.py b/valkey/commands/search/querystring.py
index 3ff13209..bd576490 100644
--- a/valkey/commands/search/querystring.py
+++ b/valkey/commands/search/querystring.py
@@ -182,7 +182,7 @@ def __init__(self, *children, **kwparams):
 
         self.params = []
 
-        kvparams = {}
+        kvparams = {}  # type: ignore[var-annotated]
         for k, v in kwparams.items():
             curvals = kvparams.setdefault(k, [])
             if isinstance(v, (str, int, float)):
diff --git a/valkey/commands/search/reducers.py b/valkey/commands/search/reducers.py
index 694558de..00f65075 100644
--- a/valkey/commands/search/reducers.py
+++ b/valkey/commands/search/reducers.py
@@ -151,7 +151,7 @@ def __init__(self, field: str, *byfields: Union[Asc, Desc]) -> None:
             and isinstance(byfields[0], type)
             and issubclass(byfields[0], SortDirection)
         ):
-            byfields = [byfields[0](field)]
+            byfields = [byfields[0](field)]  # type: ignore[assignment]
 
         for f in byfields:
             fieldstrs += [f.field, f.DIRSTRING]
diff --git a/valkey/commands/search/result.pyi b/valkey/commands/search/result.pyi
new file mode 100644
index 00000000..046c3170
--- /dev/null
+++ b/valkey/commands/search/result.pyi
@@ -0,0 +1,7 @@
+from typing import Any
+
+class Result:
+    total: Any
+    duration: Any
+    docs: Any
+    def __init__(self, res, hascontent, duration: int = 0, has_payload: bool = False, with_scores: bool = False) -> None: ...
diff --git a/valkey/commands/sentinel.pyi b/valkey/commands/sentinel.pyi
new file mode 100644
index 00000000..b526a45f
--- /dev/null
+++ b/valkey/commands/sentinel.pyi
@@ -0,0 +1,17 @@
+class SentinelCommands:
+    def sentinel(self, *args): ...
+    def sentinel_get_master_addr_by_name(self, service_name): ...
+    def sentinel_master(self, service_name): ...
+    def sentinel_masters(self): ...
+    def sentinel_monitor(self, name, ip, port, quorum): ...
+    def sentinel_remove(self, name): ...
+    def sentinel_sentinels(self, service_name): ...
+    def sentinel_set(self, name, option, value): ...
+    def sentinel_slaves(self, service_name): ...
+    def sentinel_reset(self, pattern): ...
+    def sentinel_failover(self, new_master_name): ...
+    def sentinel_ckquorum(self, new_master_name): ...
+    def sentinel_flushconfig(self): ...
+
+class AsyncSentinelCommands(SentinelCommands):
+    async def sentinel(self, *args) -> None: ...
diff --git a/valkey/commands/timeseries/__init__.pyi b/valkey/commands/timeseries/__init__.pyi
new file mode 100644
index 00000000..95457d6f
--- /dev/null
+++ b/valkey/commands/timeseries/__init__.pyi
@@ -0,0 +1,14 @@
+from _typeshed import Incomplete
+from typing import Any
+
+from ...client import Pipeline as ClientPipeline
+from .commands import TimeSeriesCommands
+
+class TimeSeries(TimeSeriesCommands):
+    MODULE_CALLBACKS: dict[str, Any]
+    client: Any
+    execute_command: Any
+    def __init__(self, client: Incomplete | None = None, **kwargs) -> None: ...
+    def pipeline(self, transaction: bool = True, shard_hint: Incomplete | None = None) -> Pipeline: ...
+
+class Pipeline(TimeSeriesCommands, ClientPipeline[Incomplete]): ...  # type: ignore[misc]
diff --git a/valkey/commands/timeseries/commands.pyi b/valkey/commands/timeseries/commands.pyi
new file mode 100644
index 00000000..ed70e575
--- /dev/null
+++ b/valkey/commands/timeseries/commands.pyi
@@ -0,0 +1,160 @@
+from typing import Literal
+from typing_extensions import TypeAlias
+
+_Key: TypeAlias = bytes | str | memoryview
+
+ADD_CMD: Literal["TS.ADD"]
+ALTER_CMD: Literal["TS.ALTER"]
+CREATERULE_CMD: Literal["TS.CREATERULE"]
+CREATE_CMD: Literal["TS.CREATE"]
+DECRBY_CMD: Literal["TS.DECRBY"]
+DELETERULE_CMD: Literal["TS.DELETERULE"]
+DEL_CMD: Literal["TS.DEL"]
+GET_CMD: Literal["TS.GET"]
+INCRBY_CMD: Literal["TS.INCRBY"]
+INFO_CMD: Literal["TS.INFO"]
+MADD_CMD: Literal["TS.MADD"]
+MGET_CMD: Literal["TS.MGET"]
+MRANGE_CMD: Literal["TS.MRANGE"]
+MREVRANGE_CMD: Literal["TS.MREVRANGE"]
+QUERYINDEX_CMD: Literal["TS.QUERYINDEX"]
+RANGE_CMD: Literal["TS.RANGE"]
+REVRANGE_CMD: Literal["TS.REVRANGE"]
+
+class TimeSeriesCommands:
+    def create(
+        self,
+        key: _Key,
+        retention_msecs: int | None = None,
+        uncompressed: bool | None = False,
+        labels: dict[str, str] | None = None,
+        chunk_size: int | None = None,
+        duplicate_policy: str | None = None,
+    ): ...
+    def alter(
+        self,
+        key: _Key,
+        retention_msecs: int | None = None,
+        labels: dict[str, str] | None = None,
+        chunk_size: int | None = None,
+        duplicate_policy: str | None = None,
+    ): ...
+    def add(
+        self,
+        key: _Key,
+        timestamp: int | str,
+        value: float,
+        retention_msecs: int | None = None,
+        uncompressed: bool | None = False,
+        labels: dict[str, str] | None = None,
+        chunk_size: int | None = None,
+        duplicate_policy: str | None = None,
+    ): ...
+    def madd(self, ktv_tuples): ...
+    def incrby(
+        self,
+        key: _Key,
+        value: float,
+        timestamp: int | str | None = None,
+        retention_msecs: int | None = None,
+        uncompressed: bool | None = False,
+        labels: dict[str, str] | None = None,
+        chunk_size: int | None = None,
+    ): ...
+    def decrby(
+        self,
+        key: _Key,
+        value: float,
+        timestamp: int | str | None = None,
+        retention_msecs: int | None = None,
+        uncompressed: bool | None = False,
+        labels: dict[str, str] | None = None,
+        chunk_size: int | None = None,
+    ): ...
+    def delete(self, key, from_time, to_time): ...
+    def createrule(
+        self, source_key: _Key, dest_key: _Key, aggregation_type: str, bucket_size_msec: int, align_timestamp: int | None = None
+    ): ...
+    def deleterule(self, source_key, dest_key): ...
+    def range(
+        self,
+        key: _Key,
+        from_time: int | str,
+        to_time: int | str,
+        count: int | None = None,
+        aggregation_type: str | None = None,
+        bucket_size_msec: int | None = 0,
+        filter_by_ts: list[int] | None = None,
+        filter_by_min_value: int | None = None,
+        filter_by_max_value: int | None = None,
+        align: int | str | None = None,
+        latest: bool | None = False,
+        bucket_timestamp: str | None = None,
+        empty: bool | None = False,
+    ): ...
+    def revrange(
+        self,
+        key: _Key,
+        from_time: int | str,
+        to_time: int | str,
+        count: int | None = None,
+        aggregation_type: str | None = None,
+        bucket_size_msec: int | None = 0,
+        filter_by_ts: list[int] | None = None,
+        filter_by_min_value: int | None = None,
+        filter_by_max_value: int | None = None,
+        align: int | str | None = None,
+        latest: bool | None = False,
+        bucket_timestamp: str | None = None,
+        empty: bool | None = False,
+    ): ...
+    def mrange(
+        self,
+        from_time: int | str,
+        to_time: int | str,
+        filters: list[str],
+        count: int | None = None,
+        aggregation_type: str | None = None,
+        bucket_size_msec: int | None = 0,
+        with_labels: bool | None = False,
+        filter_by_ts: list[int] | None = None,
+        filter_by_min_value: int | None = None,
+        filter_by_max_value: int | None = None,
+        groupby: str | None = None,
+        reduce: str | None = None,
+        select_labels: list[str] | None = None,
+        align: int | str | None = None,
+        latest: bool | None = False,
+        bucket_timestamp: str | None = None,
+        empty: bool | None = False,
+    ): ...
+    def mrevrange(
+        self,
+        from_time: int | str,
+        to_time: int | str,
+        filters: list[str],
+        count: int | None = None,
+        aggregation_type: str | None = None,
+        bucket_size_msec: int | None = 0,
+        with_labels: bool | None = False,
+        filter_by_ts: list[int] | None = None,
+        filter_by_min_value: int | None = None,
+        filter_by_max_value: int | None = None,
+        groupby: str | None = None,
+        reduce: str | None = None,
+        select_labels: list[str] | None = None,
+        align: int | str | None = None,
+        latest: bool | None = False,
+        bucket_timestamp: str | None = None,
+        empty: bool | None = False,
+    ): ...
+    def get(self, key: _Key, latest: bool | None = False): ...
+    def mget(
+        self,
+        filters: list[str],
+        with_labels: bool | None = False,
+        select_labels: list[str] | None = None,
+        latest: bool | None = False,
+    ): ...
+    def info(self, key): ...
+    def queryindex(self, filters): ...
diff --git a/valkey/commands/timeseries/info.pyi b/valkey/commands/timeseries/info.pyi
new file mode 100644
index 00000000..8b082c7d
--- /dev/null
+++ b/valkey/commands/timeseries/info.pyi
@@ -0,0 +1,18 @@
+from _typeshed import Incomplete
+from typing import Any
+
+class TSInfo:
+    rules: list[Any]
+    labels: list[Any]
+    sourceKey: Incomplete | None
+    chunk_count: Incomplete | None
+    memory_usage: Incomplete | None
+    total_samples: Incomplete | None
+    retention_msecs: Incomplete | None
+    last_time_stamp: Incomplete | None
+    first_time_stamp: Incomplete | None
+
+    max_samples_per_chunk: Incomplete | None
+    chunk_size: Incomplete | None
+    duplicate_policy: Incomplete | None
+    def __init__(self, args) -> None: ...
diff --git a/valkey/commands/timeseries/utils.pyi b/valkey/commands/timeseries/utils.pyi
new file mode 100644
index 00000000..4a0d52c4
--- /dev/null
+++ b/valkey/commands/timeseries/utils.pyi
@@ -0,0 +1,5 @@
+def list_to_dict(aList): ...
+def parse_range(response): ...
+def parse_m_range(response): ...
+def parse_get(response): ...
+def parse_m_get(response): ...
diff --git a/valkey/connection.pyi b/valkey/connection.pyi
new file mode 100644
index 00000000..9796fd21
--- /dev/null
+++ b/valkey/connection.pyi
@@ -0,0 +1,289 @@
+from _typeshed import Incomplete, Unused
+from abc import abstractmethod
+from collections.abc import Callable, Iterable, Mapping
+from queue import Queue
+from socket import socket
+from typing import Any, ClassVar
+from typing_extensions import Self, TypeAlias
+
+from .credentials import CredentialProvider
+from .retry import Retry
+
+ssl_available: bool
+SYM_STAR: bytes
+SYM_DOLLAR: bytes
+SYM_CRLF: bytes
+SYM_EMPTY: bytes
+SERVER_CLOSED_CONNECTION_ERROR: str
+NONBLOCKING_EXCEPTIONS: tuple[type[Exception], ...]
+NONBLOCKING_EXCEPTION_ERROR_NUMBERS: dict[type[Exception], int]
+SENTINEL: object
+MODULE_LOAD_ERROR: str
+NO_SUCH_MODULE_ERROR: str
+MODULE_UNLOAD_NOT_POSSIBLE_ERROR: str
+MODULE_EXPORTS_DATA_TYPES_ERROR: str
+FALSE_STRINGS: tuple[str, ...]
+URL_QUERY_ARGUMENT_PARSERS: dict[str, Callable[[Any], Any]]
+
+# Options as passed to Pool.get_connection().
+_ConnectionPoolOptions: TypeAlias = Any
+_ConnectFunc: TypeAlias = Callable[[Connection], object]
+
+class BaseParser:
+    EXCEPTION_CLASSES: ClassVar[dict[str, type[Exception] | dict[str, type[Exception]]]]
+    @classmethod
+    def parse_error(cls, response: str) -> Exception: ...
+
+class SocketBuffer:
+    socket_read_size: int
+    bytes_written: int
+    bytes_read: int
+    socket_timeout: float | None
+    def __init__(self, socket: socket, socket_read_size: int, socket_timeout: float | None) -> None: ...
+    def unread_bytes(self) -> int: ...
+    def can_read(self, timeout: float | None) -> bool: ...
+    def read(self, length: int) -> bytes: ...
+    def readline(self) -> bytes: ...
+    def get_pos(self) -> int: ...
+    def rewind(self, pos: int) -> None: ...
+    def purge(self) -> None: ...
+    def close(self) -> None: ...
+
+class PythonParser(BaseParser):
+    encoding: str
+    socket_read_size: int
+    encoder: Encoder | None
+    def __init__(self, socket_read_size: int) -> None: ...
+    def __del__(self) -> None: ...
+    def on_connect(self, connection: Connection) -> None: ...
+    def on_disconnect(self) -> None: ...
+    def can_read(self, timeout: float | None) -> bool: ...
+    def read_response(self, disable_decoding: bool = False) -> Any: ...  # `str | bytes` or `list[str | bytes]`
+
+class LibvalkeyParser(BaseParser):
+    socket_read_size: int
+    def __init__(self, socket_read_size: int) -> None: ...
+    def __del__(self) -> None: ...
+    def on_connect(self, connection: Connection, **kwargs) -> None: ...
+    def on_disconnect(self) -> None: ...
+    def can_read(self, timeout: float | None) -> bool: ...
+    def read_from_socket(self, timeout: float | None = ..., raise_on_timeout: bool = True) -> bool: ...
+    def read_response(self, disable_decoding: bool = False) -> Any: ...  # `str | bytes` or `list[str | bytes]`
+
+DefaultParser: type[BaseParser]  # Libvalkey or PythonParser
+
+_Encodable: TypeAlias = str | bytes | memoryview | bool | float
+
+class Encoder:
+    encoding: str
+    encoding_errors: str
+    decode_responses: bool
+    def __init__(self, encoding: str, encoding_errors: str, decode_responses: bool) -> None: ...
+    def encode(self, value: _Encodable) -> bytes: ...
+    def decode(self, value: str | bytes | memoryview, force: bool = False) -> str: ...
+
+class AbstractConnection:
+    pid: int
+    db: int
+    client_name: str | None
+    credential_provider: CredentialProvider | None
+    password: str | None
+    username: str | None
+    socket_timeout: float | None
+    socket_connect_timeout: float | None
+    retry_on_timeout: bool
+    retry_on_error: list[type[Exception]]
+    retry: Retry
+    health_check_interval: int
+    next_health_check: int
+    valkey_connect_func: _ConnectFunc | None
+    encoder: Encoder
+
+    def __init__(
+        self,
+        db: int = 0,
+        password: str | None = None,
+        socket_timeout: float | None = None,
+        socket_connect_timeout: float | None = None,
+        retry_on_timeout: bool = False,
+        retry_on_error: list[type[Exception]] = ...,
+        encoding: str = "utf-8",
+        encoding_errors: str = "strict",
+        decode_responses: bool = False,
+        parser_class: type[BaseParser] = ...,
+        socket_read_size: int = 65536,
+        health_check_interval: int = 0,
+        client_name: str | None = None,
+        username: str | None = None,
+        retry: Retry | None = None,
+        valkey_connect_func: _ConnectFunc | None = None,
+        credential_provider: CredentialProvider | None = None,
+        command_packer: Incomplete | None = None,
+    ) -> None: ...
+    @abstractmethod
+    def repr_pieces(self) -> list[tuple[str, Any]]: ...
+    def register_connect_callback(self, callback: _ConnectFunc) -> None: ...
+    def clear_connect_callbacks(self) -> None: ...
+    def set_parser(self, parser_class: type[BaseParser]) -> None: ...
+    def connect(self) -> None: ...
+    def on_connect(self) -> None: ...
+    def disconnect(self, *args: Unused) -> None: ...  # 'args' added in valkey 4.1.2
+    def check_health(self) -> None: ...
+    def send_packed_command(self, command: str | Iterable[str], check_health: bool = True) -> None: ...
+    def send_command(self, *args, **kwargs) -> None: ...
+    def can_read(self, timeout: float | None = 0) -> bool: ...
+    def read_response(
+        self, disable_decoding: bool = False, *, disconnect_on_error: bool = True
+    ) -> Any: ...  # `str | bytes` or `list[str | bytes]`
+    def pack_command(self, *args) -> list[bytes]: ...
+    def pack_commands(self, commands: Iterable[Iterable[Incomplete]]) -> list[bytes]: ...
+
+class Connection(AbstractConnection):
+    host: str
+    port: int
+    socket_keepalive: bool
+    socket_keepalive_options: Mapping[str, int | str]
+    socket_type: int
+    def __init__(
+        self,
+        host: str = "localhost",
+        port: int = 6379,
+        socket_keepalive: bool = False,
+        socket_keepalive_options: Mapping[str, int | str] | None = None,
+        socket_type: int = 0,
+        *,
+        db: int = 0,
+        password: str | None = None,
+        socket_timeout: float | None = None,
+        socket_connect_timeout: float | None = None,
+        retry_on_timeout: bool = False,
+        retry_on_error: list[type[Exception]] = ...,
+        encoding: str = "utf-8",
+        encoding_errors: str = "strict",
+        decode_responses: bool = False,
+        parser_class: type[BaseParser] = ...,
+        socket_read_size: int = 65536,
+        health_check_interval: int = 0,
+        client_name: str | None = None,
+        username: str | None = None,
+        retry: Retry | None = None,
+        valkey_connect_func: _ConnectFunc | None = None,
+        credential_provider: CredentialProvider | None = None,
+        command_packer: Incomplete | None = None,
+    ) -> None: ...
+    def repr_pieces(self) -> list[tuple[str, Any]]: ...
+
+class SSLConnection(Connection):
+    keyfile: Any
+    certfile: Any
+    cert_reqs: Any
+    ca_certs: Any
+    ca_path: Incomplete | None
+    check_hostname: bool
+    certificate_password: Incomplete | None
+    ssl_validate_ocsp: bool
+    ssl_validate_ocsp_stapled: bool  # added in 4.1.1
+    ssl_ocsp_context: Incomplete | None  # added in 4.1.1
+    ssl_ocsp_expected_cert: Incomplete | None  # added in 4.1.1
+    def __init__(
+        self,
+        ssl_keyfile=None,
+        ssl_certfile=None,
+        ssl_cert_reqs="required",
+        ssl_ca_certs=None,
+        ssl_ca_data: Incomplete | None = None,
+        ssl_check_hostname: bool = False,
+        ssl_ca_path: Incomplete | None = None,
+        ssl_password: Incomplete | None = None,
+        ssl_validate_ocsp: bool = False,
+        ssl_validate_ocsp_stapled: bool = False,  # added in 4.1.1
+        ssl_ocsp_context: Incomplete | None = None,  # added in 4.1.1
+        ssl_ocsp_expected_cert: Incomplete | None = None,  # added in 4.1.1
+        *,
+        host: str = "localhost",
+        port: int = 6379,
+        socket_timeout: float | None = None,
+        socket_connect_timeout: float | None = None,
+        socket_keepalive: bool = False,
+        socket_keepalive_options: Mapping[str, int | str] | None = None,
+        socket_type: int = 0,
+        db: int = 0,
+        password: str | None = None,
+        retry_on_timeout: bool = False,
+        retry_on_error: list[type[Exception]] = ...,
+        encoding: str = "utf-8",
+        encoding_errors: str = "strict",
+        decode_responses: bool = False,
+        parser_class: type[BaseParser] = ...,
+        socket_read_size: int = 65536,
+        health_check_interval: int = 0,
+        client_name: str | None = None,
+        username: str | None = None,
+        retry: Retry | None = None,
+        valkey_connect_func: _ConnectFunc | None = None,
+        credential_provider: CredentialProvider | None = None,
+        command_packer: Incomplete | None = None,
+    ) -> None: ...
+
+class UnixDomainSocketConnection(AbstractConnection):
+    path: str
+    def __init__(
+        self,
+        path: str = "",
+        *,
+        db: int = 0,
+        password: str | None = None,
+        socket_timeout: float | None = None,
+        socket_connect_timeout: float | None = None,
+        retry_on_timeout: bool = False,
+        retry_on_error: list[type[Exception]] = ...,
+        encoding: str = "utf-8",
+        encoding_errors: str = "strict",
+        decode_responses: bool = False,
+        parser_class: type[BaseParser] = ...,
+        socket_read_size: int = 65536,
+        health_check_interval: int = 0,
+        client_name: str | None = None,
+        username: str | None = None,
+        retry: Retry | None = None,
+        valkey_connect_func: _ConnectFunc | None = None,
+        credential_provider: CredentialProvider | None = None,
+        command_packer: Incomplete | None = None,
+    ) -> None: ...
+    def repr_pieces(self) -> list[tuple[str, Any]]: ...
+
+# TODO: make generic on `connection_class`
+class ConnectionPool:
+    connection_class: type[Connection]
+    connection_kwargs: dict[str, Any]
+    max_connections: int
+    pid: int
+    @classmethod
+    def from_url(cls, url: str, *, db: int = ..., decode_components: bool = ..., **kwargs) -> Self: ...
+    def __init__(
+        self, connection_class: type[AbstractConnection] = ..., max_connections: int | None = None, **connection_kwargs
+    ) -> None: ...
+    def reset(self) -> None: ...
+    def get_connection(self, command_name: Unused, *keys, **options: _ConnectionPoolOptions) -> Connection: ...
+    def make_connection(self) -> Connection: ...
+    def release(self, connection: Connection) -> None: ...
+    def disconnect(self, inuse_connections: bool = True) -> None: ...
+    def get_encoder(self) -> Encoder: ...
+    def owns_connection(self, connection: Connection) -> bool: ...
+
+class BlockingConnectionPool(ConnectionPool):
+    queue_class: type[Queue[Any]]
+    timeout: float
+    pool: Queue[Connection | None]  # might not be defined
+    def __init__(
+        self,
+        max_connections: int = 50,
+        timeout: float = 20,
+        connection_class: type[Connection] = ...,
+        queue_class: type[Queue[Any]] = ...,
+        **connection_kwargs,
+    ) -> None: ...
+    def disconnect(self) -> None: ...  # type: ignore[override]
+
+def to_bool(value: object) -> bool: ...
+def parse_url(url: str) -> dict[str, Any]: ...
diff --git a/valkey/crc.pyi b/valkey/crc.pyi
new file mode 100644
index 00000000..d808e657
--- /dev/null
+++ b/valkey/crc.pyi
@@ -0,0 +1,5 @@
+from valkey.typing import EncodedT
+
+VALKEY_CLUSTER_HASH_SLOTS: int
+
+def key_slot(key: EncodedT, bucket: int = 16384) -> int: ...
diff --git a/valkey/credentials.pyi b/valkey/credentials.pyi
new file mode 100644
index 00000000..7a2d78ec
--- /dev/null
+++ b/valkey/credentials.pyi
@@ -0,0 +1,11 @@
+from abc import abstractmethod
+
+class CredentialProvider:
+    @abstractmethod
+    def get_credentials(self) -> tuple[str] | tuple[str, str]: ...
+
+class UsernamePasswordCredentialProvider(CredentialProvider):
+    username: str
+    password: str
+    def __init__(self, username: str | None = None, password: str | None = None) -> None: ...
+    def get_credentials(self) -> tuple[str] | tuple[str, str]: ...
diff --git a/valkey/exceptions.pyi b/valkey/exceptions.pyi
new file mode 100644
index 00000000..50eb8955
--- /dev/null
+++ b/valkey/exceptions.pyi
@@ -0,0 +1,43 @@
+class ValkeyError(Exception): ...  # root of the valkey exception hierarchy
+class AuthenticationError(ValkeyError): ...
+class ConnectionError(ValkeyError): ...  # NOTE: shadows the ConnectionError builtin
+class TimeoutError(ValkeyError): ...  # NOTE: shadows the TimeoutError builtin
+class AuthorizationError(ConnectionError): ...
+class BusyLoadingError(ConnectionError): ...
+class InvalidResponse(ValkeyError): ...
+class ResponseError(ValkeyError): ...  # base for error replies returned by the server
+class DataError(ValkeyError): ...
+class PubSubError(ValkeyError): ...
+class WatchError(ValkeyError): ...  # a WATCHed key changed before EXEC -- standard transaction semantics; confirm
+class NoScriptError(ResponseError): ...
+class OutOfMemoryError(ResponseError): ...
+class ExecAbortError(ResponseError): ...
+class ReadOnlyError(ResponseError): ...
+class NoPermissionError(ResponseError): ...
+class ModuleError(ResponseError): ...
+class LockError(ValkeyError, ValueError): ...  # doubles as ValueError -- presumably for backward-compatible catching
+class LockNotOwnedError(LockError): ...
+class ChildDeadlockedError(Exception): ...  # deliberately outside the ValkeyError hierarchy
+class AuthenticationWrongNumberOfArgsError(ResponseError): ...
+class ValkeyClusterException(Exception): ...  # cluster error root; also outside ValkeyError
+class ClusterError(ValkeyError): ...
+
+class ClusterDownError(ClusterError, ResponseError):
+    args: tuple[str]  # 1-tuple carrying the raw server message
+    message: str
+    def __init__(self, resp: str) -> None: ...  # `resp` is the raw error text from the server
+
+class AskError(ResponseError):  # ASK redirection; fields parsed from "ASK <slot> <host>:<port>" -- verify format
+    args: tuple[str]
+    message: str
+    slot_id: int  # hash slot being redirected
+    node_addr: tuple[str, int]  # (host, port) of the target node
+    host: str  # same address, exposed as separate fields
+    port: int
+    def __init__(self, resp: str) -> None: ...
+
+class TryAgainError(ResponseError): ...
+class ClusterCrossSlotError(ResponseError): ...
+class MovedError(AskError): ...  # MOVED redirection; same parsed fields as AskError
+class MasterDownError(ClusterDownError): ...
+class SlotNotCoveredError(ValkeyClusterException): ...
diff --git a/valkey/lock.pyi b/valkey/lock.pyi
new file mode 100644
index 00000000..81d1dcac
--- /dev/null
+++ b/valkey/lock.pyi
@@ -0,0 +1,56 @@
+from _typeshed import Incomplete
+from types import TracebackType
+from typing import Any, ClassVar, Protocol
+from typing_extensions import Self
+
+from valkey.client import Valkey
+
+class _Local(Protocol):  # structural stand-in for token storage: any object exposing a `token` attribute
+    token: str | bytes | None
+
+class Lock:  # distributed lock backed by server-side Lua scripts
+    LUA_EXTEND_SCRIPT: ClassVar[str]  # Lua source for atomically extending the TTL
+    LUA_REACQUIRE_SCRIPT: ClassVar[str]  # Lua source for resetting the TTL
+    LUA_RELEASE_SCRIPT: ClassVar[str]  # Lua source for releasing only while we still own the token
+    lua_extend: ClassVar[Incomplete | None]  # registered script handles; None until register_scripts() runs -- presumed
+    lua_reacquire: ClassVar[Incomplete | None]
+    lua_release: ClassVar[Incomplete | None]
+    valkey: Valkey[Any]
+    name: str  # key under which the lock token is stored
+    timeout: float | None  # max lifetime in seconds; None = held until released
+    sleep: float  # seconds slept between acquire attempts while blocking
+    blocking: bool  # default blocking behaviour for acquire()
+    blocking_timeout: float | None  # max seconds acquire() may block; None = wait forever -- confirm
+    thread_local: bool  # whether the token lives in thread-local storage
+    local: _Local  # token storage; see thread_local
+    def __init__(
+        self,
+        valkey: Valkey[Any],
+        name: str,
+        timeout: float | None = None,
+        sleep: float = 0.1,
+        blocking: bool = True,
+        blocking_timeout: float | None = None,
+        thread_local: bool = True,
+    ) -> None: ...
+    def register_scripts(self) -> None: ...  # registers the three Lua scripts on the client
+    def __enter__(self) -> Self: ...  # acquires on entry -- presumably raises on failure; confirm exception type
+    def __exit__(
+        self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None
+    ) -> bool | None: ...  # releases the lock on exit
+    def acquire(
+        self,
+        sleep: float | None = None,  # per-call overrides for the constructor defaults
+        blocking: bool | None = None,
+        blocking_timeout: float | None = None,
+        token: str | bytes | None = None,  # externally supplied token, e.g. to share a lock across processes
+    ) -> bool: ...  # True once held; False when a non-blocking/timed attempt fails -- confirm
+    def do_acquire(self, token: str | bytes) -> bool: ...  # single acquire attempt used by acquire() -- presumed
+    def locked(self) -> bool: ...  # True if *any* owner currently holds the lock
+    def owned(self) -> bool: ...  # True only if this instance's token holds it
+    def release(self) -> None: ...  # presumably raises LockNotOwnedError if no longer owned -- confirm
+    def do_release(self, expected_token: str | bytes) -> None: ...
+    def extend(self, additional_time: float, replace_ttl: bool = False) -> bool: ...  # add to, or replace, the remaining TTL
+    def do_extend(self, additional_time: float, replace_ttl: bool) -> bool: ...
+    def reacquire(self) -> bool: ...  # reset the TTL back to `timeout`
+    def do_reacquire(self) -> bool: ...
diff --git a/valkey/ocsp.pyi b/valkey/ocsp.pyi
new file mode 100644
index 00000000..5fc72e08
--- /dev/null
+++ b/valkey/ocsp.pyi
@@ -0,0 +1,21 @@
+from _typeshed import Incomplete
+from ssl import SSLObject, SSLSocket
+from typing import Literal
+
+from cryptography.x509.base import Certificate
+from OpenSSL.SSL import Connection
+
+def ocsp_staple_verifier(con: Connection, ocsp_bytes: bytes, expected: bytes | None = None) -> Literal[True]: ...  # pyOpenSSL OCSP-stapling callback; Literal[True] implies failures raise rather than return False
+
+class OCSPVerifier:  # verifies a peer certificate's OCSP status for a TLS socket
+    SOCK: SSLObject | SSLSocket
+    HOST: str
+    PORT: int
+    CA_CERTS: str | None  # path to a CA bundle file, or None
+    def __init__(self, sock: SSLObject | SSLSocket, host: str, port: int, ca_certs: str | None = None) -> None: ...
+    # cryptography.x509.general_name.GeneralName.value is typed as Any
+    def components_from_socket(self) -> tuple[Certificate, Incomplete | None, Incomplete]: ...
+    def components_from_direct_connection(self) -> tuple[Certificate, Incomplete | None, Incomplete]: ...
+    def build_certificate_url(self, server: str, cert: Certificate, issuer_cert: Certificate) -> str: ...  # OCSP request URL for `cert`
+    def check_certificate(self, server: str, cert: Certificate, issuer_url: str | bytes) -> Literal[True]: ...
+    def is_valid(self) -> Literal[True]: ...  # top-level entry point; Literal[True] means failure raises -- confirm exception type
diff --git a/valkey/retry.py b/valkey/retry.py
index e40a8331..4eb34d77 100644
--- a/valkey/retry.py
+++ b/valkey/retry.py
@@ -7,7 +7,7 @@
 T = TypeVar("T")
 
 if TYPE_CHECKING:
-    from redis.backoff import AbstractBackoff
+    from valkey.backoff import AbstractBackoff
 
 
 class Retry:
diff --git a/valkey/retry.pyi b/valkey/retry.pyi
new file mode 100644
index 00000000..ab727e66
--- /dev/null
+++ b/valkey/retry.pyi
@@ -0,0 +1,11 @@
+from collections.abc import Callable, Iterable
+from typing import TypeVar
+
+from valkey.backoff import AbstractBackoff
+
+_T = TypeVar("_T")
+
+class Retry:  # retry policy: a backoff strategy plus a maximum attempt count
+    def __init__(self, backoff: AbstractBackoff, retries: int, supported_errors: tuple[type[Exception], ...] = ...) -> None: ...
+    def update_supported_errors(self, specified_errors: Iterable[type[Exception]]) -> None: ...  # widen the set of retryable exception types
+    def call_with_retry(self, do: Callable[[], _T], fail: Callable[[Exception], object]) -> _T: ...  # run `do`; on a supported error call `fail` then retry -- confirm ordering
diff --git a/valkey/sentinel.pyi b/valkey/sentinel.pyi
new file mode 100644
index 00000000..4a4c9489
--- /dev/null
+++ b/valkey/sentinel.pyi
@@ -0,0 +1,62 @@
+from collections.abc import Iterable, Iterator
+from typing import Any, Literal, TypeVar, overload
+from typing_extensions import TypeAlias
+
+from valkey.client import Valkey
+from valkey.commands.sentinel import SentinelCommands
+from valkey.connection import Connection, ConnectionPool, SSLConnection
+from valkey.exceptions import ConnectionError
+
+_ValkeyT = TypeVar("_ValkeyT", bound=Valkey[Any])
+_AddressAndPort: TypeAlias = tuple[str, int]
+_SentinelState: TypeAlias = dict[str, Any]  # TODO: this can be a TypedDict
+
+class MasterNotFoundError(ConnectionError): ...  # no master discoverable for the requested service
+class SlaveNotFoundError(ConnectionError): ...  # no replica available for the requested service
+
+class SentinelManagedConnection(Connection):  # connection whose address is resolved through Sentinel on (re)connect
+    connection_pool: SentinelConnectionPool
+    def __init__(self, *, connection_pool: SentinelConnectionPool, **kwargs) -> None: ...
+    def connect_to(self, address: _AddressAndPort) -> None: ...  # connect to an explicit (host, port)
+    def connect(self) -> None: ...  # presumably asks the pool for the current master/replica address -- confirm
+    # The result can be either `str | bytes` or `list[str | bytes]`
+    def read_response(self, disable_decoding: bool = False, *, disconnect_on_error: bool = False) -> Any: ...
+
+class SentinelManagedSSLConnection(SentinelManagedConnection, SSLConnection): ...  # TLS variant; behaviour combined purely via MRO
+
+class SentinelConnectionPool(ConnectionPool):  # pool that tracks the service's current master (or rotates replicas)
+    is_master: bool  # True: hand out master connections; False: replicas
+    check_connection: bool
+    service_name: str  # Sentinel-monitored service this pool serves
+    sentinel_manager: Sentinel
+    def __init__(self, service_name: str, sentinel_manager: Sentinel, **kwargs) -> None: ...
+    def reset(self) -> None: ...
+    def owns_connection(self, connection: Connection) -> bool: ...
+    def get_master_address(self) -> _AddressAndPort: ...  # current master (host, port) per Sentinel
+    def rotate_slaves(self) -> Iterator[_AddressAndPort]: ...  # iterator over known replica addresses -- confirm rotation order
+
+class Sentinel(SentinelCommands):  # entry point for Sentinel-based service discovery
+    sentinel_kwargs: dict[str, Any]  # connection kwargs for talking to the sentinels themselves
+    sentinels: list[Valkey[Any]]  # one client per configured sentinel address
+    min_other_sentinels: int  # minimum peer sentinels required to trust a master report -- confirm
+    connection_kwargs: dict[str, Any]  # defaults applied to master_for()/slave_for() clients
+    def __init__(
+        self,
+        sentinels: Iterable[_AddressAndPort],
+        min_other_sentinels: int = 0,
+        sentinel_kwargs: dict[str, Any] | None = None,
+        **connection_kwargs,
+    ) -> None: ...
+    def check_master_state(self, state: _SentinelState, service_name: str) -> bool: ...
+    def discover_master(self, service_name: str) -> _AddressAndPort: ...  # presumably raises MasterNotFoundError when none qualifies -- confirm
+    def filter_slaves(self, slaves: Iterable[_SentinelState]) -> list[_AddressAndPort]: ...  # drop unusable replicas -- presumed; see implementation
+    def discover_slaves(self, service_name: str) -> list[_AddressAndPort]: ...
+    @overload
+    def master_for(self, service_name: str, *, connection_pool_class=..., **kwargs) -> Valkey[Any]: ...
+    @overload
+    def master_for(self, service_name: str, valkey_class: type[_ValkeyT], connection_pool_class=..., **kwargs) -> _ValkeyT: ...
+    @overload
+    def slave_for(self, service_name: str, *, connection_pool_class=..., **kwargs) -> Valkey[Any]: ...
+    @overload
+    def slave_for(self, service_name: str, valkey_class: type[_ValkeyT], connection_pool_class=..., **kwargs) -> _ValkeyT: ...
+    def execute_command(self, *args, **kwargs) -> Literal[True]: ...  # broadcast to every sentinel; True on success -- confirm semantics
diff --git a/valkey/typing.pyi b/valkey/typing.pyi
new file mode 100644
index 00000000..dce33cb2
--- /dev/null
+++ b/valkey/typing.pyi
@@ -0,0 +1,34 @@
+from collections.abc import Iterable
+from datetime import datetime, timedelta
+from typing import Any, Protocol, TypeVar
+from typing_extensions import TypeAlias
+
+from valkey.asyncio.connection import ConnectionPool as AsyncConnectionPool
+from valkey.connection import ConnectionPool
+
+# The following type aliases exist at runtime.
+EncodedT: TypeAlias = bytes | memoryview  # already-encoded payloads, passed through untouched
+DecodedT: TypeAlias = str | int | float  # values the client encodes before sending
+EncodableT: TypeAlias = EncodedT | DecodedT
+AbsExpiryT: TypeAlias = int | datetime  # absolute expiry: unix timestamp or datetime
+ExpiryT: TypeAlias = int | timedelta  # relative expiry: seconds or timedelta
+ZScoreBoundT: TypeAlias = float | str  # str admits exclusive bounds like "(5" and "+inf"/"-inf" -- per sorted-set range syntax; confirm
+BitfieldOffsetT: TypeAlias = int | str  # str admits the "#<n>" multiplier form -- confirm
+_StringLikeT: TypeAlias = bytes | str | memoryview  # noqa: Y043
+KeyT: TypeAlias = _StringLikeT
+PatternT: TypeAlias = _StringLikeT
+FieldT: TypeAlias = EncodableT
+KeysT: TypeAlias = KeyT | Iterable[KeyT]  # a single key or an iterable of keys
+ChannelT: TypeAlias = _StringLikeT
+GroupT: TypeAlias = _StringLikeT
+ConsumerT: TypeAlias = _StringLikeT
+StreamIdT: TypeAlias = int | _StringLikeT
+ScriptTextT: TypeAlias = _StringLikeT
+TimeoutSecT: TypeAlias = int | float | _StringLikeT
+AnyKeyT = TypeVar("AnyKeyT", bytes, str, memoryview)  # noqa: Y001
+AnyFieldT = TypeVar("AnyFieldT", bytes, str, memoryview)  # noqa: Y001
+AnyChannelT = TypeVar("AnyChannelT", bytes, str, memoryview)  # noqa: Y001
+
+class CommandsProtocol(Protocol):  # minimal structural interface every command mixin relies on
+    connection_pool: AsyncConnectionPool[Any] | ConnectionPool
+    def execute_command(self, *args, **options): ...  # untyped: shared by sync (value) and async (Awaitable) clients -- presumed
diff --git a/valkey/utils.pyi b/valkey/utils.pyi
new file mode 100644
index 00000000..de41c112
--- /dev/null
+++ b/valkey/utils.pyi
@@ -0,0 +1,22 @@
+from _typeshed import Unused
+from collections.abc import Iterable, Mapping
+from contextlib import AbstractContextManager
+from typing import Any, Literal, TypeVar, overload
+
+from .client import Pipeline, Valkey, _StrType
+
+_T = TypeVar("_T")
+
+LIBVALKEY_AVAILABLE: bool  # True when the optional libvalkey C parser imports -- name-based; confirm
+CRYPTOGRAPHY_AVAILABLE: bool  # True when `cryptography` imports -- needed by the OCSP module
+
+@overload
+def from_url(url: str, *, db: int = ..., decode_responses: Literal[True], **kwargs: Any) -> Valkey[str]: ...  # decode_responses=True yields str results
+@overload
+def from_url(url: str, *, db: int = ..., decode_responses: Literal[False] = False, **kwargs: Any) -> Valkey[bytes]: ...  # default: raw bytes results
+def pipeline(valkey_obj: Valkey[_StrType]) -> AbstractContextManager[Pipeline[_StrType]]: ...  # context manager presumably executing the pipeline on exit -- confirm
+def str_if_bytes(value: str | bytes) -> str: ...  # decode bytes, pass str through unchanged
+def safe_str(value: object) -> str: ...
+def dict_merge(*dicts: Mapping[str, _T]) -> dict[str, _T]: ...  # merge left-to-right; later mappings presumably win on conflicts -- confirm
+def list_keys_to_dict(key_list, callback): ...  # unused, alias for `dict.fromkeys`
+def merge_result(command: Unused, res: Mapping[Any, Iterable[_T]]) -> list[_T]: ...  # flatten per-node result iterables into one list -- confirm dedup behaviour