Fuzzy finding #30

Merged
merged 6 commits on Feb 10, 2024
4 changes: 4 additions & 0 deletions main.py
@@ -1,4 +1,8 @@
from poketerm import main
from poketerm.utils import testing
import sys

if __name__ == "__main__":
    if len(sys.argv) > 1 and sys.argv[1] == "cache":
        testing.handle_cache_test()
    main.main()
418 changes: 417 additions & 1 deletion poetry.lock

Large diffs are not rendered by default.

6 changes: 3 additions & 3 deletions poketerm/config.py
@@ -1,4 +1,4 @@
-from poketerm.utils.caching import load_cache, save_cache
+from poketerm.utils.caching import CacheManager

APP_VERSION = "0.3.0"

@@ -26,7 +26,7 @@ class Config:

    @classmethod
    def LoadCache(cls):
-        cache = load_cache("config")
+        cache = CacheManager.load_cache_of_type("config")
        if cache is None:
            return

@@ -45,4 +45,4 @@ def SaveCache(cls):
            "type": cls.TYPE_FLAGS,
            # "move": cls.MOVE_FLAGS
        }
-        save_cache("config", flagList)
+        CacheManager.save_cache_of_type("config", flagList)
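For context, a minimal sketch of the cache round trip Config now goes through; CacheManager.save_cache_of_type and load_cache_of_type are taken from this diff, while the flag values (and the assumption that the cache directory already exists) are illustrative only:

from poketerm.utils.caching import CacheManager

# Persist a dict of flags under the "config" cache type, then read it back.
flags = {"gen": {1: True}, "type": {"fire": False}}  # made-up values
CacheManager.save_cache_of_type("config", flags)

# load_cache_of_type now returns None explicitly when the cache file is
# missing or empty, which is why Config.LoadCache bails out early on None.
restored = CacheManager.load_cache_of_type("config")
assert restored == flags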
8 changes: 5 additions & 3 deletions poketerm/main.py
@@ -52,9 +52,9 @@ def handle_cache_test():
    "Nature": nature.Nature,
    "Pokemon": pokemon.Pokemon,
    "Type": type.Type,
-    "Version": version.Version,
-    "Species": species.Species,
-    "VersionGroup": version_group.VersionGroup,
+    # "Version": version.Version,
+    # "Species": species.Species,
+    # "VersionGroup": version_group.VersionGroup,
}

SEARCH_OPTIONS = [
@@ -97,6 +97,8 @@ def handle_cache_test():
def main():
    startup()

    SearchManager.load_valid_names([RESOURCES[name] for name in RESOURCES.keys()])

    if updater.check_for_update():
        shutdown()

1 change: 1 addition & 0 deletions poketerm/resources/egg_group.py
@@ -4,6 +4,7 @@


class EggGroup(Resource):
    MAX_COUNT = 15
    ENDPOINT = "egg-group"

    def __init__(self, data):
19 changes: 18 additions & 1 deletion poketerm/utils/api.py
@@ -1,19 +1,36 @@
import requests

BASE_URL = "https://pokeapi.co/api/v2"
+LOCAL_URL = "http://localhost/api/v2"


def get_ID_from_URL(URL: str) -> int:
    return int(URL.split("/")[-2])


def get_from_API(endpoint, searchTerm):
-    response = requests.get(f"{BASE_URL}/{endpoint}/{searchTerm}")
+    from poketerm.utils.testing import TEST_RUNNING
+
+    response = requests.get(
+        f"{LOCAL_URL if TEST_RUNNING else BASE_URL}/{endpoint}/{searchTerm}"
+    )
    if response.status_code == 404:
        return None
    return response.json()


async def get_from_API_async(endpoint, searchTerm, session):
    from poketerm.utils.testing import TEST_RUNNING

    async with session.get(
        f"{LOCAL_URL if TEST_RUNNING else BASE_URL}/{endpoint}/{searchTerm}"
    ) as response:
        # aiohttp responses expose .status and an awaitable .json()
        if response.status == 404:
            return None
        return await response.json()


def get_from_URL(url) -> dict | None:
    response = requests.get(url)
    if response.status_code == 404:
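For reference, a minimal usage sketch of the new async helper; get_from_API_async and its (endpoint, searchTerm, session) signature come from this diff, while the endpoint and search term are illustrative:

import asyncio

import aiohttp

from poketerm.utils.api import get_from_API_async


async def demo():
    async with aiohttp.ClientSession() as session:
        # A 404 comes back as None; anything else is the decoded JSON payload.
        data = await get_from_API_async("pokemon", "bulbasaur", session)
        print(data["name"] if data else "not found")


asyncio.run(demo())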
19 changes: 2 additions & 17 deletions poketerm/utils/caching.py
@@ -71,9 +71,9 @@ def save_cache_of_type(cache_type: str, cache):
    @staticmethod
    def load_cache_of_type(cache_type: str):
        if not does_cache_type_exist(cache_type):
-            return
+            return None
        if not os.path.getsize(get_cache_filepath(cache_type)) > 0:
-            return
+            return None
        with open(get_cache_filepath(cache_type), "rb") as cache_file:
            data = pickle.load(cache_file)
            return data
@@ -113,18 +113,3 @@ def remove_cache_dir():

def does_cache_type_exist(cacheType) -> bool:
    return os.path.exists(get_cache_filepath(cacheType))
-
-
-def save_cache(cacheType, cache) -> None:
-    with open(get_cache_filepath(cacheType), "wb") as f:
-        pickle.dump(cache, f)
-    print(f"Successfully saved {cacheType.upper()} cache")
-
-
-def load_cache(cacheType) -> dict | None:
-    if not does_cache_type_exist(cacheType):
-        return None
-    with open(get_cache_filepath(cacheType), "rb") as f:
-        cache = pickle.load(f)
-    print(f"Successfully loaded {cacheType.upper()} cache")
-    return cache
64 changes: 56 additions & 8 deletions poketerm/utils/searching.py
@@ -1,17 +1,45 @@
import re

from readchar import readkey
from thefuzz import process
from typing import Optional

-from poketerm.utils.api import get_from_API
+from poketerm.utils.api import get_from_API, get_from_API_async
from poketerm.utils.caching import CacheManager
from poketerm.resources.data import Resource
from poketerm.console import console


class SearchManager:
-    VALID_NAMES: dict[str, list[str]] = []
+    VALID_NAMES: dict[str, list[str]] = {}

    @classmethod
    def update_valid_names(cls, resource):
        if resource.ENDPOINT not in cls.VALID_NAMES:
            cls.VALID_NAMES[resource.ENDPOINT] = []
        if resource.name not in cls.VALID_NAMES[resource.ENDPOINT]:
            cls.VALID_NAMES[resource.ENDPOINT].append(resource.name)

    @classmethod
    def load_valid_names(cls, searchable_resources: list[Resource]) -> None:
        cache = CacheManager.load_cache_of_type("valid-names")
        if cache:
            cls.VALID_NAMES = cache
            return
        for resource in searchable_resources:
            names = []
            for i in range(1, resource.MAX_COUNT + 1):
                res = cls.handle_search_and_cast(resource, i)
                names.append(res.name)

            cls.VALID_NAMES[resource.ENDPOINT] = names

        cls.save_valid_names()
        return

    @classmethod
-    def load_valid_names(cls):
-        pass
+    def save_valid_names(cls):
+        CacheManager.save_cache_of_type("valid-names", cls.VALID_NAMES)

    @classmethod
    def handle_search_and_cast(cls, resource, query: Optional[str | int] = None):
@@ -31,17 +59,24 @@ def handle_search(cls, endpoint: str, query: Optional[str | int] = None):
        if query is None or query == "":
            q = prompt_for_query(endpoint)
            if q == "":
-                return
+                return None
        else:
            q = str(query)

        q = normalize_search_term(q)
        data = obtain_data(endpoint, q)

        if data is None:
-            # print("oops no data!")
-            # TODO: Implement fuzzy-finding
-            return
+            choices = process.extract(q, cls.VALID_NAMES[endpoint])
+            for i, choice in enumerate(choices):
+                console.print(f"[{i + 1}] {choice[0]}")
+            console.print(
+                "Query not found! [1-5] for closest matches, or anything else to return."
+            )
+            key = readkey()
+            if key.isdigit() and 5 >= int(key) > 0:
+                return obtain_data(endpoint, choices[int(key) - 1][0])
+            return None
        return data


@@ -57,6 +92,19 @@ def obtain_data(endpoint: str, query: str):
    return get_from_API(endpoint, query)


async def obtain_data_async(endpoint: str, query: str, session):
    if query.isdigit():
        data = CacheManager.get_data_from_ID(endpoint, query)
    else:
        data = CacheManager.get_data_from_name(endpoint, query)

    if data:
        return data

    data = await get_from_API_async(endpoint, query, session)
    return data


def prompt_for_query(endpoint: str):
    return input(f"{endpoint.title()} Name or ID: ").lower()

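A note on the fuzzy-matching step in handle_search above: thefuzz's process.extract returns (choice, score) pairs sorted best-first and defaults to five results, which is what the "[1-5]" prompt relies on. A standalone sketch with made-up names:

from thefuzz import process

valid_names = ["bulbasaur", "ivysaur", "venusaur", "charmander", "charmeleon"]

choices = process.extract("bulbsaur", valid_names)  # default limit of 5 matches
for i, (name, score) in enumerate(choices):
    print(f"[{i + 1}] {name} ({score})")
# handle_search feeds the picked name straight back into obtain_data().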
77 changes: 67 additions & 10 deletions poketerm/utils/testing.py
@@ -1,27 +1,47 @@
import asyncio
from concurrent.futures import ThreadPoolExecutor

import aiohttp

from poketerm.console import console

-from poketerm.resources import pokemon, generation, move, ability, type, nature
-from poketerm.resources import item, location, machine, pokedex, region
+from poketerm.resources import (
+    pokemon,
+    generation,
+    move,
+    ability,
+    type,
+    nature,
+    egg_group,
+    item,
+    location,
+    machine,
+    pokedex,
+    region,
+)

-from poketerm.utils.searching import SearchManager
+from poketerm.utils.searching import SearchManager, obtain_data_async
from poketerm.utils.caching import CacheManager

from rich.progress import Progress, BarColumn, MofNCompleteColumn, TimeElapsedColumn

from readchar import readchar

TEST_RUNNING = False

TESTABLE_RESOURCES = {
    "Generation": generation.Generation, # 9
    "Region": region.Region, # 10
    "Egg Group": egg_group.EggGroup, # 15
    "Type": type.Type, # 18
    "Nature": nature.Nature, # 24
    "Pokedex": pokedex.Pokedex, # 33
    "Ability": ability.Ability, # 307
    # "Location": location.Location, # 867
    "Move": move.Move, # 919
    "Pokemon": pokemon.Pokemon, # 1025
    # "Machine": machine.Machine, # 1688
    "Item": item.Item, # 2159
    # "Item": item.Item, # 2159
    # "Machine": machine.Machine, # 1688
    # "Location": location.Location, # 867
    # "Pokedex": pokedex.Pokedex, # 33
    # "Region": region.Region, # 10
}
progress = Progress(
@@ -35,11 +55,28 @@ def handle_resource_test(resource, taskID):


def handle_single_query(resource, taskID, query):
-    SearchManager.handle_search_and_cast(resource, query)
+    res = SearchManager.handle_search_and_cast(resource, query)
+    SearchManager.update_valid_names(res)
    progress.update(taskID, advance=1)


async def handle_resource_async(resource, taskID):
    async with aiohttp.ClientSession() as session:
        for i in range(1, resource.MAX_COUNT + 1):
            await handle_query_async(resource, taskID, i, session)


async def handle_query_async(resource, taskID, query, session):
    data = await obtain_data_async(resource.ENDPOINT, query, session)
    res = resource(data)
    SearchManager.update_valid_names(res)
    CacheManager.cache_resource(res)
    progress.update(taskID, advance=1)


def handle_cache_test():
    global TEST_RUNNING
    TEST_RUNNING = False
    console.clear()
    console.rule("Cache Test", style="white")

@@ -52,6 +89,26 @@ def handle_cache_test():
                executor.submit(handle_resource_test, resource, taskID)
    _ = readchar()

    CacheManager.save_mappings()
    SearchManager.save_valid_names()


async def handle_test_async():
    global TEST_RUNNING
    TEST_RUNNING = False
    console.clear()
    console.rule("Async Cache Test", style="white")

    with progress:
        tasks = []
        for resource in TESTABLE_RESOURCES.values():
            taskID = progress.add_task(
                f"Loading {resource.ENDPOINT} info...", total=resource.MAX_COUNT
            )
            tasks.append(handle_resource_async(resource, taskID))
        await asyncio.gather(*tasks)


if __name__ == "__main__":
-    handle_cache_test()
+    # handle_cache_test()
+    asyncio.run(handle_test_async())
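The async test driver above fans out one coroutine per resource and advances a shared rich progress bar from each of them. A self-contained sketch of that pattern; the column setup and the resource counts are assumptions (the actual Progress(...) call is collapsed in this diff), with asyncio.sleep standing in for the API fetches:

import asyncio

from rich.progress import BarColumn, MofNCompleteColumn, Progress, TimeElapsedColumn

progress = Progress(
    "{task.description}", BarColumn(), MofNCompleteColumn(), TimeElapsedColumn()
)


async def load_resource(name: str, count: int, task_id) -> None:
    for _ in range(count):
        await asyncio.sleep(0.01)  # stand-in for one API call plus cache write
        progress.update(task_id, advance=1)


async def run() -> None:
    with progress:
        tasks = []
        for name, count in {"egg-group": 15, "type": 18}.items():  # illustrative counts
            task_id = progress.add_task(f"Loading {name} info...", total=count)
            tasks.append(load_resource(name, count, task_id))
        await asyncio.gather(*tasks)


asyncio.run(run())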
9 changes: 1 addition & 8 deletions pyproject.toml
@@ -13,14 +13,7 @@ rich = "13.7.0"
readchar = "4.0.5"
thefuzz = "0.22.1"
pytest = "8.0.0"
-
-[tool.poetry.group.dev.dependencies]
-requests = "2.31.0"
-beautifulsoup4 = "4.12.3"
-rich = "13.7.0"
-readchar = "4.0.5"
-thefuzz = "0.22.1"
-pytest = "8.0.0"
+aiohttp = "^3.9.3"

[build-system]
requires = ["poetry-core"]