diff --git a/booklog/cli/add_author.py b/booklog/cli/add_author.py
index 70b73672..090d1b51 100644
--- a/booklog/cli/add_author.py
+++ b/booklog/cli/add_author.py
@@ -6,7 +6,7 @@
from prompt_toolkit.shortcuts import confirm
from booklog.cli import ask
-from booklog.data import api as data_api
+from booklog.repository import api as repository_api
Option = Tuple[Optional[str], AnyFormattedText]
@@ -17,7 +17,7 @@ def prompt() -> None:
if not name:
return
- data_api.create_author(name)
+ repository_api.create_author(name)
def ask_for_name() -> Optional[str]:
diff --git a/booklog/cli/add_reading.py b/booklog/cli/add_reading.py
index 6dd10437..6bc4f62d 100644
--- a/booklog/cli/add_reading.py
+++ b/booklog/cli/add_reading.py
@@ -11,11 +11,10 @@
from prompt_toolkit.validation import Validator
from booklog.cli import ask, radio_list, select_work
-from booklog.data import api as data_api
+from booklog.repository import api as repository_api
Option = Tuple[Optional[str], AnyFormattedText]
-WorkOption = Tuple[Optional[data_api.Work], AnyFormattedText]
Stages = Literal[
"ask_for_work",
@@ -30,8 +29,8 @@
@dataclass(kw_only=True)
class State(object):
stage: Stages = "ask_for_work"
- work: Optional[data_api.Work] = None
- timeline: list[data_api.TimelineEntry] = field(default_factory=list)
+ work: Optional[repository_api.Work] = None
+ timeline: list[repository_api.TimelineEntry] = field(default_factory=list)
edition: Optional[str] = None
grade: Optional[str] = None
@@ -57,11 +56,14 @@ def persist_reading(state: State) -> State:
assert state.timeline
assert state.grade
- data_api.create_reading(
+ repository_api.create_reading(
work=state.work,
edition=state.edition,
timeline=state.timeline,
- grade=state.grade,
+ )
+
+ repository_api.create_or_update_review(
+ work=state.work, date=state.timeline[-1].date, grade=state.grade
)
if confirm("Add another reading?"):
@@ -170,7 +172,7 @@ def ask_for_timeline(state: State) -> State: # noqa: WPS231
continue
state.timeline.append(
- data_api.TimelineEntry(date=timeline_date, progress=progress)
+ repository_api.TimelineEntry(date=timeline_date, progress=progress)
)
if progress in {"Finished", "Abandoned"}:
@@ -207,7 +209,7 @@ def ask_for_edition(state: State) -> State:
def build_edition_options() -> List[Option]:
- editions = data_api.all_editions()
+ editions = repository_api.reading_editions()
options: list[Option] = [
(edition, "{0}".format(html.escape(edition)))
diff --git a/booklog/cli/add_work.py b/booklog/cli/add_work.py
index 1033a3c3..e4737fc3 100644
--- a/booklog/cli/add_work.py
+++ b/booklog/cli/add_work.py
@@ -2,18 +2,13 @@
import html
from dataclasses import dataclass, field
-from typing import Callable, Literal, Optional, Tuple
+from typing import Callable, Literal, Optional
-from prompt_toolkit.formatted_text import AnyFormattedText
from prompt_toolkit.shortcuts import confirm
from booklog.cli import ask, radio_list, select_author, select_work
from booklog.cli.utils.array_to_sentence import array_to_sentence
-from booklog.data import api as data_api
-
-AuthorOption = Tuple[Optional[data_api.AuthorWithWorks], AnyFormattedText]
-
-Option = Tuple[Optional[str], AnyFormattedText]
+from booklog.repository import api as repository_api
Stages = Literal[
"ask_for_authors",
@@ -30,9 +25,9 @@
@dataclass(kw_only=True)
class State(object):
stage: Stages = "ask_for_authors"
- kind: Optional[str] = None
+ kind: Optional[repository_api.WORK_KIND_TYPE] = None
title: Optional[str] = None
- work_authors: list[data_api.WorkAuthor] = field(default_factory=list)
+ work_authors: list[repository_api.WorkAuthor] = field(default_factory=list)
subtitle: Optional[str] = None
year_published: Optional[str] = None
included_works: list[str] = field(default_factory=list)
@@ -61,7 +56,7 @@ def persist_work(state: State) -> State:
assert state.kind
assert state.work_authors
- data_api.create_work(
+ repository_api.create_work(
title=state.title,
work_authors=state.work_authors,
subtitle=state.subtitle,
@@ -70,7 +65,9 @@ def persist_work(state: State) -> State:
included_work_slugs=state.included_works,
)
- author_names = array_to_sentence([author.name for author in state.work_authors])
+ author_names = array_to_sentence(
+ [author.author().name for author in state.work_authors]
+ )
if confirm("Add more works by {0}?".format(author_names)):
state.stage = "ask_for_kind"
@@ -113,12 +110,7 @@ def ask_for_authors(state: State) -> State:
author_notes = None
state.work_authors.append(
- data_api.WorkAuthor(
- slug=author.slug,
- notes=author_notes,
- name=author.name,
- sort_name=author.sort_name,
- )
+ repository_api.WorkAuthor(notes=author_notes, author_slug=author.slug)
)
if not confirm("Add more Authors?"):
@@ -192,7 +184,7 @@ def ask_for_kind(state: State) -> State:
title="Select kind:",
options=[
(kind, "{0}".format(html.escape(kind)))
- for kind in sorted(data_api.WORK_KINDS)
+ for kind in sorted(repository_api.WORK_KINDS)
],
)
diff --git a/booklog/cli/main.py b/booklog/cli/main.py
index 9d54ac94..7e67e260 100644
--- a/booklog/cli/main.py
+++ b/booklog/cli/main.py
@@ -1,6 +1,6 @@
from booklog.cli import add_author, add_reading, add_work, radio_list
-from booklog.data import api as data_api
-from booklog.logger import logger
+from booklog.exports import api as exports_api
+from booklog.utils.logging import logger
@logger.catch
@@ -22,4 +22,4 @@ def prompt() -> None:
def export() -> None:
- data_api.export_data()
+ exports_api.export_data()
diff --git a/booklog/cli/select_author.py b/booklog/cli/select_author.py
index ab235623..927387b0 100644
--- a/booklog/cli/select_author.py
+++ b/booklog/cli/select_author.py
@@ -1,24 +1,25 @@
from __future__ import annotations
import html
-from typing import List, Optional, Tuple
+import itertools
+from typing import Iterable, Optional, Tuple
from prompt_toolkit.formatted_text import AnyFormattedText
from booklog.cli import ask, radio_list
-from booklog.data import api as data_api
+from booklog.repository import api as repository_api
-AuthorOption = Tuple[Optional[data_api.AuthorWithWorks], AnyFormattedText]
+AuthorOption = Tuple[Optional[repository_api.Author], AnyFormattedText]
-def prompt() -> Optional[data_api.AuthorWithWorks]:
+def prompt() -> Optional[repository_api.Author]:
while True:
name = ask.prompt("Author: ")
if not name:
return None
- authors = data_api.search_authors(name)
+        authors = list(search_authors(name))
options: list[AuthorOption] = build_author_options(authors)
@@ -33,15 +34,22 @@ def prompt() -> Optional[data_api.AuthorWithWorks]:
return selected_author
-def format_author_works(author: data_api.AuthorWithWorks) -> str:
- first_three_author_works = author.works[:3]
+def search_authors(query: str) -> Iterable[repository_api.Author]:
+ return filter(
+ lambda author: query.lower() in author.name.lower(),
+ repository_api.authors(),
+ )
+
+
+def format_author_works(author: repository_api.Author) -> str:
+ first_three_author_works = itertools.islice(author.works(), 3)
return ", ".join(html.escape(work.title) for work in first_three_author_works)
def build_author_options(
- authors: list[data_api.AuthorWithWorks],
-) -> List[AuthorOption]:
+ authors: Iterable[repository_api.Author],
+) -> list[AuthorOption]:
if not authors:
return [(None, "Search Again")]
diff --git a/booklog/cli/select_work.py b/booklog/cli/select_work.py
index 60dc651d..77ee18f0 100644
--- a/booklog/cli/select_work.py
+++ b/booklog/cli/select_work.py
@@ -1,27 +1,26 @@
from __future__ import annotations
import html
-from typing import List, Optional, Tuple
+from typing import Iterable, Optional, Tuple
from prompt_toolkit.formatted_text import AnyFormattedText
from prompt_toolkit.shortcuts import confirm
from booklog.cli import ask, radio_list
from booklog.cli.utils.array_to_sentence import array_to_sentence
-from booklog.data import api as data_api
+from booklog.repository import api as repository_api
-WorkOption = Tuple[Optional[data_api.Work], AnyFormattedText]
+WorkOption = Tuple[Optional[repository_api.Work], AnyFormattedText]
-def prompt() -> Optional[data_api.Work]:
+def prompt() -> Optional[repository_api.Work]:
while True:
title = ask.prompt("Title: ")
if title is None:
return None
- works = data_api.search_works(title)
-
+        works = list(search_works(title))
options = build_work_options(works)
selected_work = radio_list.prompt(
@@ -36,9 +35,17 @@ def prompt() -> Optional[data_api.Work]:
return selected_work
+def search_works(query: str) -> Iterable[repository_api.Work]:
+ return filter(
+ lambda work: query.lower()
+ in "{0}: {1}".format(work.title, work.subtitle).lower(),
+ repository_api.works(),
+ )
+
+
def build_work_options(
- works: list[data_api.Work],
-) -> List[WorkOption]:
+ works: Iterable[repository_api.Work],
+) -> list[WorkOption]:
if not works:
return [(None, "Search Again")]
@@ -48,7 +55,10 @@ def build_work_options(
"{0} by {1}".format(
html.escape(work.title),
array_to_sentence(
- [html.escape(author.name) for author in work.authors]
+ [
+ html.escape(work_author.author().name)
+ for work_author in work.work_authors
+ ]
),
),
)
diff --git a/booklog/data/api.py b/booklog/data/api.py
deleted file mode 100644
index dddb0589..00000000
--- a/booklog/data/api.py
+++ /dev/null
@@ -1,53 +0,0 @@
-from __future__ import annotations
-
-from booklog.data.core import api as core_api
-from booklog.data.exports import api as exports_api
-from booklog.data.readings import api as readings_api
-from booklog.data.reviews import api as reviews_api
-
-AuthorWithWorks = core_api.AuthorWithWorks
-
-Work = core_api.Work
-
-Author = core_api.Author
-
-WorkAuthor = core_api.WorkAuthor
-
-Review = reviews_api.Review
-
-Reading = readings_api.Reading
-
-TimelineEntry = readings_api.TimelineEntry
-
-create_author = core_api.create_author
-
-search_authors = core_api.search_authors
-
-search_works = core_api.search_works
-
-create_work = core_api.create_work
-
-all_editions = readings_api.all_editions
-
-WORK_KINDS = core_api.WORK_KINDS
-
-export_data = exports_api.export_data
-
-
-def create_reading(
- work: core_api.Work,
- edition: str,
- timeline: list[TimelineEntry],
- grade: str,
-) -> None:
- readings_api.create_reading(
- work=work,
- edition=edition,
- timeline=timeline,
- )
-
- reviews_api.create_or_update(
- work=work,
- date=timeline[-1].date,
- grade=grade,
- )
diff --git a/booklog/data/core/api.py b/booklog/data/core/api.py
deleted file mode 100644
index bac5aa18..00000000
--- a/booklog/data/core/api.py
+++ /dev/null
@@ -1,23 +0,0 @@
-from __future__ import annotations
-
-from booklog.data.core import json_works, orm, queries
-
-Work = orm.Work
-
-Author = orm.Author
-
-AuthorWithWorks = orm.AuthorWithWorks
-
-WorkAuthor = orm.WorkAuthor
-
-WORK_KINDS = json_works.KINDS
-
-search_authors = queries.search_authors
-
-search_works = queries.search_works
-
-create_work = orm.create_work
-
-create_author = orm.create_author
-
-all_authors_and_works = queries.all_authors_and_works
diff --git a/booklog/data/core/orm.py b/booklog/data/core/orm.py
deleted file mode 100644
index b3667535..00000000
--- a/booklog/data/core/orm.py
+++ /dev/null
@@ -1,146 +0,0 @@
-from __future__ import annotations
-
-from dataclasses import dataclass
-from typing import Optional
-
-from booklog.data.core import json_authors, json_works
-
-
-@dataclass
-class Author(object):
- name: str
- sort_name: str
- slug: str
-
-
-@dataclass
-class AuthorWithWorks(Author):
- works: list[Work]
-
-
-@dataclass
-class WorkAuthor(Author):
- notes: Optional[str]
-
-
-@dataclass
-class Work(object):
- title: str
- subtitle: Optional[str]
- year: str
- sort_title: str
- authors: list[WorkAuthor]
- slug: str
- kind: str
- included_work_slugs: list[str]
- included_in_work_slugs: list[str]
-
-
-def create_work( # noqa: WPS211
- title: str,
- subtitle: Optional[str],
- year: str,
- work_authors: list[WorkAuthor],
- kind: str,
- included_work_slugs: Optional[list[str]] = None,
-) -> Work:
- return hydrate_json_work(
- json_work=json_works.create(
- title=title,
- subtitle=subtitle,
- year=year,
- work_authors=[
- json_works.CreateWorkAuthor(
- slug=work_author.slug, notes=work_author.notes
- )
- for work_author in work_authors
- ],
- kind=kind,
- included_work_slugs=included_work_slugs,
- ),
- all_json_works=json_works.deserialize_all(),
- all_json_authors=json_authors.deserialize_all(),
- )
-
-
-def create_author(
- name: str,
-) -> Author:
- return hydrate_json_author(json_authors.create(name=name))
-
-
-def hydrate_json_work_authors(
- json_work_authors: list[json_works.JsonWorkAuthor],
- all_json_authors: list[json_authors.JsonAuthor],
-) -> list[WorkAuthor]:
- work_authors = []
-
- for work_author in json_work_authors:
- json_author = json_authors.author_for_slug(
- slug=work_author["slug"], all_json_authors=all_json_authors
- )
- work_authors.append(
- WorkAuthor(
- name=json_author["name"],
- slug=json_author["slug"],
- sort_name=json_author["sortName"],
- notes=work_author["notes"],
- )
- )
-
- return work_authors
-
-
-def hydrate_json_work(
- json_work: json_works.JsonWork,
- all_json_authors: list[json_authors.JsonAuthor],
- all_json_works: list[json_works.JsonWork],
-) -> Work:
- return Work(
- title=json_work["title"],
- subtitle=json_work["subtitle"],
- sort_title=json_work["sortTitle"],
- year=json_work["year"],
- authors=hydrate_json_work_authors(
- json_work["authors"], all_json_authors=all_json_authors
- ),
- slug=json_work["slug"],
- kind=json_work["kind"],
- included_work_slugs=json_work["includedWorks"],
- included_in_work_slugs=[
- collection_work["slug"]
- for collection_work in json_works.works_including_work_slug(
- json_work["slug"], all_json_works=all_json_works
- )
- ],
- )
-
-
-def hydrate_json_author(json_author: json_authors.JsonAuthor) -> Author:
- return Author(
- name=json_author["name"],
- slug=json_author["slug"],
- sort_name=json_author["sortName"],
- )
-
-
-def hydrate_json_author_with_works(
- json_author: json_authors.JsonAuthor,
- all_json_authors: list[json_authors.JsonAuthor],
- all_json_works: list[json_works.JsonWork],
-) -> AuthorWithWorks:
- return AuthorWithWorks(
- name=json_author["name"],
- slug=json_author["slug"],
- sort_name=json_author["sortName"],
- works=[
- hydrate_json_work(
- json_work,
- all_json_works=all_json_works,
- all_json_authors=all_json_authors,
- )
- for json_work in json_works.works_for_author_slug(
- slug=json_author["slug"], all_json_works=all_json_works
- )
- ],
- )
diff --git a/booklog/data/core/queries.py b/booklog/data/core/queries.py
deleted file mode 100644
index b6a5798c..00000000
--- a/booklog/data/core/queries.py
+++ /dev/null
@@ -1,88 +0,0 @@
-from __future__ import annotations
-
-from typing import Optional, Tuple
-
-from booklog.data.core import json_authors, json_works, orm
-
-
-def all_authors_and_works() -> Tuple[list[orm.AuthorWithWorks], list[orm.Work]]:
- all_json_authors = json_authors.deserialize_all()
- all_json_works = json_works.deserialize_all()
-
- return (
- all_authors(all_json_authors=all_json_authors, all_json_works=all_json_works),
- all_works(all_json_authors=all_json_authors, all_json_works=all_json_works),
- )
-
-
-def all_works(
- all_json_authors: Optional[list[json_authors.JsonAuthor]] = None,
- all_json_works: Optional[list[json_works.JsonWork]] = None,
-) -> list[orm.Work]:
- all_json_authors = all_json_authors or json_authors.deserialize_all()
- all_json_works = all_json_works or json_works.deserialize_all()
-
- return [
- orm.hydrate_json_work(
- json_work=json_work,
- all_json_authors=all_json_authors,
- all_json_works=all_json_works,
- )
- for json_work in all_json_works
- ]
-
-
-def all_authors(
- all_json_authors: Optional[list[json_authors.JsonAuthor]] = None,
- all_json_works: Optional[list[json_works.JsonWork]] = None,
-) -> list[orm.AuthorWithWorks]:
- all_json_authors = all_json_authors or json_authors.deserialize_all()
- all_json_works = all_json_works or json_works.deserialize_all()
-
- return [
- orm.hydrate_json_author_with_works(
- json_author=json_author,
- all_json_works=all_json_works,
- all_json_authors=all_json_authors,
- )
- for json_author in all_json_authors
- ]
-
-
-def search_authors(query: str) -> list[orm.AuthorWithWorks]:
- all_json_authors = json_authors.deserialize_all()
- all_json_works = json_works.deserialize_all()
-
- filtered_authors = filter(
- lambda json_author: query.lower() in json_author["name"].lower(),
- all_json_authors,
- )
-
- return [
- orm.hydrate_json_author_with_works(
- json_author=json_author,
- all_json_authors=all_json_authors,
- all_json_works=all_json_works,
- )
- for json_author in filtered_authors
- ]
-
-
-def search_works(query: str) -> list[orm.Work]:
- all_json_authors = json_authors.deserialize_all()
- all_json_works = json_works.deserialize_all()
-
- filtered_works = filter(
- lambda json_work: query.lower()
- in "{0}: {1}".format(json_work["title"], json_work["subtitle"]).lower(),
- all_json_works,
- )
-
- return [
- orm.hydrate_json_work(
- json_work=json_work,
- all_json_authors=all_json_authors,
- all_json_works=all_json_works,
- )
- for json_work in filtered_works
- ]
diff --git a/booklog/data/exports/api.py b/booklog/data/exports/api.py
deleted file mode 100644
index 8ad77441..00000000
--- a/booklog/data/exports/api.py
+++ /dev/null
@@ -1,25 +0,0 @@
-from __future__ import annotations
-
-from booklog.data.core import api as core_api
-from booklog.data.exports import (
- authors,
- reading_progress,
- reading_stats,
- reviewed_works,
- unreviewed_works,
-)
-from booklog.data.readings import api as readings_api
-from booklog.data.reviews import api as reviews_api
-
-
-def export_data() -> None:
- (all_authors, all_works) = core_api.all_authors_and_works()
-
- all_readings = readings_api.all_readings(all_works=all_works)
- all_reviews = reviews_api.all_reviews(all_works=all_works)
-
- authors.export(authors=all_authors, reviews=all_reviews)
- unreviewed_works.export(works=all_works, reviews=all_reviews)
- reviewed_works.export(readings=all_readings, reviews=all_reviews)
- reading_progress.export(readings=all_readings, reviews=all_reviews)
- reading_stats.export(readings=all_readings, reviews=all_reviews)
diff --git a/booklog/data/exports/reading_progress.py b/booklog/data/exports/reading_progress.py
deleted file mode 100644
index f38cdf2a..00000000
--- a/booklog/data/exports/reading_progress.py
+++ /dev/null
@@ -1,81 +0,0 @@
-import datetime
-from typing import TypedDict
-
-from booklog.data.exports.utils import export_tools
-from booklog.data.readings.api import Reading, TimelineEntry
-from booklog.data.reviews.api import Review
-from booklog.logger import logger
-
-JsonReadingProgressAuthor = TypedDict(
- "JsonReadingProgressAuthor",
- {
- "name": str,
- },
-)
-
-
-JsonReadingProgress = TypedDict(
- "JsonReadingProgress",
- {
- "sequence": str,
- "slug": str,
- "edition": str,
- "date": datetime.date,
- "progress": str,
- "reviewed": bool,
- "readingYear": int,
- "yearPublished": str,
- "title": str,
- "kind": str,
- "authors": list[JsonReadingProgressAuthor],
- "includedInSlugs": list[str],
- },
-)
-
-
-def build_json_reading_progress(
- reading: Reading,
- timeline_entry: TimelineEntry,
- reviewed: bool,
-) -> JsonReadingProgress:
- return JsonReadingProgress(
- sequence="{0}-{1}".format(timeline_entry.date, reading.sequence),
- slug=reading.work.slug,
- edition=reading.edition,
- kind=reading.work.kind,
- date=timeline_entry.date,
- progress=timeline_entry.progress,
- reviewed=reviewed,
- yearPublished=reading.work.year,
- title=reading.work.title,
- readingYear=timeline_entry.date.year,
- authors=[
- JsonReadingProgressAuthor(name=work_author.name)
- for work_author in reading.work.authors
- ],
- includedInSlugs=reading.work.included_in_work_slugs,
- )
-
-
-def export(
- readings: list[Reading],
- reviews: list[Review],
-) -> None:
- logger.log("==== Begin exporting {}...", "reading_progress")
-
- reviewed_work_slugs = {review.work.slug for review in reviews}
-
- json_progress = [
- build_json_reading_progress(
- reading=reading,
- timeline_entry=timeline_entry,
- reviewed=reading.work.slug in reviewed_work_slugs,
- )
- for reading in readings
- for timeline_entry in reading.timeline
- ]
-
- export_tools.serialize_dicts(
- sorted(json_progress, key=lambda progress: progress["sequence"], reverse=True),
- "reading_progress",
- )
diff --git a/booklog/data/exports/unreviewed_works.py b/booklog/data/exports/unreviewed_works.py
deleted file mode 100644
index 2b30352f..00000000
--- a/booklog/data/exports/unreviewed_works.py
+++ /dev/null
@@ -1,63 +0,0 @@
-from typing import Optional, TypedDict
-
-from booklog.data.core.api import Work
-from booklog.data.exports.utils import export_tools
-from booklog.data.reviews.api import Review
-from booklog.logger import logger
-
-JsonWorkAuthor = TypedDict(
- "JsonWorkAuthor",
- {"name": str, "sortName": str, "slug": str, "notes": Optional[str]},
-)
-
-JsonUnreviewedWork = TypedDict(
- "JsonUnreviewedWork",
- {
- "slug": str,
- "includedInSlugs": list[str],
- "title": str,
- "subtitle": Optional[str],
- "sortTitle": str,
- "yearPublished": str,
- "authors": list[JsonWorkAuthor],
- "kind": str,
- },
-)
-
-
-def export(
- works: list[Work],
- reviews: list[Review],
-) -> None:
- logger.log("==== Begin exporting {}...", "unreviewed works")
-
- reviewed_work_slugs = {review.work.slug for review in reviews}
-
- json_unreviewed_works = [
- JsonUnreviewedWork(
- slug=work.slug,
- title=work.title,
- subtitle=work.subtitle,
- sortTitle=work.sort_title,
- yearPublished=work.year,
- kind=work.kind,
- authors=[
- JsonWorkAuthor(
- name=work_author.name,
- sortName=work_author.sort_name,
- slug=work_author.slug,
- notes=work_author.notes,
- )
- for work_author in work.authors
- ],
- includedInSlugs=work.included_in_work_slugs,
- )
- for work in works
- if work.slug not in reviewed_work_slugs
- ]
-
- export_tools.serialize_dicts_to_folder(
- json_unreviewed_works,
- "unreviewed_works",
- filename_key=lambda work: work["slug"],
- )
diff --git a/booklog/data/exports/utils/format_tools.py b/booklog/data/exports/utils/format_tools.py
deleted file mode 100644
index 6a7438eb..00000000
--- a/booklog/data/exports/utils/format_tools.py
+++ /dev/null
@@ -1,22 +0,0 @@
-import re
-from typing import Union
-
-
-def pretty_file_size(num: float, suffix: str = "B") -> str:
- for unit in ("", "Ki", "Mi", "Gi", "Ti", "Pi", "Ei", "Zi"):
- if abs(num) < 1024.0: # noqa: WPS459
- return "{0:.1f}{1}{2}".format(num, unit, suffix)
- num /= 1024.0
- return "{0:.1f}{1}{2}".format(num, "Yi", suffix)
-
-
-def humanize_int(value: Union[int, str]) -> str: # noqa: WPS110
- # lifted from https://github.com/jmoiron/humanize/blob/master/src/humanize/number.py
- """Converts an integer to a string containing commas every three digits.
- For example, 3000 becomes '3,000' and 45000 becomes '45,000'."""
- orig = str(value)
- new = re.sub(r"^(-?\d+)(\d{3})", r"\g<1>,\g<2>", orig)
- if orig == new:
- return new
-
- return humanize_int(new)
diff --git a/booklog/data/readings/api.py b/booklog/data/readings/api.py
deleted file mode 100644
index 53b368e9..00000000
--- a/booklog/data/readings/api.py
+++ /dev/null
@@ -1,15 +0,0 @@
-from __future__ import annotations
-
-from booklog.data.readings import json_readings, orm, queries
-
-TimelineEntry = orm.TimelineEntry
-
-Reading = orm.Reading
-
-all_editions = queries.all_editions
-
-all_readings = queries.all_readings
-
-create_reading = orm.create_reading
-
-SequenceError = json_readings.SequenceError
diff --git a/booklog/data/readings/orm.py b/booklog/data/readings/orm.py
deleted file mode 100644
index e4f25ea2..00000000
--- a/booklog/data/readings/orm.py
+++ /dev/null
@@ -1,63 +0,0 @@
-from __future__ import annotations
-
-import datetime
-from dataclasses import dataclass
-from typing import Optional
-
-from booklog.data.core import api as core_api
-from booklog.data.readings import json_readings
-
-
-@dataclass
-class TimelineEntry(object):
- date: datetime.date
- progress: str
-
-
-@dataclass(kw_only=True)
-class Reading(object):
- sequence: int
- edition: str
- timeline: list[TimelineEntry]
- edition_notes: Optional[str] = None
- work: core_api.Work
-
-
-def create_reading(
- work: core_api.Work,
- timeline: list[TimelineEntry],
- edition: str,
-) -> Reading:
- return hydrate_json_reading(
- json_reading=json_readings.create(
- work_slug=work.slug,
- timeline=[
- json_readings.JsonTimelineEntry(
- date=datetime.date.isoformat(timeline_entry.date),
- progress=timeline_entry.progress,
- )
- for timeline_entry in timeline
- ],
- edition=edition,
- ),
- work=work,
- )
-
-
-def hydrate_json_reading(
- json_reading: json_readings.JsonReading,
- work: core_api.Work,
-) -> Reading:
- return Reading(
- sequence=json_reading["sequence"],
- timeline=[
- TimelineEntry(
- date=datetime.date.fromisoformat(json_timeline_entry["date"]),
- progress=json_timeline_entry["progress"],
- )
- for json_timeline_entry in json_reading["timeline"]
- ],
- edition=json_reading["edition"],
- edition_notes=json_reading["edition_notes"],
- work=work,
- )
diff --git a/booklog/data/readings/queries.py b/booklog/data/readings/queries.py
deleted file mode 100644
index fa286cbf..00000000
--- a/booklog/data/readings/queries.py
+++ /dev/null
@@ -1,24 +0,0 @@
-from typing import Sequence
-
-from booklog.data.core import api as core_api
-from booklog.data.readings import json_readings, orm
-
-
-def all_editions() -> Sequence[str]:
- readings = json_readings.deserialize_all()
-
- return sorted(set([reading["edition"] for reading in readings]))
-
-
-def all_readings(
- all_works: list[core_api.Work],
-) -> list[orm.Reading]:
- return [
- orm.hydrate_json_reading(
- json_reading=json_reading,
- work=next(
- work for work in all_works if work.slug == json_reading["work_slug"]
- ),
- )
- for json_reading in json_readings.deserialize_all()
- ]
diff --git a/booklog/data/reviews/api.py b/booklog/data/reviews/api.py
deleted file mode 100644
index dfbd5deb..00000000
--- a/booklog/data/reviews/api.py
+++ /dev/null
@@ -1,9 +0,0 @@
-from __future__ import annotations
-
-from booklog.data.reviews import orm, queries
-
-Review = orm.Review
-
-all_reviews = queries.all_reviews
-
-create_or_update = orm.create_or_update
diff --git a/booklog/data/reviews/orm.py b/booklog/data/reviews/orm.py
deleted file mode 100644
index 6580b6dd..00000000
--- a/booklog/data/reviews/orm.py
+++ /dev/null
@@ -1,68 +0,0 @@
-from __future__ import annotations
-
-import datetime
-from dataclasses import dataclass
-from typing import Optional, cast
-
-from booklog.data.core import api as core_api
-from booklog.data.reviews import markdown_reviews
-
-
-@dataclass
-class Review(object):
- work: core_api.Work
- date: datetime.date
- grade: str
- review_content: Optional[str] = None
-
- @property
- def grade_value(self) -> int:
- if self.grade == "Abandoned":
- return 0
-
- value_modifier = 1
-
- grade_map = {
- "A": 12,
- "B": 9,
- "C": 6,
- "D": 3,
- }
-
- grade_value = grade_map.get(self.grade[0], 1)
- modifier = self.grade[-1]
-
- if modifier == "+":
- grade_value += value_modifier
-
- if modifier == "-":
- grade_value -= value_modifier
-
- return grade_value
-
-
-def create_or_update(
- work: core_api.Work,
- date: datetime.date,
- grade: str = "Abandoned",
-) -> Review:
- return hydrate_markdown_review(
- markdown_review=markdown_reviews.create_or_update(
- work_slug=work.slug,
- date=datetime.date.isoformat(date),
- grade=grade,
- ),
- work=work,
- )
-
-
-def hydrate_markdown_review(
- markdown_review: markdown_reviews.MarkdownReview,
- work: core_api.Work,
-) -> Review:
- return Review(
- work=work,
- date=cast(datetime.date, markdown_review.yaml["date"]),
- grade=markdown_review.yaml["grade"],
- review_content=markdown_review.review_content,
- )
diff --git a/booklog/data/reviews/queries.py b/booklog/data/reviews/queries.py
deleted file mode 100644
index 96fea52f..00000000
--- a/booklog/data/reviews/queries.py
+++ /dev/null
@@ -1,18 +0,0 @@
-from booklog.data.core import api as core_api
-from booklog.data.reviews import markdown_reviews, orm
-
-
-def all_reviews(
- all_works: list[core_api.Work],
-) -> list[orm.Review]:
- return [
- orm.hydrate_markdown_review(
- markdown_review=markdown_review,
- work=next(
- work
- for work in all_works
- if work.slug == markdown_review.yaml["work_slug"]
- ),
- )
- for markdown_review in markdown_reviews.deserialize_all()
- ]
diff --git a/booklog/data/utils/__init__.py b/booklog/data/utils/__init__.py
deleted file mode 100644
index e69de29b..00000000
diff --git a/booklog/data/__init__.py b/booklog/exports/__init__.py
similarity index 100%
rename from booklog/data/__init__.py
rename to booklog/exports/__init__.py
diff --git a/booklog/exports/api.py b/booklog/exports/api.py
new file mode 100644
index 00000000..91944b43
--- /dev/null
+++ b/booklog/exports/api.py
@@ -0,0 +1,33 @@
+from __future__ import annotations
+
+from booklog.exports import (
+ authors,
+ reading_stats,
+ reading_timeline_entries,
+ reviewed_works,
+ unreviewed_works,
+)
+from booklog.repository import api as repository_api
+
+
+def export_data() -> None:
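+    # Load the repository data into lists so it can be reused across exports.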
+ all_authors = list(repository_api.authors())
+ all_works = list(repository_api.works())
+ all_reviews = list(repository_api.reviews())
+ all_readings = list(repository_api.readings())
+
+ authors.export(
+ all_authors=all_authors, all_works=all_works, all_reviews=all_reviews
+ )
+ reviewed_works.export(
+ all_works=all_works,
+ all_authors=all_authors,
+ all_reviews=all_reviews,
+ all_readings=all_readings,
+ )
+ reading_timeline_entries.export(readings=repository_api.readings())
+ unreviewed_works.export(works=repository_api.works())
+ reading_stats.export(
+ readings=repository_api.readings(), reviews=repository_api.reviews()
+ )
diff --git a/booklog/data/exports/authors.py b/booklog/exports/authors.py
similarity index 52%
rename from booklog/data/exports/authors.py
rename to booklog/exports/authors.py
index e2c4c8b3..7d627c29 100644
--- a/booklog/data/exports/authors.py
+++ b/booklog/exports/authors.py
@@ -1,15 +1,8 @@
from typing import Optional, TypedDict
-from booklog.data.core.api import AuthorWithWorks, Work
-from booklog.data.exports.utils import export_tools, list_tools
-from booklog.data.reviews.orm import Review
-from booklog.logger import logger
-
-JsonWorkAuthor = TypedDict(
- "JsonWorkAuthor",
- {"name": str, "sortName": str, "slug": str, "notes": Optional[str]},
-)
-
+from booklog.exports import exporter, json_work_author
+from booklog.repository import api as repository_api
+from booklog.utils.logging import logger
JsonAuthorWork = TypedDict(
"JsonAuthorWork",
@@ -21,7 +14,7 @@
"sortTitle": str,
"grade": Optional[str],
"gradeValue": Optional[int],
- "authors": list[JsonWorkAuthor],
+ "authors": list[json_work_author.JsonWorkAuthor],
"reviewed": bool,
"includedInSlugs": list[str],
},
@@ -41,8 +34,10 @@
def build_json_author_work(
- work: Work,
- review: Optional[Review],
+ work: repository_api.Work,
+ review: Optional[repository_api.Review],
+ all_works: list[repository_api.Work],
+ all_authors: list[repository_api.Author],
) -> JsonAuthorWork:
return JsonAuthorWork(
title=work.title,
@@ -54,24 +49,24 @@ def build_json_author_work(
grade=review.grade if review else None,
gradeValue=review.grade_value if review else None,
authors=[
- JsonWorkAuthor(
- slug=work_author.slug,
- notes=work_author.notes,
- name=work_author.name,
- sortName=work_author.sort_name,
+ json_work_author.build_json_work_author(
+ work_author=work_author, all_authors=all_authors
)
- for work_author in work.authors
+ for work_author in work.work_authors
],
- includedInSlugs=work.included_in_work_slugs,
+ includedInSlugs=[work.slug for work in work.included_in_works(all_works)],
)
def build_json_author(
- author: AuthorWithWorks,
- reviews_by_slug: dict[str, Review],
+ author: repository_api.Author,
+ all_works: list[repository_api.Work],
+ all_reviews: list[repository_api.Review],
+ all_authors: list[repository_api.Author],
) -> JsonAuthor:
+ author_works = list(author.works(all_works))
reviewed_work_count = len(
- {author_work.slug for author_work in author.works} & reviews_by_slug.keys()
+ [author_work for author_work in author_works if author_work.review(all_reviews)]
)
return JsonAuthor(
@@ -81,34 +76,35 @@ def build_json_author(
works=[
build_json_author_work(
work=work,
- review=reviews_by_slug.get(work.slug),
+ review=work.review(all_reviews),
+ all_works=all_works,
+ all_authors=all_authors,
)
- for work in author.works
+ for work in author_works
],
reviewedWorkCount=reviewed_work_count,
- workCount=len(author.works),
+ workCount=len(author_works),
)
def export(
- authors: list[AuthorWithWorks],
- reviews: list[Review],
+ all_authors: list[repository_api.Author],
+ all_works: list[repository_api.Work],
+ all_reviews: list[repository_api.Review],
) -> None:
logger.log("==== Begin exporting {}...", "authors")
- reviews_by_slug = list_tools.list_to_dict_by_key(
- reviews, lambda review: review.work.slug
- )
-
json_authors = [
build_json_author(
author=author,
- reviews_by_slug=reviews_by_slug,
+ all_works=all_works,
+ all_reviews=all_reviews,
+ all_authors=all_authors,
)
- for author in authors
+ for author in all_authors
]
- export_tools.serialize_dicts_to_folder(
+ exporter.serialize_dicts_to_folder(
json_authors,
"authors",
filename_key=lambda json_author: json_author["slug"],
diff --git a/booklog/data/exports/utils/export_tools.py b/booklog/exports/exporter.py
similarity index 73%
rename from booklog/data/exports/utils/export_tools.py
rename to booklog/exports/exporter.py
index be24f028..c70941f9 100644
--- a/booklog/data/exports/utils/export_tools.py
+++ b/booklog/exports/exporter.py
@@ -3,8 +3,7 @@
from collections.abc import Iterable
from typing import Callable, TypeVar
-from booklog.data.exports.utils import format_tools
-from booklog.logger import logger
+from booklog.utils.logging import logger
DataClassType = TypeVar("DataClassType")
DictType = TypeVar("DictType")
@@ -23,7 +22,7 @@ def serialize_dicts(dicts: Iterable[DictType], file_name: str) -> None:
logger.log(
"Wrote {} ({}).",
json_file_name,
- format_tools.pretty_file_size(os.path.getsize(json_file_name)),
+ pretty_file_size(os.path.getsize(json_file_name)),
)
@@ -44,5 +43,13 @@ def serialize_dicts_to_folder(
logger.log(
"Wrote {} ({}).",
file_name,
- format_tools.pretty_file_size(os.path.getsize(file_name)),
+ pretty_file_size(os.path.getsize(file_name)),
)
+
+
+def pretty_file_size(num: float, suffix: str = "B") -> str:
+ for unit in ("", "Ki", "Mi", "Gi", "Ti", "Pi", "Ei", "Zi"):
+ if abs(num) < 1024.0: # noqa: WPS459
+ return "{0:.1f}{1}{2}".format(num, unit, suffix)
+ num /= 1024.0
+ return "{0:.1f}{1}{2}".format(num, "Yi", suffix)
diff --git a/booklog/exports/json_work_author.py b/booklog/exports/json_work_author.py
new file mode 100644
index 00000000..b9812be6
--- /dev/null
+++ b/booklog/exports/json_work_author.py
@@ -0,0 +1,22 @@
+from typing import Optional, TypedDict
+
+from booklog.repository import api as repository_api
+
+JsonWorkAuthor = TypedDict(
+ "JsonWorkAuthor",
+ {"name": str, "sortName": str, "slug": str, "notes": Optional[str]},
+)
+
+
+def build_json_work_author(
+    work_author: repository_api.WorkAuthor,
+    all_authors: Optional[list[repository_api.Author]] = None,
+) -> JsonWorkAuthor:
+ author = work_author.author(all_authors)
+
+ return JsonWorkAuthor(
+ slug=author.slug,
+ notes=work_author.notes,
+ name=author.name,
+ sortName=author.sort_name,
+ )
diff --git a/booklog/data/exports/utils/list_tools.py b/booklog/exports/list_tools.py
similarity index 100%
rename from booklog/data/exports/utils/list_tools.py
rename to booklog/exports/list_tools.py
diff --git a/booklog/data/exports/reading_stats.py b/booklog/exports/reading_stats.py
similarity index 75%
rename from booklog/data/exports/reading_stats.py
rename to booklog/exports/reading_stats.py
index 3593365b..827e19bc 100644
--- a/booklog/data/exports/reading_stats.py
+++ b/booklog/exports/reading_stats.py
@@ -2,11 +2,9 @@
from datetime import date
from typing import Callable, Iterable, TypedDict, TypeVar
-from booklog.data.core.api import Work
-from booklog.data.exports.utils import export_tools, list_tools
-from booklog.data.readings.api import Reading
-from booklog.data.reviews.api import Review
-from booklog.logger import logger
+from booklog.exports import exporter, list_tools
+from booklog.repository.api import Reading, Review, Work
+from booklog.utils.logging import logger
JsonMostReadAuthorReading = TypedDict(
"JsonMostReadAuthorReading",
@@ -85,36 +83,44 @@ def group_readings_by_author(
readings_by_author: dict[str, list[Reading]] = defaultdict(list)
for reading in readings:
- for work_author in reading.work.authors:
- readings_by_author[work_author.slug].append(reading)
+ for work_author in reading.work().work_authors:
+ readings_by_author[work_author.author_slug].append(reading)
return readings_by_author
+def build_json_most_read_author_reading(reading: Reading) -> JsonMostReadAuthorReading:
+ work = reading.work()
+
+ return JsonMostReadAuthorReading(
+ sequence=reading.sequence,
+ date=date_finished_or_abandoned(reading=reading),
+ slug=work.slug,
+ edition=reading.edition,
+ kind=work.kind,
+ title=work.title,
+ yearPublished=work.year,
+ includedInSlugs=[
+ included_in_work.slug for included_in_work in work.included_in_works()
+ ],
+ )
+
+
def build_most_read_authors(readings: list[Reading]) -> list[JsonMostReadAuthor]:
readings_by_author = group_readings_by_author(readings=readings)
return [
JsonMostReadAuthor(
name=next(
- reading_author.name
+ reading_work_author.author().name
for reading in readings
- for reading_author in reading.work.authors
- if reading_author.slug == author_slug
+ for reading_work_author in reading.work().work_authors
+ if reading_work_author.author_slug == author_slug
),
count=len(readings),
slug=author_slug,
readings=[
- JsonMostReadAuthorReading(
- sequence=reading.sequence,
- date=date_finished_or_abandoned(reading=reading),
- slug=reading.work.slug,
- edition=reading.edition,
- kind=reading.work.kind,
- title=reading.work.title,
- yearPublished=reading.work.year,
- includedInSlugs=reading.work.included_in_work_slugs,
- )
+ build_json_most_read_author_reading(reading=reading)
for reading in readings
],
)
@@ -142,7 +148,7 @@ def build_decade_distribution(works: list[Work]) -> list[JsonDistribution]:
def book_count(readings: list[Reading]) -> int:
- works = [reading.work for reading in readings]
+ works = [reading.work() for reading in readings]
return len([work for work in works if work.kind not in {"Short Story", "Novella"}])
@@ -153,7 +159,7 @@ def build_json_reading_stats(
reviews: list[Review],
most_read_authors: list[JsonMostReadAuthor],
) -> JsonReadingStats:
- works = [reading.work for reading in readings]
+ works = [reading.work() for reading in readings]
return JsonReadingStats(
span=span,
@@ -169,26 +175,29 @@ def build_json_reading_stats(
def export(
- readings: list[Reading],
- reviews: list[Review],
+ readings: Iterable[Reading],
+ reviews: Iterable[Review],
) -> None:
logger.log("==== Begin exporting {}...", "reading_stats")
+ all_readings = list(readings)
+ all_reviews = list(reviews)
+
json_reading_stats = [
build_json_reading_stats(
span="all-time",
- reviews=reviews,
- readings=readings,
- most_read_authors=build_most_read_authors(readings=readings),
+ reviews=all_reviews,
+ readings=all_readings,
+ most_read_authors=build_most_read_authors(readings=all_readings),
)
]
reviews_by_year = list_tools.group_list_by_key(
- reviews, lambda review: str(review.date.year)
+ all_reviews, lambda review: str(review.date.year)
)
readings_by_year = list_tools.group_list_by_key(
- readings,
+ all_readings,
lambda reading: str(date_finished_or_abandoned(reading).year),
)
@@ -198,11 +207,11 @@ def export(
span=year,
reviews=reviews_by_year[year],
readings=readings_for_year,
- most_read_authors=build_most_read_authors(readings=readings),
+ most_read_authors=build_most_read_authors(readings=readings_for_year),
)
)
- export_tools.serialize_dicts_to_folder(
+ exporter.serialize_dicts_to_folder(
json_reading_stats,
"reading_stats",
filename_key=lambda stats: stats["span"],
diff --git a/booklog/exports/reading_timeline_entries.py b/booklog/exports/reading_timeline_entries.py
new file mode 100644
index 00000000..0640bb5f
--- /dev/null
+++ b/booklog/exports/reading_timeline_entries.py
@@ -0,0 +1,80 @@
+import datetime
+from typing import Iterable, TypedDict
+
+from booklog.exports import exporter
+from booklog.repository.api import Reading, TimelineEntry
+from booklog.utils.logging import logger
+
+ExportsReadingTimelineEntryAuthor = TypedDict(
+ "ExportsReadingTimelineEntryAuthor",
+ {
+ "name": str,
+ },
+)
+
+
+ExportsReadingTimelineEntry = TypedDict(
+ "ExportsReadingTimelineEntry",
+ {
+ "sequence": str,
+ "slug": str,
+ "edition": str,
+ "date": datetime.date,
+ "progress": str,
+ "reviewed": bool,
+ "readingYear": int,
+ "yearPublished": str,
+ "title": str,
+ "kind": str,
+ "authors": list[ExportsReadingTimelineEntryAuthor],
+ "includedInSlugs": list[str],
+ },
+)
+
+
+def build_json_reading_progress(
+ reading: Reading,
+ timeline_entry: TimelineEntry,
+) -> ExportsReadingTimelineEntry:
+ work = reading.work()
+ reviewed = bool(work.review())
+
+ return ExportsReadingTimelineEntry(
+ sequence="{0}-{1}".format(timeline_entry.date, reading.sequence),
+ slug=work.slug,
+ edition=reading.edition,
+ kind=work.kind,
+ date=timeline_entry.date,
+ progress=timeline_entry.progress,
+ reviewed=reviewed,
+ yearPublished=work.year,
+ title=work.title,
+ readingYear=timeline_entry.date.year,
+ authors=[
+ ExportsReadingTimelineEntryAuthor(name=work_author.author().name)
+ for work_author in work.work_authors
+ ],
+ includedInSlugs=[
+ included_in_work.slug for included_in_work in work.included_in_works()
+ ],
+ )
+
+
+def export(
+ readings: Iterable[Reading],
+) -> None:
+ logger.log("==== Begin exporting {}...", "reading_timeline_entries")
+
+ json_progress = [
+ build_json_reading_progress(
+ reading=reading,
+ timeline_entry=timeline_entry,
+ )
+ for reading in readings
+ for timeline_entry in reading.timeline
+ ]
+
+ exporter.serialize_dicts(
+ sorted(json_progress, key=lambda progress: progress["sequence"], reverse=True),
+ "reading-timeline-entries",
+ )
diff --git a/booklog/data/exports/reviewed_works.py b/booklog/exports/reviewed_works.py
similarity index 61%
rename from booklog/data/exports/reviewed_works.py
rename to booklog/exports/reviewed_works.py
index 30a74a52..51622b79 100644
--- a/booklog/data/exports/reviewed_works.py
+++ b/booklog/exports/reviewed_works.py
@@ -1,15 +1,9 @@
import datetime
from typing import Optional, TypedDict
-from booklog.data.exports.utils import export_tools, list_tools
-from booklog.data.readings.api import Reading
-from booklog.data.reviews.api import Review
-from booklog.logger import logger
-
-JsonWorkAuthor = TypedDict(
- "JsonWorkAuthor",
- {"name": str, "sortName": str, "slug": str, "notes": Optional[str]},
-)
+from booklog.exports import exporter, json_work_author
+from booklog.repository import api as repository_api
+from booklog.utils.logging import logger
JsonTimelineEntry = TypedDict(
"JsonTimelineEntry",
@@ -41,7 +35,7 @@
"subtitle": Optional[str],
"sortTitle": str,
"yearPublished": str,
- "authors": list[JsonWorkAuthor],
+ "authors": list[json_work_author.JsonWorkAuthor],
"grade": str,
"gradeValue": int,
"kind": str,
@@ -52,7 +46,7 @@
)
-def build_json_reading(reading: Reading) -> JsonReading:
+def build_json_reading(reading: repository_api.Reading) -> JsonReading:
first_timeline_entry = sorted(reading.timeline, key=lambda entry: entry.date)[0]
last_timeline_entry = sorted(
@@ -79,63 +73,69 @@ def build_json_reading(reading: Reading) -> JsonReading:
def build_json_reviewed_work(
- readings: list[Reading],
- review: Review,
+ work: repository_api.Work,
+ readings_for_work: list[repository_api.Reading],
+ review: repository_api.Review,
+ all_authors: list[repository_api.Author],
+ all_works: list[repository_api.Work],
) -> JsonReviewedWork:
most_recent_reading = sorted(
- readings, key=lambda reading: reading.sequence, reverse=True
+ readings_for_work, key=lambda reading: reading.sequence, reverse=True
)[0]
return JsonReviewedWork(
sequence=most_recent_reading.sequence,
- slug=review.work.slug,
- title=review.work.title,
- subtitle=review.work.subtitle,
- sortTitle=review.work.sort_title,
- yearPublished=review.work.year,
+ slug=work.slug,
+ title=work.title,
+ subtitle=work.subtitle,
+ sortTitle=work.sort_title,
+ yearPublished=work.year,
grade=review.grade,
gradeValue=review.grade_value,
- kind=review.work.kind,
+ kind=work.kind,
date=review.date,
authors=[
- JsonWorkAuthor(
- name=work_author.name,
- sortName=work_author.sort_name,
- slug=work_author.slug,
- notes=work_author.notes,
+ json_work_author.build_json_work_author(
+ work_author=work_author, all_authors=all_authors
)
- for work_author in review.work.authors
+ for work_author in work.work_authors
+ ],
+ readings=[build_json_reading(reading) for reading in readings_for_work],
+ includedInSlugs=[
+ included_in_work.slug
+ for included_in_work in work.included_in_works(all_works)
],
- readings=[build_json_reading(reading) for reading in readings],
- includedInSlugs=review.work.included_in_work_slugs,
yearReviewed=review.date.year,
)
def export(
- readings: list[Reading],
- reviews: list[Review],
+ all_reviews: list[repository_api.Review],
+ all_works: list[repository_api.Work],
+ all_authors: list[repository_api.Author],
+ all_readings: list[repository_api.Reading],
) -> None:
logger.log("==== Begin exporting {}...", "reviewed_works")
json_reviewed_works = []
- readings_by_work_slug = list_tools.group_list_by_key(
- readings, lambda reading: reading.work.slug
- )
-
- for review in reviews:
- if not readings_by_work_slug[review.work.slug]:
+ for review in all_reviews:
+ work = review.work(all_works)
+ readings_for_work = list(work.readings(all_readings))
+ if not readings_for_work:
continue
json_reviewed_works.append(
build_json_reviewed_work(
- readings=readings_by_work_slug[review.work.slug],
+ work=work,
+ readings_for_work=readings_for_work,
review=review,
+ all_authors=all_authors,
+ all_works=all_works,
)
)
- export_tools.serialize_dicts_to_folder(
+ exporter.serialize_dicts_to_folder(
json_reviewed_works,
"reviewed_works",
filename_key=lambda work: work["slug"],
diff --git a/booklog/exports/unreviewed_works.py b/booklog/exports/unreviewed_works.py
new file mode 100644
index 00000000..4b18ff86
--- /dev/null
+++ b/booklog/exports/unreviewed_works.py
@@ -0,0 +1,51 @@
+from typing import Iterable, Optional, TypedDict
+
+from booklog.exports import exporter, json_work_author
+from booklog.repository.api import Work
+from booklog.utils.logging import logger
+
+ExportsUnreviewedWork = TypedDict(
+ "ExportsUnreviewedWork",
+ {
+ "slug": str,
+ "includedInSlugs": list[str],
+ "title": str,
+ "subtitle": Optional[str],
+ "sortTitle": str,
+ "yearPublished": str,
+ "authors": list[json_work_author.JsonWorkAuthor],
+ "kind": str,
+ },
+)
+
+
+def export(
+ works: Iterable[Work],
+) -> None:
+ logger.log("==== Begin exporting {}...", "unreviewed works")
+
+ json_unreviewed_works = [
+ ExportsUnreviewedWork(
+ slug=work.slug,
+ title=work.title,
+ subtitle=work.subtitle,
+ sortTitle=work.sort_title,
+ yearPublished=work.year,
+ kind=work.kind,
+ authors=[
+ json_work_author.build_json_work_author(work_author=work_author)
+ for work_author in work.work_authors
+ ],
+ includedInSlugs=[
+ included_in_work.slug for included_in_work in work.included_in_works()
+ ],
+ )
+ for work in works
+ if not work.review()
+ ]
+
+ exporter.serialize_dicts_to_folder(
+ json_unreviewed_works,
+ "unreviewed_works",
+ filename_key=lambda work: work["slug"],
+ )
diff --git a/booklog/repository/api.py b/booklog/repository/api.py
new file mode 100644
index 00000000..532d55a8
--- /dev/null
+++ b/booklog/repository/api.py
@@ -0,0 +1,270 @@
+from __future__ import annotations
+
+import datetime
+from dataclasses import dataclass
+from typing import Iterable, Optional
+
+from booklog.repository import json_authors, json_readings, json_works, markdown_reviews
+
+WORK_KINDS = json_works.KINDS
+
+WORK_KIND_TYPE = json_works.KIND_TYPE
+
+SequenceError = json_readings.SequenceError
+
+
+@dataclass
+class Author(object):
+ name: str
+ sort_name: str
+ slug: str
+
+ def works(self, cache: Optional[list[Work]] = None) -> Iterable[Work]:
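+        # A caller-supplied cache avoids re-reading the work JSON files on every call.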
+ works_iterable = cache or works()
+
+ return filter(
+ lambda work: self.slug
+ in {work_author.author_slug for work_author in work.work_authors},
+ works_iterable,
+ )
+
+
+@dataclass
+class WorkAuthor(object):
+ notes: Optional[str]
+ author_slug: str
+
+ def author(self, cache: Optional[list[Author]] = None) -> Author:
+ author_iterable = cache or authors()
+ return next(
+ author for author in author_iterable if author.slug == self.author_slug
+ )
+
+
+@dataclass
+class Work(object):
+ title: str
+ subtitle: Optional[str]
+ year: str
+ sort_title: str
+ slug: str
+ kind: json_works.KIND_TYPE
+ included_work_slugs: list[str]
+ work_authors: list[WorkAuthor]
+
+ def included_in_works(self, cache: Optional[list[Work]] = None) -> Iterable[Work]:
+ works_iterable = cache or works()
+ return filter(
+ lambda work: self.slug in work.included_work_slugs,
+ works_iterable,
+ )
+
+ def readings(self, cache: Optional[list[Reading]] = None) -> Iterable[Reading]:
+ readings_iterable = cache or readings()
+
+ for reading in readings_iterable:
+ if reading.work_slug == self.slug:
+ yield reading
+
+ def review(self, cache: Optional[list[Review]] = None) -> Optional[Review]:
+ reviews_iterable = cache or reviews()
+ return next(
+ (review for review in reviews_iterable if review.work_slug == self.slug),
+ None,
+ )
+
+
+@dataclass
+class TimelineEntry(object):
+ date: datetime.date
+ progress: str
+
+
+@dataclass(kw_only=True)
+class Reading(object):
+ sequence: int
+ edition: str
+ timeline: list[TimelineEntry]
+ edition_notes: Optional[str] = None
+ work_slug: str
+
+ def work(self, cache: Optional[list[Work]] = None) -> Work:
+ works_iterable = cache or works()
+ return next(work for work in works_iterable if work.slug == self.work_slug)
+
+
+@dataclass
+class Review(object):
+ work_slug: str
+ date: datetime.date
+ grade: str
+ review_content: Optional[str] = None
+
+ def work(self, cache: Optional[list[Work]] = None) -> Work:
+ works_iterable = cache or works()
+ return next(work for work in works_iterable if work.slug == self.work_slug)
+
+ @property
+ def grade_value(self) -> int:
+ if self.grade == "Abandoned":
+ return 0
+
+ value_modifier = 1
+
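+        # Base grade values: D=3, C=6, B=9, A=12; "+" or "-" shifts by one.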
+ grade_map = {
+ "A": 12,
+ "B": 9,
+ "C": 6,
+ "D": 3,
+ }
+
+ grade_value = grade_map.get(self.grade[0], 1)
+ modifier = self.grade[-1]
+
+ if modifier == "+":
+ grade_value += value_modifier
+
+ if modifier == "-":
+ grade_value -= value_modifier
+
+ return grade_value
+
+
+def authors() -> Iterable[Author]:
+ for json_author in json_authors.read_all():
+ yield hydrate_json_author(json_author=json_author)
+
+
+def works() -> Iterable[Work]:
+ for json_work in json_works.read_all():
+ yield hydrate_json_work(json_work=json_work)
+
+
+def readings() -> Iterable[Reading]:
+ for json_reading in json_readings.read_all():
+ yield hydrate_json_reading(json_reading=json_reading)
+
+
+def reviews() -> Iterable[Review]:
+ for markdown_review in markdown_reviews.read_all():
+ yield hydrate_markdown_review(markdown_review=markdown_review)
+
+
+def reading_editions() -> list[str]:
+    return sorted({reading.edition for reading in readings()})
+
+
+def create_author(
+ name: str,
+) -> Author:
+ return hydrate_json_author(json_authors.create(name=name))
+
+
+def create_work( # noqa: WPS211
+ title: str,
+ subtitle: Optional[str],
+ year: str,
+ work_authors: list[WorkAuthor],
+ kind: json_works.KIND_TYPE,
+ included_work_slugs: Optional[list[str]] = None,
+) -> Work:
+ return hydrate_json_work(
+ json_work=json_works.create(
+ title=title,
+ subtitle=subtitle,
+ year=year,
+ work_authors=[
+ json_works.CreateWorkAuthor(
+ slug=work_author.author_slug, notes=work_author.notes
+ )
+ for work_author in work_authors
+ ],
+ kind=kind,
+ included_work_slugs=included_work_slugs,
+ ),
+ )
+
+
+def create_reading(
+ work: Work,
+ timeline: list[TimelineEntry],
+ edition: str,
+) -> Reading:
+ return hydrate_json_reading(
+ json_reading=json_readings.create(
+ work_slug=work.slug,
+ timeline=[
+ json_readings.JsonTimelineEntry(
+ date=datetime.date.isoformat(timeline_entry.date),
+ progress=timeline_entry.progress,
+ )
+ for timeline_entry in timeline
+ ],
+ edition=edition,
+ ),
+ )
+
+
+def create_or_update_review(
+ work: Work,
+ date: datetime.date,
+ grade: str = "Abandoned",
+) -> Review:
+ return hydrate_markdown_review(
+ markdown_review=markdown_reviews.create_or_update(
+ work_slug=work.slug,
+ date=date,
+ grade=grade,
+ )
+ )
+
+
+def hydrate_json_author(json_author: json_authors.JsonAuthor) -> Author:
+ return Author(
+ name=json_author["name"],
+ slug=json_author["slug"],
+ sort_name=json_author["sortName"],
+ )
+
+
+def hydrate_json_work(json_work: json_works.JsonWork) -> Work:
+ return Work(
+ title=json_work["title"],
+ subtitle=json_work["subtitle"],
+ sort_title=json_work["sortTitle"],
+ slug=json_work["slug"],
+ year=json_work["year"],
+ kind=json_work["kind"],
+ included_work_slugs=json_work["includedWorks"],
+ work_authors=[
+ WorkAuthor(author_slug=work_author["slug"], notes=work_author["notes"])
+ for work_author in json_work["authors"]
+ ],
+ )
+
+
+def hydrate_json_reading(json_reading: json_readings.JsonReading) -> Reading:
+ return Reading(
+ sequence=json_reading["sequence"],
+ timeline=[
+ TimelineEntry(
+ date=datetime.date.fromisoformat(json_timeline_entry["date"]),
+ progress=json_timeline_entry["progress"],
+ )
+ for json_timeline_entry in json_reading["timeline"]
+ ],
+ edition=json_reading["edition"],
+ edition_notes=json_reading["edition_notes"],
+ work_slug=json_reading["work_slug"],
+ )
+
+
+def hydrate_markdown_review(markdown_review: markdown_reviews.MarkdownReview) -> Review:
+ return Review(
+ work_slug=markdown_review.yaml["work_slug"],
+ date=markdown_review.yaml["date"],
+ grade=markdown_review.yaml["grade"],
+ review_content=markdown_review.review_content,
+ )
diff --git a/booklog/data/core/json_authors.py b/booklog/repository/json_authors.py
similarity index 52%
rename from booklog/data/core/json_authors.py
rename to booklog/repository/json_authors.py
index cfa25155..4ee052ee 100644
--- a/booklog/data/core/json_authors.py
+++ b/booklog/repository/json_authors.py
@@ -3,12 +3,12 @@
import json
import os
from glob import glob
-from typing import Any, TypedDict
+from typing import Iterable, TypedDict, cast
from slugify import slugify
-from booklog.data.utils import path_tools
-from booklog.logger import logger
+from booklog.utils import path_tools
+from booklog.utils.logging import logger
FOLDER_NAME = "authors"
@@ -31,41 +31,23 @@ def generate_sort_name(name: str) -> str:
return "{0}, {1}".format(last_name, " ".join(other_names))
-def author_for_slug(slug: str, all_json_authors: list[JsonAuthor]) -> JsonAuthor:
- return next(author for author in all_json_authors if author["slug"] == slug)
-
-
-def create(
- name: str,
-) -> JsonAuthor:
- slug = slugify(name)
-
- json_author = JsonAuthor(name=name, sortName=generate_sort_name(name), slug=slug)
+def create(name: str) -> JsonAuthor:
+ json_author = JsonAuthor(
+ name=name, sortName=generate_sort_name(name=name), slug=slugify(name)
+ )
serialize(json_author=json_author)
return json_author
-def deserialize_json_author(json_author: dict[str, Any]) -> JsonAuthor:
- return JsonAuthor(
- name=json_author["name"],
- sortName=json_author["sortName"],
- slug=json_author["slug"],
- )
-
-
-def deserialize_all() -> list[JsonAuthor]:
- authors: list[JsonAuthor] = []
-
+def read_all() -> Iterable[JsonAuthor]:
for file_path in glob(os.path.join(FOLDER_NAME, "*.json")):
with open(file_path, "r") as json_file:
- authors.append(deserialize_json_author(json.load(json_file)))
+            yield cast(JsonAuthor, json.load(json_file))
- return authors
-
-def serialize(json_author: JsonAuthor) -> None:
+def serialize(json_author: JsonAuthor) -> str:
file_path = os.path.join(FOLDER_NAME, "{0}.json".format(json_author["slug"]))
path_tools.ensure_file_path(file_path)
@@ -76,3 +58,5 @@ def serialize(json_author: JsonAuthor) -> None:
"Wrote {}.",
file_path,
)
+
+ return file_path
diff --git a/booklog/data/readings/json_readings.py b/booklog/repository/json_readings.py
similarity index 68%
rename from booklog/data/readings/json_readings.py
rename to booklog/repository/json_readings.py
index 4bd5c09a..9825ac1c 100644
--- a/booklog/data/readings/json_readings.py
+++ b/booklog/repository/json_readings.py
@@ -4,12 +4,12 @@
import os
import re
from glob import glob
-from typing import Any, Optional, Sequence, TypedDict
+from typing import Iterable, Optional, TypedDict, cast
from slugify import slugify
-from booklog.data.utils import path_tools
-from booklog.logger import logger
+from booklog.utils import path_tools
+from booklog.utils.logging import logger
FOLDER_NAME = "readings"
@@ -33,7 +33,7 @@ def create(
work_slug: str, timeline: list[JsonTimelineEntry], edition: str
) -> JsonReading:
json_reading = JsonReading(
- sequence=next_sequence(deserialize_all()),
+ sequence=next_sequence(),
work_slug=work_slug,
edition=edition,
timeline=timeline,
@@ -50,7 +50,8 @@ def __init__(self, message: str) -> None:
self.message = message
-def next_sequence(existing_instances: Sequence[JsonReading]) -> int:
+def next_sequence() -> int:
+ existing_instances = sorted(read_all(), key=lambda reading: reading["sequence"])
next_sequence_number = len(existing_instances) + 1
last_instance: Optional[JsonReading] = None
@@ -69,29 +70,10 @@ def next_sequence(existing_instances: Sequence[JsonReading]) -> int:
return next_sequence_number
-def deserialize_json_reading(json_reading: dict[str, Any]) -> JsonReading:
- return JsonReading(
- work_slug=json_reading["work_slug"],
- sequence=json_reading["sequence"],
- edition=json_reading["edition"],
- edition_notes=json_reading["edition_notes"],
- timeline=[
- JsonTimelineEntry(
- date=json_reading_entry["date"], progress=json_reading_entry["progress"]
- )
- for json_reading_entry in json_reading["timeline"]
- ],
- )
-
-
-def deserialize_all() -> list[JsonReading]:
- readings: list[JsonReading] = []
-
+def read_all() -> Iterable[JsonReading]:
for file_path in glob(os.path.join(FOLDER_NAME, "*.json")):
with open(file_path, "r") as json_file:
- readings.append(deserialize_json_reading(json.load(json_file)))
-
- return readings
+ yield cast(JsonReading, json.load(json_file))
def generate_file_path(json_reading: JsonReading) -> str:
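
next_sequence() now loads and sorts the existing readings itself rather than taking a pre-fetched Sequence. Its full body is elided from this hunk, but the sequence-out-of-sync test further down (a lone file numbered 3 must raise SequenceError) implies a check roughly of this shape; this is a sketch under that assumption, not the verbatim implementation:

    from typing import TypedDict


    class JsonReadingSketch(TypedDict):
        sequence: int


    class SequenceError(Exception):
        pass


    def next_sequence_sketch(sorted_readings: list[JsonReadingSketch]) -> int:
        # Assumes sorted_readings is sorted ascending by "sequence".
        next_sequence_number = len(sorted_readings) + 1
        if sorted_readings and sorted_readings[-1]["sequence"] != len(sorted_readings):
            # e.g. a single existing reading numbered 3 means 1 and 2 are missing.
            raise SequenceError("readings out of sequence")
        return next_sequence_number
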
diff --git a/booklog/data/core/json_works.py b/booklog/repository/json_works.py
similarity index 60%
rename from booklog/data/core/json_works.py
rename to booklog/repository/json_works.py
index ccb28548..aefffac4 100644
--- a/booklog/data/core/json_works.py
+++ b/booklog/repository/json_works.py
@@ -4,25 +4,24 @@
import os
from dataclasses import dataclass
from glob import glob
-from typing import Any, Optional, TypedDict
+from typing import Iterable, Literal, Optional, TypedDict, cast, get_args
from slugify import slugify
-from booklog.data.utils import path_tools
-from booklog.logger import logger
+from booklog.utils import path_tools
+from booklog.utils.logging import logger
FOLDER_NAME = "works"
-KINDS = set(
- [
- "Anthology",
- "Collection",
- "Nonfiction",
- "Novel",
- "Novella",
- "Short Story",
- ]
-)
+KIND_TYPE = Literal[
+ "Anthology",
+ "Collection",
+ "Nonfiction",
+ "Novel",
+ "Novella",
+ "Short Story",
+]
+KINDS = get_args(KIND_TYPE)
JsonWorkAuthor = TypedDict(
"JsonWorkAuthor",
@@ -41,34 +40,12 @@
"sortTitle": str,
"authors": list[JsonWorkAuthor],
"slug": str,
- "kind": str,
+ "kind": KIND_TYPE,
"includedWorks": list[str],
},
)
-def works_for_author_slug(
- slug: str,
- all_json_works: Optional[list[JsonWork]] = None,
-) -> list[JsonWork]:
- all_json_works = all_json_works or deserialize_all()
-
- return [
- work
- for work in all_json_works
- for author in work["authors"]
- if author["slug"] == slug
- ]
-
-
-def works_including_work_slug(
- slug: str, all_json_works: Optional[list[JsonWork]] = None
-) -> list[JsonWork]:
- all_json_works = all_json_works or deserialize_all()
-
- return [work for work in all_json_works if slug in work["includedWorks"]]
-
-
def generate_sort_title(title: str, subtitle: Optional[str]) -> str:
title_with_subtitle = title
@@ -97,7 +74,7 @@ def create( # noqa: WPS211
subtitle: Optional[str],
year: str,
work_authors: list[CreateWorkAuthor],
- kind: str,
+ kind: KIND_TYPE,
included_work_slugs: Optional[list[str]] = None,
) -> JsonWork:
slug = slugify(
@@ -126,32 +103,11 @@ def create( # noqa: WPS211
return json_work
-def deserialize_json_work(json_work: dict[str, Any]) -> JsonWork:
- return JsonWork(
- title=json_work["title"],
- subtitle=json_work["subtitle"],
- sortTitle=json_work["sortTitle"],
- year=json_work["year"],
- authors=[
- JsonWorkAuthor(
- slug=json_work_author["slug"], notes=json_work_author["notes"]
- )
- for json_work_author in json_work["authors"]
- ],
- slug=json_work["slug"],
- kind=json_work["kind"],
- includedWorks=json_work.get("includedWorks", []),
- )
-
-
-def deserialize_all() -> list[JsonWork]:
- works: list[JsonWork] = []
-
+def read_all() -> Iterable[JsonWork]:
for file_path in glob(os.path.join(FOLDER_NAME, "*.json")):
with open(file_path, "r") as json_file:
- works.append(deserialize_json_work(json.load(json_file)))
-
- return works
+            yield cast(JsonWork, json.load(json_file))
def serialize(json_work: JsonWork) -> None:
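
Replacing the hand-maintained KINDS set with a Literal type plus get_args keeps the runtime list and the static type from drifting apart: the tuple is derived from the annotation, so adding a new kind in one place updates both. A small illustration:

    from typing import Literal, get_args

    KIND_TYPE = Literal[
        "Anthology", "Collection", "Nonfiction", "Novel", "Novella", "Short Story"
    ]
    KINDS = get_args(KIND_TYPE)

    # get_args() returns the literal values as a tuple, so runtime membership
    # checks keep working while mypy validates the same set statically.
    assert "Novel" in KINDS
    assert len(KINDS) == 6
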
diff --git a/booklog/data/reviews/markdown_reviews.py b/booklog/repository/markdown_reviews.py
similarity index 69%
rename from booklog/data/reviews/markdown_reviews.py
rename to booklog/repository/markdown_reviews.py
index 7afdb0a0..dbbc5dd6 100644
--- a/booklog/data/reviews/markdown_reviews.py
+++ b/booklog/repository/markdown_reviews.py
@@ -1,15 +1,16 @@
from __future__ import annotations
+import datetime
import os
import re
from dataclasses import dataclass
from glob import glob
-from typing import Any, Optional, TypedDict
+from typing import Any, Iterable, Optional, TypedDict, cast
import yaml
-from booklog.data.utils import path_tools
-from booklog.logger import logger
+from booklog.utils import path_tools
+from booklog.utils.logging import logger
FOLDER_NAME = "reviews"
@@ -32,18 +33,18 @@ class MarkdownReview(object):
class ReviewYaml(TypedDict):
work_slug: str
grade: str
- date: str
+ date: datetime.date
def create_or_update(
work_slug: str,
grade: str,
- date: str,
+ date: datetime.date,
) -> MarkdownReview:
markdown_review = next(
(
markdown_review
- for markdown_review in deserialize_all()
+ for markdown_review in read_all()
if markdown_review.yaml["work_slug"] == work_slug
),
None,
@@ -66,28 +67,14 @@ def create_or_update(
return markdown_review
-def deserialize(file_path: str) -> MarkdownReview:
- with open(file_path, "r") as review_file:
- _, frontmatter, review_content = FM_REGEX.split(review_file.read(), 2)
-
- review_yaml = yaml.safe_load(frontmatter)
-
- return MarkdownReview(
- yaml=ReviewYaml(
- work_slug=review_yaml["work_slug"],
- grade=review_yaml["grade"],
- date=review_yaml["date"],
- ),
- review_content=review_content,
- )
-
-
-def deserialize_all() -> list[MarkdownReview]:
- reviews: list[MarkdownReview] = []
- for review_file_path in glob(os.path.join(FOLDER_NAME, "*.md")):
- reviews.append(deserialize(review_file_path))
-
- return reviews
+def read_all() -> Iterable[MarkdownReview]:
+ for file_path in glob(os.path.join(FOLDER_NAME, "*.md")):
+ with open(file_path, "r") as review_file:
+ _, frontmatter, review_content = FM_REGEX.split(review_file.read(), 2)
+ yield MarkdownReview(
+ yaml=cast(ReviewYaml, yaml.safe_load(frontmatter)),
+ review_content=review_content,
+ )
def generate_file_path(markdown_review: MarkdownReview) -> str:
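
Typing ReviewYaml's date as datetime.date matches what PyYAML already produces: safe_load parses an unquoted ISO date in the front matter straight into a datetime.date, which is also why the .ambr snapshot further down drops the quotes around the date values. A self-contained check:

    import datetime

    import yaml

    frontmatter = "work_slug: the-cellar-by-richard-laymon\ngrade: A+\ndate: 2016-03-10\n"

    parsed = yaml.safe_load(frontmatter)
    # Unquoted ISO dates become datetime.date; quoted ones would stay strings.
    assert parsed["date"] == datetime.date(2016, 3, 10)
    assert parsed["grade"] == "A+"
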
diff --git a/booklog/data/exports/__init__.py b/booklog/utils/__init__.py
similarity index 100%
rename from booklog/data/exports/__init__.py
rename to booklog/utils/__init__.py
diff --git a/booklog/logger.py b/booklog/utils/logging.py
similarity index 100%
rename from booklog/logger.py
rename to booklog/utils/logging.py
diff --git a/booklog/data/utils/path_tools.py b/booklog/utils/path_tools.py
similarity index 100%
rename from booklog/data/utils/path_tools.py
rename to booklog/utils/path_tools.py
diff --git a/booklog/data/utils/py.typed b/booklog/utils/py.typed
similarity index 100%
rename from booklog/data/utils/py.typed
rename to booklog/utils/py.typed
diff --git a/tests/cli/test_add_author.py b/tests/cli/test_add_author.py
index a3e62fde..25de12d9 100644
--- a/tests/cli/test_add_author.py
+++ b/tests/cli/test_add_author.py
@@ -10,7 +10,7 @@
@pytest.fixture
def mock_create_author(mocker: MockerFixture) -> MagicMock:
- return mocker.patch("booklog.cli.add_author.data_api.create_author")
+ return mocker.patch("booklog.cli.add_author.repository_api.create_author")
def test_calls_create_author(
diff --git a/tests/cli/test_add_reading.py b/tests/cli/test_add_reading.py
index d8f015b9..5ed89ab7 100644
--- a/tests/cli/test_add_reading.py
+++ b/tests/cli/test_add_reading.py
@@ -6,35 +6,39 @@
from pytest_mock import MockerFixture
from booklog.cli import add_reading
-from booklog.data import api as data_api
-from booklog.data.api import TimelineEntry
+from booklog.repository import api as repository_api
from tests.cli.conftest import MockInput
from tests.cli.keys import Backspace, Down, Enter
@pytest.fixture
def mock_create_reading(mocker: MockerFixture) -> MagicMock:
- return mocker.patch("booklog.cli.add_reading.data_api.create_reading")
+ return mocker.patch("booklog.cli.add_reading.repository_api.create_reading")
+
+
+@pytest.fixture
+def mock_create_review(mocker: MockerFixture) -> MagicMock:
+ return mocker.patch(
+ "booklog.cli.add_reading.repository_api.create_or_update_review"
+ )
@pytest.fixture(autouse=True)
-def created_author() -> data_api.Author:
- return data_api.create_author("Richard Laymon")
+def author_fixture() -> repository_api.Author:
+ return repository_api.create_author("Richard Laymon")
@pytest.fixture(autouse=True)
-def created_work(created_author: data_api.Author) -> data_api.Work:
- return data_api.create_work(
+def work_fixture(author_fixture: repository_api.Author) -> repository_api.Work:
+ return repository_api.create_work(
title="The Cellar",
subtitle=None,
year="1980",
kind="Novel",
included_work_slugs=[],
work_authors=[
- data_api.WorkAuthor(
- name=created_author.name,
- sort_name=created_author.sort_name,
- slug=created_author.slug,
+ repository_api.WorkAuthor(
+ author_slug=author_fixture.slug,
notes=None,
)
],
@@ -50,7 +54,9 @@ def stub_editions(mocker: MockerFixture) -> None:
"Paperback",
]
- mocker.patch("booklog.cli.add_reading.data_api.all_editions", return_value=editions)
+ mocker.patch(
+ "booklog.cli.add_reading.repository_api.reading_editions", return_value=editions
+ )
def clear_default_date() -> list[str]:
@@ -103,8 +109,11 @@ def enter_grade(grade: str) -> list[str]:
return [grade, Enter]
-def test_calls_add_reading(
- mock_input: MockInput, mock_create_reading: MagicMock, created_work: data_api.Work
+def test_calls_add_reading_and_add_review(
+ mock_input: MockInput,
+ mock_create_reading: MagicMock,
+ mock_create_review: MagicMock,
+ work_fixture: repository_api.Work,
) -> None:
mock_input(
[
@@ -131,12 +140,15 @@ def test_calls_add_reading(
add_reading.prompt()
mock_create_reading.assert_called_once_with(
- work=created_work,
+ work=work_fixture,
edition="Kindle",
timeline=[
- TimelineEntry(date=date(2016, 3, 10), progress="15%"),
- TimelineEntry(date=date(2016, 3, 11), progress="50%"),
- TimelineEntry(date=date(2016, 3, 12), progress="Finished"),
+ repository_api.TimelineEntry(date=date(2016, 3, 10), progress="15%"),
+ repository_api.TimelineEntry(date=date(2016, 3, 11), progress="50%"),
+ repository_api.TimelineEntry(date=date(2016, 3, 12), progress="Finished"),
],
- grade="A+",
+ )
+
+ mock_create_review.assert_called_once_with(
+ work=work_fixture, grade="A+", date=date(2016, 3, 12)
)
diff --git a/tests/cli/test_add_work.py b/tests/cli/test_add_work.py
index 9cf88c0b..8484b5d2 100644
--- a/tests/cli/test_add_work.py
+++ b/tests/cli/test_add_work.py
@@ -5,19 +5,19 @@
from pytest_mock import MockerFixture
from booklog.cli import add_work
-from booklog.data import api as data_api
+from booklog.repository import api as repository_api
from tests.cli.conftest import MockInput
from tests.cli.keys import Down, Enter, Escape
@pytest.fixture
def mock_create_work(mocker: MockerFixture) -> MagicMock:
- return mocker.patch("booklog.cli.add_work.data_api.create_work")
+ return mocker.patch("booklog.cli.add_work.repository_api.create_work")
@pytest.fixture(autouse=True)
-def created_author() -> data_api.Author:
- return data_api.create_author("Richard Laymon")
+def author_fixture() -> repository_api.Author:
+ return repository_api.create_author("Richard Laymon")
def enter_author(name: str = "Laymon") -> list[str]:
@@ -53,11 +53,13 @@ def enter_year_published(year: str) -> list[str]:
def test_calls_create_work(
- mock_input: MockInput, created_author: data_api.Author, mock_create_work: MagicMock
+ mock_input: MockInput,
+ author_fixture: repository_api.Author,
+ mock_create_work: MagicMock,
) -> None:
mock_input(
[
- *enter_author(created_author.name[:6]),
+ *enter_author(author_fixture.name[:6]),
*select_author_search_result(),
*enter_notes(),
"n",
@@ -75,11 +77,9 @@ def test_calls_create_work(
title="The Cellar",
subtitle=None,
work_authors=[
- data_api.WorkAuthor(
- name=created_author.name,
- sort_name=created_author.sort_name,
+ repository_api.WorkAuthor(
+ author_slug=author_fixture.slug,
notes=None,
- slug=created_author.slug,
)
],
year="1980",
diff --git a/tests/cli/test_main.py b/tests/cli/test_main.py
index 460abf59..627c8469 100644
--- a/tests/cli/test_main.py
+++ b/tests/cli/test_main.py
@@ -25,7 +25,7 @@ def mock_add_work(mocker: MockerFixture) -> MagicMock:
@pytest.fixture(autouse=True)
def mock_export_data(mocker: MockerFixture) -> MagicMock:
- return mocker.patch("booklog.cli.main.data_api.export_data")
+ return mocker.patch("booklog.cli.main.exports_api.export_data")
def test_calls_add_author(mock_input: MockInput, mock_add_author: MagicMock) -> None:
diff --git a/tests/conftest.py b/tests/conftest.py
index 43cbc6d4..5e65c0ce 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -11,7 +11,7 @@
def mock_exports_folder_name(mocker: MockerFixture, tmp_path: Path) -> None:
os.mkdir(tmp_path / "exports")
mocker.patch(
- "booklog.data.exports.utils.export_tools.EXPORT_FOLDER_NAME",
+ "booklog.exports.exporter.EXPORT_FOLDER_NAME",
tmp_path / "exports",
)
@@ -19,26 +19,24 @@ def mock_exports_folder_name(mocker: MockerFixture, tmp_path: Path) -> None:
@pytest.fixture(autouse=True)
def mock_readings_folder_name(mocker: MockerFixture, tmp_path: Path) -> None:
os.mkdir(tmp_path / "readings")
- mocker.patch(
- "booklog.data.readings.json_readings.FOLDER_NAME", tmp_path / "readings"
- )
+ mocker.patch("booklog.repository.json_readings.FOLDER_NAME", tmp_path / "readings")
@pytest.fixture(autouse=True)
def mock_reviews_folder_name(mocker: MockerFixture, tmp_path: Path) -> None:
os.mkdir(tmp_path / "reviews")
mocker.patch(
- "booklog.data.reviews.markdown_reviews.FOLDER_NAME", tmp_path / "reviews"
+ "booklog.repository.markdown_reviews.FOLDER_NAME", tmp_path / "reviews"
)
@pytest.fixture(autouse=True)
def mock_works_folder_name(mocker: MockerFixture, tmp_path: Path) -> None:
os.mkdir(tmp_path / "works")
- mocker.patch("booklog.data.core.json_works.FOLDER_NAME", tmp_path / "works")
+ mocker.patch("booklog.repository.json_works.FOLDER_NAME", tmp_path / "works")
@pytest.fixture(autouse=True)
def mock_authors_folder_name(mocker: MockerFixture, tmp_path: Path) -> None:
os.mkdir(tmp_path / "authors")
- mocker.patch("booklog.data.core.json_authors.FOLDER_NAME", tmp_path / "authors")
+ mocker.patch("booklog.repository.json_authors.FOLDER_NAME", tmp_path / "authors")
diff --git a/tests/data/__init__.py b/tests/data/__init__.py
deleted file mode 100644
index e69de29b..00000000
diff --git a/tests/data/core/__init__.py b/tests/data/core/__init__.py
deleted file mode 100644
index e69de29b..00000000
diff --git a/tests/data/core/test_api.py b/tests/data/core/test_api.py
deleted file mode 100644
index a9aaaefd..00000000
--- a/tests/data/core/test_api.py
+++ /dev/null
@@ -1,53 +0,0 @@
-import json
-import os
-from pathlib import Path
-
-import pytest
-from syrupy.assertion import SnapshotAssertion
-from syrupy.extensions.json import JSONSnapshotExtension
-
-from booklog.data.core import api as core_api
-
-
-@pytest.fixture
-def snapshot_json(snapshot: SnapshotAssertion) -> SnapshotAssertion:
- return snapshot.with_defaults(extension_class=JSONSnapshotExtension)
-
-
-def test_create_author(tmp_path: Path, snapshot_json: SnapshotAssertion) -> None:
- core_api.create_author(name="Stephen King")
-
- with open(
- os.path.join(tmp_path / "authors", "stephen-king.json"),
- "r",
- ) as output_file:
- file_content = json.load(output_file)
-
- assert file_content == snapshot_json
-
-
-def test_create_create_work(tmp_path: Path, snapshot_json: SnapshotAssertion) -> None:
- author = core_api.create_author("Richard Laymon")
-
- core_api.create_work(
- title="The Cellar",
- subtitle=None,
- year="1980",
- work_authors=[
- core_api.WorkAuthor(
- author.name,
- author.sort_name,
- author.slug,
- notes=None,
- )
- ],
- kind="Novel",
- )
-
- with open(
- os.path.join(tmp_path / "works", "the-cellar-by-richard-laymon.json"),
- "r",
- ) as output_file:
- file_content = json.load(output_file)
-
- assert file_content == snapshot_json
diff --git a/tests/data/exports/__init__.py b/tests/data/exports/__init__.py
deleted file mode 100644
index e69de29b..00000000
diff --git a/tests/data/exports/__snapshots__/test_api/test_exports_reviewed_works.json b/tests/data/exports/__snapshots__/test_api/test_exports_reviewed_works.json
deleted file mode 100644
index a10e4f2d..00000000
--- a/tests/data/exports/__snapshots__/test_api/test_exports_reviewed_works.json
+++ /dev/null
@@ -1,21 +0,0 @@
-{
- "authors": [
- {
- "name": "Stephen King",
- "notes": null,
- "slug": "stephen-king",
- "sortName": "King, Stephen"
- }
- ],
- "date": "2016-03-10 00:00:00",
- "grade": "A+",
- "gradeValue": 13,
- "includedInSlugs": [],
- "kind": "Nonfiction",
- "sequence": 1,
- "slug": "on-writing-by-stephen-king",
- "sortTitle": "On Writing: A Memoir of the Craft",
- "title": "On Writing",
- "yearPublished": "2000",
- "yearReviewed": 2016
-}
diff --git a/tests/data/exports/test_api.py b/tests/data/exports/test_api.py
deleted file mode 100644
index 7724c650..00000000
--- a/tests/data/exports/test_api.py
+++ /dev/null
@@ -1,67 +0,0 @@
-import json
-import os
-from datetime import date
-from pathlib import Path
-
-import pytest
-from syrupy.assertion import SnapshotAssertion
-from syrupy.extensions.json import JSONSnapshotExtension
-
-from booklog.data.core import api as core_api
-from booklog.data.exports import api as exports_api
-from booklog.data.readings import api as readings_api
-from booklog.data.reviews import api as reviews_api
-
-
-@pytest.fixture(autouse=True)
-def init_data() -> None:
- author = core_api.create_author("Stephen King")
- work = core_api.create_work(
- title="On Writing",
- subtitle="A Memoir of the Craft",
- year="2000",
- work_authors=[
- core_api.WorkAuthor(
- slug=author.slug,
- name=author.name,
- sort_name=author.sort_name,
- notes=None,
- )
- ],
- kind="Nonfiction",
- )
- readings_api.create_reading(
- work=work,
- edition="Kindle",
- timeline=[
- readings_api.TimelineEntry(date=date(2016, 3, 10), progress="15%"),
- readings_api.TimelineEntry(date=date(2016, 3, 11), progress="50%"),
- readings_api.TimelineEntry(date=date(2016, 3, 12), progress="Finished"),
- ],
- )
- reviews_api.create_or_update(
- work=work,
- grade="A+",
- date=date(2016, 3, 10),
- )
-
-
-@pytest.fixture
-def snapshot_json(snapshot: SnapshotAssertion) -> SnapshotAssertion:
- return snapshot.with_defaults(extension_class=JSONSnapshotExtension)
-
-
-def test_exports_reviewed_works(
- tmp_path: Path, snapshot_json: SnapshotAssertion
-) -> None:
- exports_api.export_data()
-
- with open(
- os.path.join(
- tmp_path / "exports" / "reviewed_works", "on-writing-by-stephen-king.json"
- ),
- "r",
- ) as output_file:
- file_content = json.load(output_file)
-
- assert file_content == snapshot_json
diff --git a/tests/data/readings/test_api.py b/tests/data/readings/test_api.py
deleted file mode 100644
index de59e0b8..00000000
--- a/tests/data/readings/test_api.py
+++ /dev/null
@@ -1,107 +0,0 @@
-import json
-import os
-from datetime import date
-from pathlib import Path
-
-import pytest
-from syrupy.assertion import SnapshotAssertion
-from syrupy.extensions.json import JSONSnapshotExtension
-
-from booklog.data.core import api as core_api
-from booklog.data.readings import api as readings_api
-
-
-@pytest.fixture
-def created_author() -> core_api.Author:
- return core_api.create_author("Richard Laymon")
-
-
-@pytest.fixture
-def created_work(created_author: core_api.Author) -> core_api.Work:
- return core_api.create_work(
- title="The Cellar",
- subtitle=None,
- year="1980",
- kind="Novel",
- included_work_slugs=[],
- work_authors=[
- core_api.WorkAuthor(
- name=created_author.name,
- sort_name=created_author.sort_name,
- slug=created_author.slug,
- notes=None,
- )
- ],
- )
-
-
-@pytest.fixture
-def snapshot_json(snapshot: SnapshotAssertion) -> SnapshotAssertion:
- return snapshot.with_defaults(extension_class=JSONSnapshotExtension)
-
-
-def test_can_create_reading(
- tmp_path: Path, created_work: core_api.Work, snapshot_json: SnapshotAssertion
-) -> None:
- readings_api.create_reading(
- work=created_work,
- edition="Kindle",
- timeline=[
- readings_api.TimelineEntry(date=date(2016, 3, 10), progress="15%"),
- readings_api.TimelineEntry(date=date(2016, 3, 11), progress="50%"),
- readings_api.TimelineEntry(date=date(2016, 3, 12), progress="Finished"),
- ],
- )
-
- with open(
- os.path.join(tmp_path / "readings", "0001-the-cellar-by-richard-laymon.json"),
- "r",
- ) as output_file:
- file_content = output_file.read()
-
- assert file_content == snapshot_json
-
-
-def test_create_raises_error_if_sequence_out_of_sync(
- tmp_path: Path, created_work: core_api.Work
-) -> None:
- existing_reading = json.dumps(
- {
- "sequence": 3,
- "work_slug": "on-writing-by-stephen-king",
- "edition": "Kindle",
- "timeline": [
- {
- "date": "2016-03-10",
- "progress": "15%",
- },
- {
- "date": "2016-03-11",
- "progress": "50%",
- },
- {
- "date": "2016-03-12",
- "progress": "Finished",
- },
- ],
- "edition_notes": None,
- },
- indent=4,
- )
-
- with open(
- os.path.join(tmp_path / "readings", "0003-on-writing-by-stephen-king.json"),
- "w",
- ) as output_file:
- output_file.write(existing_reading)
-
- with pytest.raises(readings_api.SequenceError):
- readings_api.create_reading(
- work=created_work,
- edition="Kindle",
- timeline=[
- readings_api.TimelineEntry(date=date(2016, 3, 10), progress="15%"),
- readings_api.TimelineEntry(date=date(2016, 3, 11), progress="50%"),
- readings_api.TimelineEntry(date=date(2016, 3, 12), progress="Finished"),
- ],
- )
diff --git a/tests/data/reviews/__init__.py b/tests/data/reviews/__init__.py
deleted file mode 100644
index e69de29b..00000000
diff --git a/tests/data/reviews/test_api.py b/tests/data/reviews/test_api.py
deleted file mode 100644
index 0fcb6712..00000000
--- a/tests/data/reviews/test_api.py
+++ /dev/null
@@ -1,75 +0,0 @@
-import os
-from datetime import date
-from pathlib import Path
-
-import pytest
-from syrupy.assertion import SnapshotAssertion
-
-from booklog.data.core import api as core_api
-from booklog.data.reviews import api as reviews_api
-
-
-@pytest.fixture
-def created_author() -> core_api.Author:
- return core_api.create_author("Richard Laymon")
-
-
-@pytest.fixture
-def created_work(created_author: core_api.Author) -> core_api.Work:
- return core_api.create_work(
- title="The Cellar",
- subtitle=None,
- year="1980",
- kind="Novel",
- included_work_slugs=[],
- work_authors=[
- core_api.WorkAuthor(
- name=created_author.name,
- sort_name=created_author.sort_name,
- slug=created_author.slug,
- notes=None,
- )
- ],
- )
-
-
-def test_can_create_new_review(
- tmp_path: Path, created_work: core_api.Work, snapshot: SnapshotAssertion
-) -> None:
- reviews_api.create_or_update(
- work=created_work,
- grade="A+",
- date=date(2016, 3, 10),
- )
-
- with open(
- os.path.join(tmp_path / "reviews", "the-cellar-by-richard-laymon.md"), "r"
- ) as output_file:
- file_content = output_file.read()
-
- assert file_content == snapshot
-
-
-def test_can_update_existing_review(
- tmp_path: Path, created_work: core_api.Work, snapshot: SnapshotAssertion
-) -> None:
- existing_review = "---\nwork_slug: the-cellar-by-richard-laymon\ngrade: A+\ndate: 2016-03-10\n---\n\nSome review content we want to preserve between updates." # noqa: 501
-
- with open(
- os.path.join(tmp_path / "reviews", "the-cellar-by-richard-laymon.md"),
- "w",
- ) as first_output_file:
- first_output_file.write(existing_review)
-
- reviews_api.create_or_update(
- work=created_work,
- grade="C+",
- date=date(2017, 3, 12),
- )
-
- with open(
- os.path.join(tmp_path / "reviews", "the-cellar-by-richard-laymon.md"), "r"
- ) as output_file:
- file_content = output_file.read()
-
- assert file_content == snapshot
diff --git a/booklog/data/exports/utils/__init__.py b/tests/exports/__init__.py
similarity index 100%
rename from booklog/data/exports/utils/__init__.py
rename to tests/exports/__init__.py
diff --git a/tests/exports/__snapshots__/test_api/test_exports_authors.json b/tests/exports/__snapshots__/test_api/test_exports_authors.json
new file mode 100644
index 00000000..10f63117
--- /dev/null
+++ b/tests/exports/__snapshots__/test_api/test_exports_authors.json
@@ -0,0 +1,47 @@
+{
+ "name": "Stephen King",
+ "reviewedWorkCount": 1,
+ "slug": "stephen-king",
+ "sortName": "King, Stephen",
+ "workCount": 2,
+ "works": [
+ {
+ "authors": [
+ {
+ "name": "Stephen King",
+ "notes": null,
+ "slug": "stephen-king",
+ "sortName": "King, Stephen"
+ }
+ ],
+ "grade": "A+",
+ "gradeValue": 13,
+ "includedInSlugs": [],
+ "kind": "Nonfiction",
+ "reviewed": true,
+ "slug": "on-writing-by-stephen-king",
+ "sortTitle": "On Writing: A Memoir of the Craft",
+ "title": "On Writing",
+ "yearPublished": "2000"
+ },
+ {
+ "authors": [
+ {
+ "name": "Stephen King",
+ "notes": null,
+ "slug": "stephen-king",
+ "sortName": "King, Stephen"
+ }
+ ],
+ "grade": null,
+ "gradeValue": null,
+ "includedInSlugs": [],
+ "kind": "Novel",
+ "reviewed": false,
+ "slug": "the-stand-by-stephen-king",
+ "sortTitle": "Stand",
+ "title": "The Stand",
+ "yearPublished": "1978"
+ }
+ ]
+}
diff --git a/tests/exports/__snapshots__/test_api/test_exports_reading_timeline_entries.json b/tests/exports/__snapshots__/test_api/test_exports_reading_timeline_entries.json
new file mode 100644
index 00000000..8a4828b2
--- /dev/null
+++ b/tests/exports/__snapshots__/test_api/test_exports_reading_timeline_entries.json
@@ -0,0 +1,56 @@
+[
+ {
+ "authors": [
+ {
+ "name": "Stephen King"
+ }
+ ],
+ "date": "2016-03-12",
+ "edition": "Kindle",
+ "includedInSlugs": [],
+ "kind": "Nonfiction",
+ "progress": "Finished",
+ "readingYear": 2016,
+ "reviewed": true,
+ "sequence": "2016-03-12-1",
+ "slug": "on-writing-by-stephen-king",
+ "title": "On Writing",
+ "yearPublished": "2000"
+ },
+ {
+ "authors": [
+ {
+ "name": "Stephen King"
+ }
+ ],
+ "date": "2016-03-11",
+ "edition": "Kindle",
+ "includedInSlugs": [],
+ "kind": "Nonfiction",
+ "progress": "50%",
+ "readingYear": 2016,
+ "reviewed": true,
+ "sequence": "2016-03-11-1",
+ "slug": "on-writing-by-stephen-king",
+ "title": "On Writing",
+ "yearPublished": "2000"
+ },
+ {
+ "authors": [
+ {
+ "name": "Stephen King"
+ }
+ ],
+ "date": "2016-03-10",
+ "edition": "Kindle",
+ "includedInSlugs": [],
+ "kind": "Nonfiction",
+ "progress": "15%",
+ "readingYear": 2016,
+ "reviewed": true,
+ "sequence": "2016-03-10-1",
+ "slug": "on-writing-by-stephen-king",
+ "title": "On Writing",
+ "yearPublished": "2000"
+ }
+]
diff --git a/tests/exports/__snapshots__/test_api/test_exports_reviewed_works.json b/tests/exports/__snapshots__/test_api/test_exports_reviewed_works.json
new file mode 100644
index 00000000..c1ab3742
--- /dev/null
+++ b/tests/exports/__snapshots__/test_api/test_exports_reviewed_works.json
@@ -0,0 +1,47 @@
+{
+ "authors": [
+ {
+ "name": "Stephen King",
+ "notes": null,
+ "slug": "stephen-king",
+ "sortName": "King, Stephen"
+ }
+ ],
+ "date": "2016-03-10",
+ "grade": "A+",
+ "gradeValue": 13,
+ "includedInSlugs": [],
+ "kind": "Nonfiction",
+ "readings": [
+ {
+ "abandoned": false,
+ "date": "2016-03-12",
+ "edition": "Kindle",
+ "editionNotes": null,
+ "isAudioBook": false,
+ "readingTime": 3,
+ "sequence": 1,
+ "timeline": [
+ {
+ "date": "2016-03-10",
+ "progress": "15%"
+ },
+ {
+ "date": "2016-03-11",
+ "progress": "50%"
+ },
+ {
+ "date": "2016-03-12",
+ "progress": "Finished"
+ }
+ ]
+ }
+ ],
+ "sequence": 1,
+ "slug": "on-writing-by-stephen-king",
+ "sortTitle": "On Writing: A Memoir of the Craft",
+ "subtitle": "A Memoir of the Craft",
+ "title": "On Writing",
+ "yearPublished": "2000",
+ "yearReviewed": 2016
+}
diff --git a/tests/exports/__snapshots__/test_api/test_exports_unreviewed_works.json b/tests/exports/__snapshots__/test_api/test_exports_unreviewed_works.json
new file mode 100644
index 00000000..a157478b
--- /dev/null
+++ b/tests/exports/__snapshots__/test_api/test_exports_unreviewed_works.json
@@ -0,0 +1,17 @@
+{
+ "authors": [
+ {
+ "name": "Stephen King",
+ "notes": null,
+ "slug": "stephen-king",
+ "sortName": "King, Stephen"
+ }
+ ],
+ "includedInSlugs": [],
+ "kind": "Novel",
+ "slug": "the-stand-by-stephen-king",
+ "sortTitle": "Stand",
+ "subtitle": null,
+ "title": "The Stand",
+ "yearPublished": "1978"
+}
diff --git a/tests/exports/test_api.py b/tests/exports/test_api.py
new file mode 100644
index 00000000..6695938c
--- /dev/null
+++ b/tests/exports/test_api.py
@@ -0,0 +1,120 @@
+import json
+import os
+from datetime import date
+from pathlib import Path
+
+import pytest
+from syrupy.assertion import SnapshotAssertion
+from syrupy.extensions.json import JSONSnapshotExtension
+
+from booklog.exports import api as exports_api
+from booklog.repository import api as repository_api
+
+
+@pytest.fixture(autouse=True)
+def init_data() -> None:
+ author = repository_api.create_author("Stephen King")
+
+ work = repository_api.create_work(
+ title="On Writing",
+ subtitle="A Memoir of the Craft",
+ year="2000",
+ work_authors=[
+ repository_api.WorkAuthor(
+ author_slug=author.slug,
+ notes=None,
+ )
+ ],
+ kind="Nonfiction",
+ )
+
+ repository_api.create_work(
+ title="The Stand",
+ subtitle=None,
+ year="1978",
+ work_authors=[
+ repository_api.WorkAuthor(
+ author_slug=author.slug,
+ notes=None,
+ )
+ ],
+ kind="Novel",
+ )
+
+ repository_api.create_reading(
+ work=work,
+ edition="Kindle",
+ timeline=[
+ repository_api.TimelineEntry(date=date(2016, 3, 10), progress="15%"),
+ repository_api.TimelineEntry(date=date(2016, 3, 11), progress="50%"),
+ repository_api.TimelineEntry(date=date(2016, 3, 12), progress="Finished"),
+ ],
+ )
+ repository_api.create_or_update_review(
+ work=work,
+ grade="A+",
+ date=date(2016, 3, 10),
+ )
+
+
+@pytest.fixture
+def snapshot_json(snapshot: SnapshotAssertion) -> SnapshotAssertion:
+ return snapshot.with_defaults(extension_class=JSONSnapshotExtension)
+
+
+def test_exports_authors(tmp_path: Path, snapshot_json: SnapshotAssertion) -> None:
+ exports_api.export_data()
+
+ with open(
+ os.path.join(tmp_path / "exports" / "authors", "stephen-king.json"),
+ "r",
+ ) as output_file:
+ file_content = json.load(output_file)
+
+ assert file_content == snapshot_json
+
+
+def test_exports_reviewed_works(
+ tmp_path: Path, snapshot_json: SnapshotAssertion
+) -> None:
+ exports_api.export_data()
+
+ with open(
+ os.path.join(
+ tmp_path / "exports" / "reviewed_works", "on-writing-by-stephen-king.json"
+ ),
+ "r",
+ ) as output_file:
+ file_content = json.load(output_file)
+
+ assert file_content == snapshot_json
+
+
+def test_exports_unreviewed_works(
+ tmp_path: Path, snapshot_json: SnapshotAssertion
+) -> None:
+ exports_api.export_data()
+
+ with open(
+ os.path.join(
+ tmp_path / "exports" / "unreviewed_works", "the-stand-by-stephen-king.json"
+ ),
+ "r",
+ ) as output_file:
+ file_content = json.load(output_file)
+
+ assert file_content == snapshot_json
+
+
+def test_exports_reading_timeline_entries(
+ tmp_path: Path, snapshot_json: SnapshotAssertion
+) -> None:
+ exports_api.export_data()
+
+ with open(
+ os.path.join(tmp_path / "exports", "reading-timeline-entries.json"),
+ "r",
+ ) as output_file:
+ file_content = json.load(output_file)
+
+ assert file_content == snapshot_json
diff --git a/tests/data/exports/test_format_tools.py b/tests/exports/test_exporter.py
similarity index 59%
rename from tests/data/exports/test_format_tools.py
rename to tests/exports/test_exporter.py
index 21b9e915..6120c892 100644
--- a/tests/data/exports/test_format_tools.py
+++ b/tests/exports/test_exporter.py
@@ -1,13 +1,6 @@
import pytest
-from booklog.data.exports.utils import format_tools
-
-
-@pytest.mark.parametrize("test_input, expected", [(1000, "1,000"), (100, "100")])
-def test_humanize_int_adds_comma_when_appropriate(
- test_input: int, expected: str
-) -> None:
- assert format_tools.humanize_int(test_input) == expected
+from booklog.exports import exporter
@pytest.mark.parametrize(
@@ -28,4 +21,4 @@ def test_humanize_int_adds_comma_when_appropriate(
def test_pretty_file_size_returns_humanized_sizes(
test_input: float, expected: str
) -> None:
- assert format_tools.pretty_file_size(test_input) == expected
+ assert exporter.pretty_file_size(test_input) == expected
diff --git a/booklog/data/readings/__init__.py b/tests/repository/__init__.py
similarity index 100%
rename from booklog/data/readings/__init__.py
rename to tests/repository/__init__.py
diff --git a/tests/data/reviews/__snapshots__/test_api.ambr b/tests/repository/__snapshots__/test_api.ambr
similarity index 88%
rename from tests/data/reviews/__snapshots__/test_api.ambr
rename to tests/repository/__snapshots__/test_api.ambr
index de98b3d1..e4f31647 100644
--- a/tests/data/reviews/__snapshots__/test_api.ambr
+++ b/tests/repository/__snapshots__/test_api.ambr
@@ -4,7 +4,7 @@
---
work_slug: the-cellar-by-richard-laymon
grade: A+
- date: '2016-03-10'
+ date: 2016-03-10
---
@@ -15,7 +15,7 @@
---
work_slug: the-cellar-by-richard-laymon
grade: C+
- date: '2017-03-12'
+ date: 2017-03-12
---
Some review content we want to preserve between updates.
diff --git a/tests/data/readings/__snapshots__/test_api/test_can_create_reading.json b/tests/repository/__snapshots__/test_api/test_can_create_reading.json
similarity index 100%
rename from tests/data/readings/__snapshots__/test_api/test_can_create_reading.json
rename to tests/repository/__snapshots__/test_api/test_can_create_reading.json
diff --git a/tests/data/core/__snapshots__/test_api/test_create_author.json b/tests/repository/__snapshots__/test_api/test_create_author.json
similarity index 100%
rename from tests/data/core/__snapshots__/test_api/test_create_author.json
rename to tests/repository/__snapshots__/test_api/test_create_author.json
diff --git a/tests/data/core/__snapshots__/test_api/test_create_create_work.json b/tests/repository/__snapshots__/test_api/test_create_create_work.json
similarity index 100%
rename from tests/data/core/__snapshots__/test_api/test_create_create_work.json
rename to tests/repository/__snapshots__/test_api/test_create_create_work.json
diff --git a/tests/repository/test_api.py b/tests/repository/test_api.py
new file mode 100644
index 00000000..15a86ce1
--- /dev/null
+++ b/tests/repository/test_api.py
@@ -0,0 +1,197 @@
+import datetime
+import json
+import os
+from pathlib import Path
+
+import pytest
+from syrupy.assertion import SnapshotAssertion
+from syrupy.extensions.json import JSONSnapshotExtension
+
+from booklog.repository import api as repository_api
+
+
+@pytest.fixture
+def snapshot_json(snapshot: SnapshotAssertion) -> SnapshotAssertion:
+ return snapshot.with_defaults(extension_class=JSONSnapshotExtension)
+
+
+@pytest.fixture
+def author_fixture() -> repository_api.Author:
+ return repository_api.create_author("Richard Laymon")
+
+
+@pytest.fixture
+def work_fixture(author_fixture: repository_api.Author) -> repository_api.Work:
+ return repository_api.create_work(
+ title="The Cellar",
+ subtitle=None,
+ year="1980",
+ kind="Novel",
+ included_work_slugs=[],
+ work_authors=[
+ repository_api.WorkAuthor(
+ author_slug=author_fixture.slug,
+ notes=None,
+ )
+ ],
+ )
+
+
+def test_create_author(tmp_path: Path, snapshot_json: SnapshotAssertion) -> None:
+ repository_api.create_author(name="Stephen King")
+
+ with open(
+ os.path.join(tmp_path / "authors", "stephen-king.json"),
+ "r",
+ ) as output_file:
+ file_content = json.load(output_file)
+
+ assert file_content == snapshot_json
+
+
+def test_create_create_work(
+ author_fixture: repository_api.Author,
+ tmp_path: Path,
+ snapshot_json: SnapshotAssertion,
+) -> None:
+ repository_api.create_work(
+ title="The Cellar",
+ subtitle=None,
+ year="1980",
+ work_authors=[
+ repository_api.WorkAuthor(
+ author_slug=author_fixture.slug,
+ notes=None,
+ )
+ ],
+ kind="Novel",
+ )
+
+ with open(
+ os.path.join(tmp_path / "works", "the-cellar-by-richard-laymon.json"),
+ "r",
+ ) as output_file:
+ file_content = json.load(output_file)
+
+ assert file_content == snapshot_json
+
+
+def test_can_create_reading(
+ tmp_path: Path, work_fixture: repository_api.Work, snapshot_json: SnapshotAssertion
+) -> None:
+ repository_api.create_reading(
+ work=work_fixture,
+ edition="Kindle",
+ timeline=[
+ repository_api.TimelineEntry(
+ date=datetime.date(2016, 3, 10), progress="15%"
+ ),
+ repository_api.TimelineEntry(
+ date=datetime.date(2016, 3, 11), progress="50%"
+ ),
+ repository_api.TimelineEntry(
+ date=datetime.date(2016, 3, 12), progress="Finished"
+ ),
+ ],
+ )
+
+ with open(
+ os.path.join(tmp_path / "readings", "0001-the-cellar-by-richard-laymon.json"),
+ "r",
+ ) as output_file:
+ file_content = output_file.read()
+
+ assert file_content == snapshot_json
+
+
+def test_create_raises_error_if_sequence_out_of_sync(
+ tmp_path: Path, work_fixture: repository_api.Work
+) -> None:
+ existing_reading = json.dumps(
+ {
+ "sequence": 3,
+ "work_slug": "on-writing-by-stephen-king",
+ "edition": "Kindle",
+ "timeline": [
+ {
+ "date": "2016-03-10",
+ "progress": "15%",
+ },
+ {
+ "date": "2016-03-11",
+ "progress": "50%",
+ },
+ {
+ "date": "2016-03-12",
+ "progress": "Finished",
+ },
+ ],
+ "edition_notes": None,
+ },
+ indent=4,
+ )
+
+ with open(
+ os.path.join(tmp_path / "readings", "0003-on-writing-by-stephen-king.json"),
+ "w",
+ ) as output_file:
+ output_file.write(existing_reading)
+
+ with pytest.raises(repository_api.SequenceError):
+ repository_api.create_reading(
+ work=work_fixture,
+ edition="Kindle",
+ timeline=[
+ repository_api.TimelineEntry(
+ date=datetime.date(2016, 3, 10), progress="15%"
+ ),
+ repository_api.TimelineEntry(
+ date=datetime.date(2016, 3, 11), progress="50%"
+ ),
+ repository_api.TimelineEntry(
+ date=datetime.date(2016, 3, 12), progress="Finished"
+ ),
+ ],
+ )
+
+
+def test_can_create_new_review(
+ tmp_path: Path, work_fixture: repository_api.Work, snapshot: SnapshotAssertion
+) -> None:
+ repository_api.create_or_update_review(
+ work=work_fixture,
+ grade="A+",
+ date=datetime.date(2016, 3, 10),
+ )
+
+ with open(
+ os.path.join(tmp_path / "reviews", "the-cellar-by-richard-laymon.md"), "r"
+ ) as output_file:
+ file_content = output_file.read()
+
+ assert file_content == snapshot
+
+
+def test_can_update_existing_review(
+ tmp_path: Path, work_fixture: repository_api.Work, snapshot: SnapshotAssertion
+) -> None:
+    existing_review = "---\nwork_slug: the-cellar-by-richard-laymon\ngrade: A+\ndate: 2016-03-10\n---\n\nSome review content we want to preserve between updates."  # noqa: E501
+
+ with open(
+ os.path.join(tmp_path / "reviews", "the-cellar-by-richard-laymon.md"),
+ "w",
+ ) as first_output_file:
+ first_output_file.write(existing_review)
+
+ repository_api.create_or_update_review(
+ work=work_fixture,
+ grade="C+",
+ date=datetime.date(2017, 3, 12),
+ )
+
+ with open(
+ os.path.join(tmp_path / "reviews", "the-cellar-by-richard-laymon.md"), "r"
+ ) as output_file:
+ file_content = output_file.read()
+
+ assert file_content == snapshot
diff --git a/booklog/data/reviews/__init__.py b/tests/utils/__init__.py
similarity index 100%
rename from booklog/data/reviews/__init__.py
rename to tests/utils/__init__.py
diff --git a/tests/test_logger.py b/tests/utils/test_logging.py
similarity index 95%
rename from tests/test_logger.py
rename to tests/utils/test_logging.py
index 58756cf4..2eaaa91b 100644
--- a/tests/test_logger.py
+++ b/tests/utils/test_logging.py
@@ -1,6 +1,6 @@
from pytest_mock import MockerFixture
-from booklog.logger import logger
+from booklog.utils.logging import logger
def test_formats_headers_with_yellow(mocker: MockerFixture) -> None: