🔧 Consolidate needs data post-processing (#1039)
This commit is a step towards being able to generate the needs.json without running the Sphinx `BUILD` phase.

Here we consolidate all functions that post-process the needs data after it has been fully extracted from all documents and external sources.
We remove from each of these functions the individual logic that checks whether post-processing has already been done, and instead perform a single check before running them all.

This refactor does not actually change the sphinx-needs process in any way; that will come later.
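For illustration, the guard pattern this commit removes looked roughly like this (a minimal sketch, not the verbatim sphinx-needs code): each post-processing function protected itself against the per-document Sphinx events that would otherwise re-run it.

    # Before: every post-processing function carried its own "already done" flag.
    def check_links(env):
        workflow = SphinxNeedsData(env).get_or_create_workflow()
        if workflow["links_checked"]:
            return  # a previous doctree-resolved event already ran this
        # ... the actual link checking ...
        workflow["links_checked"] = True

After this commit the functions are unguarded, and the single caller runs them all once behind one flag, as shown in the process_need_nodes diff below.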
chrisjsewell authored and iSOLveIT committed Oct 23, 2023
1 parent c01558e commit 5c32994
Showing 7 changed files with 197 additions and 268 deletions.
2 changes: 1 addition & 1 deletion docs/contributing.rst
@@ -342,7 +342,7 @@ The following is an outline of the build events which this extension adds to the
- Check for dead links (``process_need_nodes -> check_links``)
- Generate back links (``process_need_nodes -> create_back_links``)
- Process constraints, for each ``Need`` node (``process_need_nodes -> process_constraints``)
- Perform all modifications on need data items, due to ``Needextend`` nodes (``process_need_nodes -> process_needextend``)
- Perform all modifications on need data items, due to ``Needextend`` nodes (``process_need_nodes -> extend_needs_data``)
- Format each ``Need`` node to give the desired visual output (``process_need_nodes -> print_need_nodes``)
- Process all other need specific nodes, replacing them with the desired visual output (``process_creator``)
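All of the steps in the outline above run from Sphinx's ``doctree-resolved`` event, which fires once per document; a minimal sketch of the registration (assuming the usual extension ``setup`` entry point, simplified from the real one):

    # Sketch: process_need_nodes receives (app, doctree, fromdocname),
    # the argument set Sphinx passes to doctree-resolved handlers.
    def setup(app):
        app.connect("doctree-resolved", process_need_nodes)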

46 changes: 10 additions & 36 deletions sphinx_needs/data.py
@@ -30,23 +30,6 @@ class NeedsFilterType(TypedDict):
amount: int


class NeedsWorkflowType(TypedDict):
"""
Used to store workflow status information for already executed tasks.
Some tasks like backlink_creation need be performed only once.
But most sphinx-events get called several times (for each single document file),
which would also execute our code several times...
"""

backlink_creation_links: bool
dynamic_values_resolved: bool
links_checked: bool
add_sections: bool
variant_option_resolved: bool
needs_extended: bool
needs_constraints: bool


class NeedsBaseDataType(TypedDict):
"""A base type for all data."""

@@ -448,27 +431,18 @@ def get_or_create_docs(self) -> dict[str, list[str]]:
self.env.needs_all_docs = {"all": []}
return self.env.needs_all_docs

def get_or_create_workflow(self) -> NeedsWorkflowType:
"""Get workflow information.
This is lazily created and cached in the environment.
"""
@property
def needs_is_post_processed(self) -> bool:
"""Whether needs have been post-processed."""
try:
return self.env.needs_workflow
return self.env.needs_is_post_processed
except AttributeError:
self.env.needs_workflow = {
"backlink_creation_links": False,
"dynamic_values_resolved": False,
"links_checked": False,
"add_sections": False,
"variant_option_resolved": False,
"needs_extended": False,
"needs_constraints": False,
}
for link_type in self.env.app.config.needs_extra_links:
self.env.needs_workflow["backlink_creation_{}".format(link_type["option"])] = False

return self.env.needs_workflow # type: ignore[return-value]
self.env.needs_is_post_processed = False
return self.env.needs_is_post_processed

@needs_is_post_processed.setter
def needs_is_post_processed(self, value: bool) -> None:
self.env.needs_is_post_processed = value
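The try/except is the usual lazy-initialisation pattern for attributes stored on the Sphinx ``BuildEnvironment``, which may have been restored from a pickled cache that predates the attribute. Usage, as a sketch:

    data = SphinxNeedsData(env)
    data.needs_is_post_processed          # first access initialises it to False
    data.needs_is_post_processed = True   # the setter persists it on env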

def get_or_create_services(self) -> ServiceManager:
"""Get information about services.
114 changes: 52 additions & 62 deletions sphinx_needs/directives/need.py
@@ -17,7 +17,11 @@
from sphinx_needs.data import SphinxNeedsData
from sphinx_needs.debug import measure_time
from sphinx_needs.defaults import NEED_DEFAULT_OPTIONS
from sphinx_needs.directives.needextend import process_needextend
from sphinx_needs.directives.needextend import (
Needextend,
extend_needs_data,
remove_needextend_node,
)
from sphinx_needs.functions import (
find_and_replace_node_content,
resolve_dynamic_values,
@@ -376,29 +380,23 @@ def process_need_nodes(app: Sphinx, doctree: nodes.document, fromdocname: str) -
return

env = app.env
needs_data = SphinxNeedsData(env)

# If no needs were defined, we do not need to do anything
if not hasattr(env, "needs_all_needs"):
if not needs_data.get_or_create_needs():
return

# Call dynamic functions and replace related node data with their return values
resolve_dynamic_values(env)

# Apply variant handling on options and replace its values with their return values
resolve_variants_options(env)

# check if we have dead links
check_links(env)

# Create back links of common links and extra links
for links in needs_config.extra_links:
create_back_links(env, links["option"])

process_constraints(app)
if not needs_data.needs_is_post_processed:
resolve_dynamic_values(env)
resolve_variants_options(env)
check_links(env)
create_back_links(env)
process_constraints(app)
extend_needs_data(app)
needs_data.needs_is_post_processed = True

# We call process_needextend here by our own, so that we are able
# to give print_need_nodes the already found need_nodes.
process_needextend(app, doctree, fromdocname)
for extend_node in doctree.findall(Needextend):
remove_needextend_node(extend_node)

print_need_nodes(app, doctree, fromdocname, list(doctree.findall(Need)))
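Assembled from the added lines above, the heart of the consolidated ``process_need_nodes`` now reads (a sketch of the post-diff result):

    if not needs_data.needs_is_post_processed:
        resolve_dynamic_values(env)
        resolve_variants_options(env)
        check_links(env)
        create_back_links(env)
        process_constraints(app)
        extend_needs_data(app)
        needs_data.needs_is_post_processed = True

    # Needextend modifications were applied globally by extend_needs_data;
    # the nodes themselves are now simply dropped from each doctree.
    for extend_node in doctree.findall(Needextend):
        remove_needextend_node(extend_node)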

@@ -432,18 +430,16 @@ def print_need_nodes(app: Sphinx, doctree: nodes.document, fromdocname: str, fou


def check_links(env: BuildEnvironment) -> None:
"""Checks if set links are valid or are dead (referenced need does not exist.)
For needs with dead links, an extra ``has_dead_links`` field is added and,
if the link is not allowed to be dead,
the ``has_forbidden_dead_links`` field is also added.
"""
Checks if set links are valid or are dead (referenced need does not exist.)
:param env: Sphinx environment
:return:
"""
config = NeedsSphinxConfig(env.config)
data = SphinxNeedsData(env)
workflow = data.get_or_create_workflow()
if workflow["links_checked"]:
return

needs = data.get_or_create_needs()
extra_links = getattr(env.config, "needs_extra_links", [])
extra_links = config.extra_links
for need in needs.values():
for link_type in extra_links:
dead_links_allowed = link_type.get("allow_dead_links", False)
@@ -464,45 +460,39 @@ def check_links(env: BuildEnvironment) -> None:
need["has_forbidden_dead_links"] = True
break # One found dead link is enough

# Finally set a flag so that this function gets not executed several times
workflow["links_checked"] = True


def create_back_links(env: BuildEnvironment, option: str) -> None:
"""
Create back-links in all found needs.
But do this only once, as all needs are already collected and this sorting is for all
needs and not only for the ones of the current document.
def create_back_links(env: BuildEnvironment) -> None:
"""Create back-links in all found needs.
:param env: sphinx environment
These are fields for each link type, ``<link_name>_back``,
which contain a list of all IDs of needs that link to the current need.
"""
data = SphinxNeedsData(env)
workflow = data.get_or_create_workflow()
option_back = f"{option}_back"
if workflow[f"backlink_creation_{option}"]: # type: ignore[literal-required]
return

needs_config = NeedsSphinxConfig(env.config)
needs = data.get_or_create_needs()
for key, need in needs.items():
need_link_value = [need[option]] if isinstance(need[option], str) else need[option] # type: ignore[literal-required]
for link in need_link_value:
link_main = link.split(".")[0]
try:
link_part = link.split(".")[1]
except IndexError:
link_part = None

if link_main in needs:
if key not in needs[link_main][option_back]: # type: ignore[literal-required]
needs[link_main][option_back].append(key) # type: ignore[literal-required]

# Handling of links to need_parts inside a need
if link_part and link_part in needs[link_main]["parts"]:
if option_back not in needs[link_main]["parts"][link_part].keys():
needs[link_main]["parts"][link_part][option_back] = [] # type: ignore[literal-required]
needs[link_main]["parts"][link_part][option_back].append(key) # type: ignore[literal-required]

workflow[f"backlink_creation_{option}"] = True # type: ignore[literal-required]

for links in needs_config.extra_links:
option = links["option"]
option_back = f"{option}_back"

for key, need in needs.items():
need_link_value = [need[option]] if isinstance(need[option], str) else need[option] # type: ignore[literal-required]
for link in need_link_value:
link_main = link.split(".")[0]
try:
link_part = link.split(".")[1]
except IndexError:
link_part = None

if link_main in needs:
if key not in needs[link_main][option_back]: # type: ignore[literal-required]
needs[link_main][option_back].append(key) # type: ignore[literal-required]

# Handling of links to need_parts inside a need
if link_part and link_part in needs[link_main]["parts"]:
if option_back not in needs[link_main]["parts"][link_part].keys():
needs[link_main]["parts"][link_part][option_back] = [] # type: ignore[literal-required]
needs[link_main]["parts"][link_part][option_back].append(key) # type: ignore[literal-required]


def _fix_list_dyn_func(list: List[str]) -> List[str]:
