diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 3e2280b..14d07b4 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -11,6 +11,7 @@ env: GIT_COMMITTER_EMAIL: site@domain.com GIT_COMMITTER_NAME: Tara BRANCH_NAME: ${{ github.head_ref || github.ref_name }} + JUVIX_VERSION: v0.6.8 concurrency: group: "${{ github.workflow }}-${{ github.head_ref || github.run_id }}" cancel-in-progress: true @@ -31,7 +32,7 @@ jobs: uses: jaxxstorm/action-install-gh-release@v1.12.0 with: repo: anoma/juvix - tag: v0.6.6 + tag: ${{ env.JUVIX_VERSION }} cache: enable rename-to: juvix chmod: 0755 diff --git a/mkdocs_juvix/common/models/wikilink.py b/mkdocs_juvix/common/models/wikilink.py index 0474ae6..9e019bf 100644 --- a/mkdocs_juvix/common/models/wikilink.py +++ b/mkdocs_juvix/common/models/wikilink.py @@ -1,11 +1,7 @@ from typing import Optional -from mkdocs.plugins import get_plugin_logger - from .loc import FileLoc -log = get_plugin_logger("\033[94m[wikilinks]\033[0m") - class WikiLink: html_path: Optional[str] diff --git a/mkdocs_juvix/common/preprocesors/links.py b/mkdocs_juvix/common/preprocesors/links.py index c9ec14b..805e38b 100644 --- a/mkdocs_juvix/common/preprocesors/links.py +++ b/mkdocs_juvix/common/preprocesors/links.py @@ -1,20 +1,17 @@ import os import re -import time from pathlib import Path from typing import Any, List, Optional, Tuple from urllib.parse import urljoin import numpy as np # type: ignore -from colorama import Fore, Style # type: ignore from fuzzywuzzy import fuzz # type: ignore from markdown.preprocessors import Preprocessor # type: ignore -from mkdocs.plugins import get_plugin_logger -from mkdocs.structure.pages import Page from ncls import NCLS # type: ignore from mkdocs_juvix.common.models import FileLoc, WikiLink from mkdocs_juvix.env import ENV +from mkdocs_juvix.logger import log from mkdocs_juvix.utils import time_spent as time_spent_decorator WIKILINK_PATTERN = re.compile( @@ -29,10 +26,6 @@ re.VERBOSE, ) -log = get_plugin_logger( - f"{Fore.BLUE}[juvix_mkdocs] (preprocessor: wikilinks){Style.RESET_ALL}" -) - def time_spent(message: Optional[Any] = None, print_result: bool = False): return time_spent_decorator(log=log, message=message, print_result=print_result) @@ -189,7 +182,6 @@ def run(self, lines: List[str]) -> List[str]: return self._run("\n".join(lines)).split("\n") def _run(self, content: str) -> str: - log.info(f"{Fore.MAGENTA}Running wikilinks preprocessor{Style.RESET_ALL}") if ( self.absolute_path is None and self.relative_path is None diff --git a/mkdocs_juvix/env.py b/mkdocs_juvix/env.py index a8223e8..6d0f046 100644 --- a/mkdocs_juvix/env.py +++ b/mkdocs_juvix/env.py @@ -19,6 +19,12 @@ import mkdocs_juvix.utils as utils from mkdocs_juvix.juvix_version import MIN_JUVIX_VERSION +from mkdocs_juvix.utils import ( + compute_sha_over_folder, + fix_site_url, + hash_content_of, + is_juvix_markdown_file, +) log = get_plugin_logger(f"{Fore.BLUE}[juvix_mkdocs] (env) {Style.RESET_ALL}") @@ -30,7 +36,7 @@ class ENV: ROOT_PATH: Path DOCS_DIRNAME: str = getenv("DOCS_DIRNAME", "docs") DOCS_PATH: Path - CACHE_DIRNAME: str = getenv("CACHE_DIRNAME", ".hooks") + CACHE_DIRNAME: str = getenv("CACHE_DIRNAME", ".cache-juvix-mkdocs") CACHE_PATH: Path DIFF_ENABLED: bool DIFF_BIN: str @@ -47,72 +53,56 @@ class ENV: CLEAN_DEPS: bool = bool(getenv("CLEAN_DEPS", False)) UPDATE_DEPS: bool = bool(getenv("UPDATE_DEPS", False)) - REMOVE_CACHE: bool = bool( - getenv("REMOVE_CACHE", False) - ) # Whether the cache should be removed + REMOVE_CACHE: bool = 
bool(getenv("REMOVE_CACHE", False)) - JUVIX_ENABLED: bool = bool( - getenv("JUVIX_ENABLED", True) - ) # Whether the user wants to use Juvix + JUVIX_ENABLED: bool = bool(getenv("JUVIX_ENABLED", True)) JUVIX_FULL_VERSION: str - JUVIX_BIN_NAME: str = getenv("JUVIX_BIN", "juvix") # The name of the Juvix binary - JUVIX_BIN_PATH: str = getenv("JUVIX_PATH", "") # The path to the Juvix binaries + JUVIX_BIN_NAME: str = getenv("JUVIX_BIN", "juvix") + JUVIX_BIN_PATH: str = getenv("JUVIX_PATH", "") JUVIX_BIN: str = ( JUVIX_BIN_PATH + "/" + JUVIX_BIN_NAME if JUVIX_BIN_PATH != "" else JUVIX_BIN_NAME - ) # The full path to the Juvix binary + ) JUVIX_AVAILABLE: bool = shutil.which(JUVIX_BIN) is not None - FIRST_RUN: bool = bool( - getenv("FIRST_RUN", True) - ) # Whether this is the first time the plugin is run + FIRST_RUN: bool = bool(getenv("FIRST_RUN", True)) JUVIX_FOOTER_CSS_FILENAME: str = getenv( "JUVIX_FOOTER_CSS_FILENAME", "juvix_codeblock_footer.css" ) - CACHE_ORIGINALS_DIRNAME: str = getenv( - "CACHE_ORIGINALS_DIRNAME", ".original_files" - ) # The name of the directory where the original files are cached + CACHE_ORIGINALS_DIRNAME: str = getenv("CACHE_ORIGINALS_DIRNAME", ".originals") CACHE_PROJECT_HASH_FILENAME: str = getenv( - "CACHE_PROJECT_HASH_FILENAME", ".hash_compound_of_original_files" - ) # The name of the file where the hash of the original files is cached - - CACHE_ISABELLE_THEORIES_DIRNAME: str = getenv( - "CACHE_ISABELLE_THEORIES_DIRNAME", ".isabelle_theories" - ) # The name of the directory where the Isabelle Markdown files are cached - CACHE_ISABELLE_OUTPUT_PATH: Path - CACHE_HASHES_DIRNAME: str = getenv( - "CACHE_HASHES_DIRNAME", ".hashes_for_original_files" - ) # The name of the directory where the hashes are stored - CACHE_HTML_DIRNAME: str = getenv( - "CACHE_HTML_DIRNAME", ".html" - ) # The name of the directory where the HTML files are cached + "CACHE_PROJECT_HASH_FILENAME", ".compound_hash_of_originals" + ) + + ISABELLE_THEORIES_DIRNAME: str = getenv( + "CACHE_ISABELLE_THEORIES_DIRNAME", "isabelle_theories" + ) + ISABELLE_OUTPUT_PATH: Path + CACHE_HASHES_DIRNAME: str = getenv("CACHE_HASHES_DIRNAME", ".hashes") + CACHE_HTML_DIRNAME: str = getenv("CACHE_HTML_DIRNAME", ".html") DOCS_INDEXES_DIRNAME: str = getenv("DOCS_INDEXES_DIRNAME", "indexes") - CACHE_MARKDOWN_JUVIX_OUTPUT_DIRNAME: str = getenv( - "CACHE_MARKDOWN_JUVIX_OUTPUT_DIRNAME", - ".markdown_output_from_original_files", - ) # The name of the file where the Juvix Markdown files are stored - CACHE_WIKILINKS_DIRNAME: str = getenv("CACHE_WIKILINKS_DIRNAME", ".wikilinks") + CACHE_PROCESSED_MARKDOWN_DIRNAME: str = getenv( + "CACHE_PROCESSED_MARKDOWN_DIRNAME", + ".processed_markdown", + ) DOCS_IMAGES_DIRNAME: str = getenv("DOCS_IMAGES_DIRNAME", "images") CACHE_JUVIX_VERSION_FILENAME: str = getenv( "CACHE_JUVIX_VERSION_FILENAME", ".juvix_version" ) - ROOT_ABSPATH: Path # The path to the root directory used by MkDocs - CACHE_ABSPATH: Path # The path to the cache directory - DOCS_ABSPATH: Path # The path to the documentation directory - CACHE_ORIGINALS_ABSPATH: Path # The path to the original files cache directory - CACHE_MARKDOWN_JUVIX_OUTPUT_PATH: ( - Path # The path to the Juvix Markdown output directory - ) - CACHE_WIKILINKS_PATH: Path # The path to the wikilinks cache directory - CACHE_HTML_PATH: Path # The path to the HTML output directory - CACHE_PROJECT_HASH_FILEPATH: Path # The path to the Juvix Markdown output directory - CACHE_HASHES_PATH: Path # The path where hashes are stored (not the project hash) - 
JUVIX_FOOTER_CSS_FILEPATH: Path # The path to the Juvix footer CSS file - CACHE_JUVIX_VERSION_FILEPATH: Path # The path to the Juvix version file + ROOT_ABSPATH: Path + CACHE_ABSPATH: Path + DOCS_ABSPATH: Path + CACHE_ORIGINALS_ABSPATH: Path + CACHE_PROCESSED_MARKDOWN_PATH: Path + CACHE_HTML_PATH: Path + CACHE_PROJECT_HASH_FILEPATH: Path + CACHE_HASHES_PATH: Path + JUVIX_FOOTER_CSS_FILEPATH: Path + CACHE_JUVIX_VERSION_FILEPATH: Path TOKEN_ISABELLE_THEORY: str = "" SHOW_TODOS_IN_MD: bool INDEXES_PATH: Path @@ -129,7 +119,7 @@ def __init__(self, config: Optional[MkDocsConfig] = None): exit(1) self.ROOT_PATH = Path(config_file).parent - self.SITE_URL = config.get("site_url", "") # TODO: "" or "/" ? + self.SITE_URL = config.get("site_url", "") else: self.ROOT_PATH = Path(".").resolve() self.SITE_URL = "" @@ -164,44 +154,31 @@ def __init__(self, config: Optional[MkDocsConfig] = None): self.CACHE_ORIGINALS_ABSPATH: Path = ( self.CACHE_ABSPATH / self.CACHE_ORIGINALS_DIRNAME - ) # The path to the Juvix Markdown cache directory - self.ROOT_ABSPATH: Path = ( - self.CACHE_ABSPATH.parent - ) # The path to the root directory - self.DOCS_ABSPATH: Path = ( - self.ROOT_ABSPATH / self.DOCS_DIRNAME - ) # The path to the documentation directory - self.IMAGES_PATH: Path = ( - self.DOCS_ABSPATH / self.DOCS_IMAGES_DIRNAME - ) # The path to the images directory - - self.CACHE_MARKDOWN_JUVIX_OUTPUT_PATH: Path = ( - self.CACHE_ABSPATH / self.CACHE_MARKDOWN_JUVIX_OUTPUT_DIRNAME - ) # The path to the Juvix Markdown output directory - self.CACHE_HTML_PATH: Path = ( - self.CACHE_ABSPATH / self.CACHE_HTML_DIRNAME - ) # The path to the Juvix Markdown output directory - - self.CACHE_ISABELLE_OUTPUT_PATH: Path = ( - self.CACHE_ABSPATH / self.CACHE_ISABELLE_THEORIES_DIRNAME - ) # The path to the Isabelle output directory + ) + self.DOCS_ABSPATH: Path = self.ROOT_ABSPATH / self.DOCS_DIRNAME + self.IMAGES_PATH: Path = self.DOCS_ABSPATH / self.DOCS_IMAGES_DIRNAME + + self.CACHE_PROCESSED_MARKDOWN_PATH: Path = ( + self.CACHE_ABSPATH / self.CACHE_PROCESSED_MARKDOWN_DIRNAME + ) + self.CACHE_HTML_PATH: Path = self.CACHE_ABSPATH / self.CACHE_HTML_DIRNAME + + self.ISABELLE_OUTPUT_PATH: Path = ( + self.ROOT_ABSPATH / self.ISABELLE_THEORIES_DIRNAME + ) + self.ISABELLE_OUTPUT_PATH.mkdir(parents=True, exist_ok=True) self.CACHE_PROJECT_HASH_FILEPATH: Path = ( self.CACHE_ABSPATH / self.CACHE_PROJECT_HASH_FILENAME - ) # The path to the Juvix Markdown output directory - self.CACHE_HASHES_PATH: Path = ( - self.CACHE_ABSPATH / self.CACHE_HASHES_DIRNAME - ) # The path where hashes are stored (not the project hash) + ) + self.CACHE_HASHES_PATH: Path = self.CACHE_ABSPATH / self.CACHE_HASHES_DIRNAME self.JUVIX_FOOTER_CSS_FILEPATH: Path = ( self.DOCS_ABSPATH / "assets" / "css" / self.JUVIX_FOOTER_CSS_FILENAME ) self.CACHE_JUVIX_VERSION_FILEPATH: Path = ( self.CACHE_ABSPATH / self.CACHE_JUVIX_VERSION_FILENAME - ) # The path to the Juvix version file - self.CACHE_WIKILINKS_PATH: Path = ( - self.CACHE_ABSPATH / self.CACHE_WIKILINKS_DIRNAME - ) # The path to the wikilinks cache directory + ) if not self.DOCS_ABSPATH.exists(): log.error( @@ -217,7 +194,7 @@ def __init__(self, config: Optional[MkDocsConfig] = None): ): try: log.info( - f"Removing directory {Fore.RED}{self.CACHE_ABSPATH}{Style.RESET_ALL}" + f"{Fore.YELLOW}Removing directory {self.CACHE_ABSPATH}{Style.RESET_ALL}" ) shutil.rmtree(self.CACHE_ABSPATH, ignore_errors=True) except Exception as e: @@ -228,11 +205,10 @@ def __init__(self, config: Optional[MkDocsConfig] = None): # Create the 
cache directories self.CACHE_ORIGINALS_ABSPATH.mkdir(parents=True, exist_ok=True) - self.CACHE_MARKDOWN_JUVIX_OUTPUT_PATH.mkdir(parents=True, exist_ok=True) - self.CACHE_ISABELLE_OUTPUT_PATH.mkdir(parents=True, exist_ok=True) + self.CACHE_PROCESSED_MARKDOWN_PATH.mkdir(parents=True, exist_ok=True) + self.ISABELLE_OUTPUT_PATH.mkdir(parents=True, exist_ok=True) self.CACHE_HTML_PATH.mkdir(parents=True, exist_ok=True) self.CACHE_HASHES_PATH.mkdir(parents=True, exist_ok=True) - self.CACHE_WIKILINKS_PATH.mkdir(parents=True, exist_ok=True) self.JUVIX_VERSION = "" self.JUVIX_FULL_VERSION = "" @@ -276,9 +252,7 @@ def __init__(self, config: Optional[MkDocsConfig] = None): f"""Juvix version {Fore.RED}{MIN_JUVIX_VERSION}{Style.RESET_ALL} or higher is required. Please upgrade Juvix and try again.""" ) - self.JUVIX_ENABLED = False - self.JUVIX_AVAILABLE = False - return + exit(1) self.USE_DOT = bool(getenv("USE_DOT", True)) self.DOT_BIN = getenv("DOT_BIN", "dot") @@ -302,27 +276,10 @@ def wrapper(self, *args, **kwargs): return wrapper def read_markdown_file_from_cache(self, filepath: Path) -> Optional[str]: - if ( - cache_ABSpath - := self.compute_filepath_for_cached_output_of_juvix_markdown_file(filepath) - ): + if cache_ABSpath := self.compute_processed_filepath(filepath): return cache_ABSpath.read_text() return None - def read_wikilinks_file_from_cache(self, filepath: Path) -> Optional[str]: - if cache_ABSpath := self.get_filepath_for_wikilinks_in_cache(filepath): - return cache_ABSpath.read_text() - return None - - def write_wikilinks_file_to_cache(self, filepath: Path, content: str) -> None: - if cache_ABSpath := self.get_filepath_for_wikilinks_in_cache(filepath): - cache_ABSpath.write_text(content) - - def get_filepath_for_wikilinks_in_cache(self, filepath: Path) -> Optional[Path]: - filepath = filepath.absolute() - rel_to_docs = filepath.relative_to(self.DOCS_ABSPATH) - return self.CACHE_WIKILINKS_PATH / rel_to_docs.parent / filepath.name - def compute_filepath_for_cached_hash_for(self, filepath: Path) -> Path: file_abspath = filepath.absolute() return utils.get_filepath_for_cached_hash_for( @@ -346,18 +303,66 @@ def update_cache_for_file(self, filepath: Path, file_content: str) -> None: cache_filepath.write_text(file_content) self.update_hash_file(file_abspath) + def compute_filepath_for_original_file_in_cache(self, filepath: Path) -> Path: + file_abspath = filepath.absolute() + rel_to_docs = file_abspath.relative_to(self.DOCS_ABSPATH) + return self.CACHE_ORIGINALS_ABSPATH / rel_to_docs.parent / filepath.name + @lru_cache(maxsize=128) - def compute_filepath_for_cached_output_of_juvix_markdown_file( - self, filepath: Path + def compute_processed_filepath( + self, + filepath: Path, + relative_to: Optional[Path] = None, ) -> Path: - file_abspath = filepath.absolute() - md_filename = filepath.name.replace(".juvix.md", ".md") - file_rel_to_docs = file_abspath.relative_to(self.DOCS_ABSPATH) - return ( - self.CACHE_MARKDOWN_JUVIX_OUTPUT_PATH - / file_rel_to_docs.parent - / md_filename - ) + log.debug(f"Computing processed filepath for {filepath}") + + if filepath.name.endswith(".juvix.md"): + md_filename = filepath.name.replace(".juvix.md", ".md") + log.debug(f"Converted Juvix markdown filename to: {md_filename}") + else: + md_filename = filepath.name + log.debug(f"Using markdown filename: {md_filename}") + + # check if the filepath is absolute + if filepath.is_absolute(): + log.debug(f"Filepath is absolute: {filepath}") + filepath = filepath.relative_to(self.DOCS_ABSPATH) + processed_path = ( + 
self.CACHE_PROCESSED_MARKDOWN_PATH / filepath.parent / md_filename + ) + log.debug( + f"Computed processed filepath for absolute path: {processed_path}" + ) + return processed_path + else: + log.debug(f"Filepath is relative: {filepath}") + + if len(filepath.parts) > 0 and filepath.parts[0] in ["docs", "./docs"]: + filepath = Path(*filepath.parts[1:]) + processed_path = ( + self.CACHE_PROCESSED_MARKDOWN_PATH / filepath.parent / md_filename + ) + log.debug(f"Computed processed filepath for docs path: {processed_path}") + return processed_path + + if relative_to is None: + log.error("No relative path specified for the processed filepath") + return filepath + + if relative_to.is_file(): + processed_path = ( + self.CACHE_PROCESSED_MARKDOWN_PATH / relative_to.parent / md_filename + ) + log.debug(f"Computed processed filepath relative to file: {processed_path}") + return processed_path + else: + processed_path = ( + self.CACHE_PROCESSED_MARKDOWN_PATH / relative_to / md_filename + ) + log.debug( + f"Computed processed filepath relative to directory: {processed_path}" + ) + return processed_path def unqualified_module_name(self, filepath: Path) -> Optional[str]: fposix: str = filepath.as_posix() @@ -396,7 +401,11 @@ def get_filename_module_by_extension( """ The markdown filename is the same as the juvix file name but without the .juvix.md extension. """ + log.debug( + f"Getting filename module by extension for {filepath} with extension {extension}" + ) module_name = self.unqualified_module_name(filepath) + log.debug(f"Module name: {module_name}") return module_name + extension if module_name else None def update_hash_file(self, filepath: Path) -> Optional[Tuple[Path, str]]: @@ -422,34 +431,138 @@ def copy_directory(self, src: Path, dst: Path) -> None: except Exception as e: log.error(f"Error copying folder: {e}") - def compute_filepath_for_juvix_markdown_output_in_cache( + def compute_filepath_for_juvix_isabelle_output_in_cache( self, filepath: Path ) -> Optional[Path]: + if not is_juvix_markdown_file(filepath): + log.debug(f"Filepath is not a Juvix Markdown filepath: {filepath}") + return None + + log.debug(f"Computing filepath for Isabelle output in cache for {filepath}") cache_markdown_filename: Optional[str] = self.get_filename_module_by_extension( - filepath, extension=".md" + filepath, extension=".thy" ) + log.debug(f"Cache markdown filename: {cache_markdown_filename}") + if cache_markdown_filename is None: + log.debug(f"No Isabelle output filename found for {filepath}") return None - rel_to_docs = filepath.relative_to(self.DOCS_ABSPATH) + + if filepath.is_relative_to(self.DOCS_ABSPATH): + rel_to_docs = filepath.relative_to(self.DOCS_ABSPATH) + elif filepath.is_relative_to("./docs"): + rel_to_docs = filepath.relative_to("./docs") + elif filepath.is_relative_to("docs"): + rel_to_docs = filepath.relative_to("docs") + else: + rel_to_docs = filepath + cache_markdown_filepath: Path = ( - self.CACHE_MARKDOWN_JUVIX_OUTPUT_PATH - / rel_to_docs.parent - / cache_markdown_filename + self.ISABELLE_OUTPUT_PATH / rel_to_docs.parent / cache_markdown_filename + ) + cache_markdown_filepath.parent.mkdir(parents=True, exist_ok=True) + log.debug( + f"Computed filepath for Isabelle output in cache: {cache_markdown_filepath}" ) return cache_markdown_filepath - def compute_filepath_for_juvix_isabelle_output_in_cache( - self, filepath: Path + def find_file_in( + self, + _filepath: Path | str, + _relative_to: Optional[Path | str], + _base_path: Optional[Path | str], + cache: bool = True, ) -> Optional[Path]: - 
cache_isabelle_filename: Optional[str] = self.get_filename_module_by_extension( - filepath, extension=".thy" + """ + The filepath can be: + - Relative to the docs directory, e.g., "docs/..." or "./docs/..." + - Absolute, e.g., "/some/path/to/docs/..." + - Relative to the current working directory, in which case, relative_to + should be specified. + + Otherwise, the search will be done relative to + self.CACHE_PROCESSED_MARKDOWN_PATH first, or relative to the docs + directory otherwise. + + If the filepath is relative to the docs directory, the path to the + processed markdown file in the cache is obtained using + self.CACHE_PROCESSED_MARKDOWN_PATH. If the filepath is absolute, it is + checked for existence. If the filepath is relative to the current + working directory, relative_to is used to find the file. + """ + filepath: Path = Path(_filepath) if isinstance(_filepath, str) else _filepath + relative_to = ( + Path(_relative_to) if isinstance(_relative_to, str) else _relative_to ) - if cache_isabelle_filename is None: - return None - rel_to_docs = filepath.relative_to(self.DOCS_ABSPATH) - cache_isabelle_filepath: Path = ( - self.CACHE_ISABELLE_OUTPUT_PATH - / rel_to_docs.parent - / cache_isabelle_filename + base_path = Path(_base_path) if isinstance(_base_path, str) else _base_path + + filepath = Path(filepath.name.replace(".juvix.md", ".md")) + + log.debug(f"Attempting to find file: {filepath}") + + if filepath.is_relative_to("./docs") or filepath.is_relative_to("docs"): + filepath = ( + filepath.relative_to("./docs") + if filepath.is_relative_to("./docs") + else filepath.relative_to("docs") + ) + # Check if the filepath is relative to the docs directory + docs_relative_path = self.DOCS_ABSPATH / filepath + if docs_relative_path.exists(): + log.debug( + f"File found relative to docs directory: {docs_relative_path}" + ) + if not base_path and cache: + new_path = self.CACHE_PROCESSED_MARKDOWN_PATH / filepath + if new_path.exists(): + log.debug( + f"File found relative to cache processed markdown path: {new_path}" + ) + return new_path + new_path = base_path / filepath if base_path else docs_relative_path + if new_path.exists(): + log.debug(f"File found relative to base path: {new_path}") + return new_path + + # Check if the filepath is absolute + if filepath.is_absolute(): + log.debug(f"Filepath is absolute: {filepath}") + if filepath.exists(): + log.debug(f"File found at absolute path: {filepath}") + return filepath + else: + log.debug(f"File not found at absolute path: {filepath}") + return None + + # Check if the filepath is relative to the current working directory + if relative_to: + if isinstance(relative_to, str): + relative_to = Path(relative_to) + relative_to = relative_to.resolve().absolute() + if relative_to.is_file(): + relative_path = relative_to.parent / filepath + else: + relative_path = relative_to / filepath + + log.debug(f"Checking relative to provided path: {relative_path}") + if relative_path.exists(): + log.debug(f"File found relative to provided path: {relative_path}") + return relative_path if base_path is None else base_path / relative_path + + # Fallback to checking relative to the cache processed markdown path + cache_relative_path = self.CACHE_PROCESSED_MARKDOWN_PATH / filepath + log.debug( + f"Checking relative to cache processed markdown path: {cache_relative_path}" ) - return cache_isabelle_filepath + if cache_relative_path.exists(): + log.debug( + f"File found relative to cache processed markdown path: {cache_relative_path}" + ) + return ( + 
cache_relative_path + if base_path is None + else base_path / cache_relative_path + ) + + log.debug(f"File not found: {filepath}") + return None diff --git a/mkdocs_juvix/images.py b/mkdocs_juvix/images.py index eeadbe9..a7efa7f 100644 --- a/mkdocs_juvix/images.py +++ b/mkdocs_juvix/images.py @@ -11,17 +11,16 @@ from markdown.extensions import Extension # type: ignore from markdown.preprocessors import Preprocessor # type: ignore from mkdocs.config.defaults import MkDocsConfig # type: ignore -from mkdocs.plugins import BasePlugin, get_plugin_logger +from mkdocs.plugins import BasePlugin from mkdocs.structure.files import Files # type: ignore from mkdocs.structure.pages import Page from ncls import NCLS # type: ignore from mkdocs_juvix.common.utils import fix_site_url # type:ignore from mkdocs_juvix.env import ENV # type: ignore +from mkdocs_juvix.logger import log from mkdocs_juvix.utils import time_spent as time_spent_decorator -log = get_plugin_logger(f"{Fore.BLUE}[juvix_mkdocs]{Style.RESET_ALL} (images)") - IMAGES_PATTERN = re.compile( r""" !\[ @@ -89,7 +88,7 @@ def find_replacements( for match in pattern.finditer(text): start, end = match.span() if should_process_match(ignore_tree, start, end): - log.info( + log.debug( f"Processing image URL: {Fore.GREEN}{match.group('url')}{Style.RESET_ALL}" ) url = Path(match.group("url")) diff --git a/mkdocs_juvix/links.py b/mkdocs_juvix/links.py index 45fcf78..5241db4 100644 --- a/mkdocs_juvix/links.py +++ b/mkdocs_juvix/links.py @@ -13,7 +13,6 @@ from colorama import Fore, Style # type: ignore from markdown.extensions import Extension # type: ignore from mkdocs.config.defaults import MkDocsConfig -from mkdocs.plugins import get_plugin_logger from mkdocs.structure.files import File, Files from mkdocs.structure.pages import Page from mkdocs.utils import meta @@ -22,8 +21,7 @@ from mkdocs_juvix.common.preprocesors.links import WLPreprocessor from mkdocs_juvix.common.utils import fix_site_url, get_page_title from mkdocs_juvix.env import ENV - -log = get_plugin_logger(f"{Fore.BLUE}[juvix_mkdocs] (wikilinks) {Style.RESET_ALL}") +from mkdocs_juvix.logger import log files_relation: List[ResultEntry] = [] EXCLUDED_DIRS = [ @@ -93,7 +91,7 @@ def on_config(self, config: MkDocsConfig, **kwargs) -> MkDocsConfig: self.PAGE_LINK_DIAGS = self.env.CACHE_PATH / self.PAGE_LINK_DIAGSNAME self.PAGE_LINK_DIAGS.mkdir(parents=True, exist_ok=True) - log.info("Wikilinks plugin initialized") + log.debug("Wikilinks plugin initialized") return config def on_pre_build(self, config: MkDocsConfig) -> None: @@ -149,10 +147,6 @@ def on_files(self, files: Files, config: MkDocsConfig) -> None: ] ) - log.info( - f"Processing wikilinks for {Fore.GREEN}{len(files)}{Style.RESET_ALL} files" - ) - def process_file(file: File) -> None: pathFile: str | None = file.abs_src_path if pathFile is not None: @@ -183,9 +177,6 @@ def process_file(file: File) -> None: if self.LINKS_JSON.exists(): self.LINKS_JSON.unlink() - log.info( - f"> writing page aliases to {Fore.YELLOW}{self.LINKS_JSON}{Style.RESET_ALL}" - ) with open(self.LINKS_JSON, "w") as f: json.dump( { @@ -202,15 +193,6 @@ def process_file(file: File) -> None: indent=2, ) - # @mkdocs.plugins.event_priority(-200) - # def on_page_markdown( - # self, markdown, page: Page, config: MkDocsConfig, files: Files - # ) -> str: - # config["current_page"] = page # needed for the preprocessor - # config["links_number"] = [] - # markdown += "\n" + self.TOKEN_LIST_WIKILINKS + "\n" - # return markdown - def on_page_content( self, html, page: Page, 
config: MkDocsConfig, files: Files
     ) -> str:
diff --git a/mkdocs_juvix/logger.py b/mkdocs_juvix/logger.py
new file mode 100644
index 0000000..ed84c8c
--- /dev/null
+++ b/mkdocs_juvix/logger.py
@@ -0,0 +1,70 @@
+import logging
+from typing import Any, MutableMapping
+
+from colorama import Fore, Style  # type: ignore
+
+
+class PrefixedLogger(logging.LoggerAdapter):
+    """A logger adapter to prefix log messages."""
+
+    def __init__(self, prefix: str, logger: logging.Logger) -> None:
+        """
+        Initialize the logger adapter.
+
+        Arguments:
+            prefix: The string to insert in front of every message.
+            logger: The logger instance.
+        """
+        super().__init__(logger, {})
+        self.prefix = prefix
+
+    def process(self, msg: str, kwargs: MutableMapping[str, Any]) -> tuple[str, Any]:
+        """
+        Process the message.
+
+        Arguments:
+            msg: The message.
+            kwargs: Remaining arguments.
+
+        Returns:
+            The processed message.
+        """
+        return f"{self.prefix}: {msg}", kwargs
+
+
+def get_plugin_logger(name: str) -> PrefixedLogger:
+    """
+    Return a logger for plugins.
+
+    Arguments:
+        name: The name to use with `logging.getLogger`.
+
+    Returns:
+        A logger configured to work well in MkDocs,
+        prefixing each message with the plugin package name.
+
+    Example:
+        ```python
+        from mkdocs.plugins import get_plugin_logger
+
+        log = get_plugin_logger(__name__)
+        log.info("My plugin message")
+        ```
+    """
+    logger = logging.getLogger(f"mkdocs.plugins.{name}")
+    # Clear the screen before each info message without re-invoking the patched method.
+    _original_info = logger.info
+    setattr(logger, "info", lambda *args, **kwargs: (clear_screen(), _original_info(*args, **kwargs)))
+    return PrefixedLogger(name.split(".", 1)[0], logger)
+
+
+log = get_plugin_logger(f"{Fore.BLUE}juvix_mkdocs{Style.RESET_ALL}")
+
+
+def clear_screen():
+    print("\033[H\033[J", end="", flush=True)
+
+
+def clear_line():
+    print("\033[A", end="", flush=True)
+    print("\033[K", end="\r", flush=True)
diff --git a/mkdocs_juvix/main.py b/mkdocs_juvix/main.py
index 954ad85..810cd57 100644
--- a/mkdocs_juvix/main.py
+++ b/mkdocs_juvix/main.py
@@ -1,5 +1,4 @@
 import json
-import os
 import re
 import shutil
 import subprocess
@@ -16,16 +15,18 @@
 from colorama import Back, Fore, Style  # type: ignore
 from dotenv import load_dotenv
 from mkdocs.config.defaults import MkDocsConfig
-from mkdocs.plugins import BasePlugin, PrefixedLogger, get_plugin_logger
+from mkdocs.plugins import BasePlugin
 from mkdocs.structure.files import Files
 from mkdocs.structure.pages import Page
 from semver import Version
+from tqdm import tqdm  # type: ignore
 from watchdog.events import FileSystemEvent
 
 from mkdocs_juvix.common.preprocesors.links import WLPreprocessor
 from mkdocs_juvix.env import ENV, FIXTURES_PATH
 from mkdocs_juvix.images import process_images
 from mkdocs_juvix.links import TOKEN_LIST_WIKILINKS, WikilinksPlugin
+from mkdocs_juvix.logger import clear_line, clear_screen, log
 from mkdocs_juvix.snippets import RE_SNIPPET_SECTION, SnippetPreprocessor
 from mkdocs_juvix.utils import (
     compute_sha_over_folder,
@@ -38,17 +39,23 @@
 warnings.filterwarnings("ignore", category=DeprecationWarning)
 load_dotenv()
 
-os.environ["DEBUG"] = "true"
 
-log: PrefixedLogger = get_plugin_logger(f"{Fore.BLUE}[juvix_mkdocs]{Style.RESET_ALL}")
+# os.environ["DEBUG"] = "true"
+
+SKIP_DIRS = [
+    ".juvix-build",
+    ".git",
+    "images",
+    "assets",
+    "references",
+]
+ERROR_MESSAGE_EXTENSION = ".error-"
 
 
 def time_spent(message: Optional[Any] = None, print_result: bool = False):
     return time_spent_decorator(log=log, message=message, print_result=print_result)
 
 
-ERROR_MESSAGE_EXTENSION = ".error-"
-
 _pipeline: str = """
 For reference, the Mkdocs Pipeline is the
following: ├── on_startup(command, dirty) └── on_config(config) @@ -85,7 +92,7 @@ def template_error_message( filepath: Optional[Path], command: List[str], error_message: str ) -> str: return ( - f"Error processing {Fore.YELLOW}{filepath}{Style.RESET_ALL}:\n" + f"Error processing {Fore.GREEN}{filepath}{Style.RESET_ALL}:\n" f"Command: {Back.WHITE}{Fore.BLACK}{' '.join(command)}{Style.RESET_ALL}\n" f"Error message:\n{Fore.RED}{error_message}{Style.RESET_ALL}" ) @@ -107,8 +114,12 @@ def __init__(self, filepath: Path, env: ENV, config: MkDocsConfig): # File paths and locations self.absolute_filepath: Path = filepath.absolute() self.original_in_cache_filepath: Path = ( - self.env.CACHE_ORIGINALS_ABSPATH / filepath.name + self.env.CACHE_ORIGINALS_ABSPATH + / self.absolute_filepath.relative_to(self.env.DOCS_ABSPATH) ) + # copy the original file to the cache folder for faster lookup and control + self.copy_original_file_to_cache() + self.src_uri: str = filepath.as_posix() self.relative_filepath: Path = self.absolute_filepath.relative_to( env.DOCS_ABSPATH @@ -129,9 +140,7 @@ def __init__(self, filepath: Path, env: ENV, config: MkDocsConfig): # Markdown related, some filled later in the process self._markdown_output: Optional[str] = None self._metadata: Optional[dict] = None - self.cache_filepath: Path = ( - env.compute_filepath_for_cached_output_of_juvix_markdown_file(filepath) - ) + self.cache_filepath: Path = env.compute_processed_filepath(filepath) # the hash cache file is used to check if the file has changed self.hash_cache_filepath: Path = env.compute_filepath_for_cached_hash_for( self.absolute_filepath @@ -166,8 +175,6 @@ def __init__(self, filepath: Path, env: ENV, config: MkDocsConfig): "snippets": None, } - self.load_error_messages() - except Exception as e: log.error(f"Error initializing JuvixMarkdownFile: {e}") raise @@ -189,12 +196,6 @@ def to_dict(self) -> Dict[str, Any]: # Processing flags "needs_isabelle": self._needs_isabelle, "include_isabelle_at_bottom": self._include_isabelle_at_bottom, - "processed_juvix_markdown": self._processed_juvix_markdown, - "processed_juvix_isabelle": self._processed_juvix_isabelle, - "processed_juvix_html": self._processed_juvix_html, - "processed_images": self._processed_images, - "processed_wikilinks": self._processed_wikilinks, - "processed_snippets": self._processed_snippets, # Error handling "error_messages": self._cached_error_messages, } @@ -235,9 +236,7 @@ def cached_hash(self) -> Optional[str]: Return the cached hash of the markdown output generated by running `juvix markdown`. The hash is used to check if the file has changed. """ - markdown_is_cached = self.is_cached() - hash_cache_filepath_exists = self.hash_cache_filepath.exists() - if markdown_is_cached and hash_cache_filepath_exists: + if self.is_cached() and self.hash_cache_filepath.exists(): return self.hash_cache_filepath.read_text().strip() else: return None @@ -252,6 +251,14 @@ def hash(self) -> Optional[str]: else: return None + def reset_processed_flags(self): + self._processed_juvix_markdown = False + self._processed_images = False + self._processed_isabelle = False + self._processed_snippets = False + self._processed_wikilinks = False + self._processed_errors = False + def changed_since_last_run(self) -> bool: """ Check if the original file has changed since the last time `juvix markdown` @@ -259,14 +266,32 @@ def changed_since_last_run(self) -> bool: location is different from the cached hash. 
""" if not self.is_cached(): + log.debug( + f"> File {Fore.GREEN}{self}{Style.RESET_ALL} has no cached output" + ) + self.reset_processed_flags() return True try: cached_hash = self.cached_hash if cached_hash is None: + log.debug( + f"> The hash for this file {Fore.GREEN}{self}{Style.RESET_ALL} is not stored in the cache" + ) + self.reset_processed_flags() return True - return self.hash != cached_hash + cond = self.hash != cached_hash + if cond: + log.debug( + f"> The file {Fore.YELLOW}{self.relative_filepath}{Style.RESET_ALL} " + f"has changed since last run" + ) + self.reset_processed_flags() + return cond except Exception as e: - log.error(f"Error checking if file changed: {e}") + log.error( + f"Error checking if file changed: {e}, so we assume it has changed" + ) + self.reset_processed_flags() return True # ------------------------------------------------------------------ @@ -282,50 +307,78 @@ def markdown_output(self) -> Optional[str]: """ if self._markdown_output is None: try: - self._markdown_output = self.generate_markdown_output() + log.debug( + f"> Because it was asked markdown and it was not cached, " + f"generating markdown output for {Fore.GREEN}{self}{Style.RESET_ALL}" + ) + self.run_pipeline(save_markdown=True, force=True) + return self._markdown_output except Exception as e: log.error(f"Error generating markdown output: {e}") return None return self._markdown_output - @time_spent(message="> saving markdown output in cache", print_result=True) + def run_pipeline(self, save_markdown: bool = True, force: bool = False) -> None: + """ + Run the pipeline of tasks to generate the markdown output of the file. + Be aware that this may get the wrong output if the snippets in the file + are not cached. + """ + if self.changed_since_last_run(): + self.generate_original_markdown(save_markdown=save_markdown) + if is_juvix_markdown_file(self.absolute_filepath): + self.generate_juvix_markdown(save_markdown=save_markdown, force=force) + self.generate_isabelle_theories( + save_markdown=save_markdown, force=force + ) + self.generate_images(save_markdown=save_markdown, force=force) + self.generate_wikilinks(save_markdown=save_markdown, force=force) + self.generate_snippets(save_markdown=save_markdown, force=force) + + @time_spent(message="> saving markdown output") def save_markdown_output(self, md_output: str) -> Optional[Path]: """ Cache the input provided as the cached markdown. Update the hash of the file to future checks. 
""" + log.debug(f"> length of markdown output: {len(md_output)}") + self._markdown_output = md_output + self.cache_filepath.parent.mkdir(parents=True, exist_ok=True) try: - self._markdown_output = md_output - self.cache_filepath.parent.mkdir(parents=True, exist_ok=True) self.cache_filepath.write_text(md_output) + except Exception as e: + log.error(f"Error saving markdown output: {e}") + return None + try: self.env.update_hash_file(self.absolute_filepath) - return self.cache_filepath.relative_to(self.env.ROOT_ABSPATH) except Exception as e: log.error(f"Error saving markdown output: {e}") return None - def _read_original_markdown(self) -> Optional[str]: + return self.cache_filepath.absolute() + + def copy_original_file_to_cache(self) -> None: + try: + self.original_in_cache_filepath.parent.mkdir(parents=True, exist_ok=True) + shutil.copy(self.absolute_filepath, self.original_in_cache_filepath) + except Exception as e: + log.error(f"Error copying original file to cache: {e}") + + def save_and_read_original_markdown_from_cache(self) -> Optional[str]: """ - Auxiliary function to read the original markdown found in the cache - folder originals. + Read the original markdown from the cache folder. """ + self.copy_original_file_to_cache() markdown_output = None - if not self.original_in_cache_filepath.exists(): - try: - shutil.copy(self.absolute_filepath, self.original_in_cache_filepath) - except Exception as e: - log.error(f"Error copying original file to cache: {e}") - return None if ( self.original_in_cache_filepath.exists() and self.original_in_cache_filepath.is_file() - and self.original_in_cache_filepath.as_posix().endswith(".md") ): markdown_output = self.original_in_cache_filepath.read_text() if markdown_output is None: log.error( - f"Failed to extract markdown output for " + f"Failed to extract original content from " f"{Fore.GREEN}{self.absolute_filepath}{Style.RESET_ALL}" ) return markdown_output @@ -334,10 +387,24 @@ def _read_original_markdown(self) -> Optional[str]: # Error handling for markdown output # ------------------------------------------------------------------ + def has_error_message(self, kind: str = "markdown") -> bool: + """Check if there is an error message for the given kind.""" + flag = False + for value in self._cached_error_messages.values(): + if value is not None: + flag = True + break + log.debug( + f"> file flags:{Fore.YELLOW}{self.relative_filepath}{Style.RESET_ALL}" + ) + log.debug(f" has error message: {Fore.YELLOW}{flag}{Style.RESET_ALL}") + return flag + def save_error_message(self, error_message: str, kind: str = "markdown") -> None: """Save the error message to a cache file.""" ext = ERROR_MESSAGE_EXTENSION + kind error_filepath = self.cache_filepath.with_suffix(ext) + error_filepath.parent.mkdir(parents=True, exist_ok=True) error_filepath.write_text(error_message) self._cached_error_messages[kind] = error_message @@ -349,26 +416,30 @@ def get_error_message(self, kind: str = "markdown") -> Optional[str]: ext = ERROR_MESSAGE_EXTENSION + kind error_filepath = self.cache_filepath.with_suffix(ext) if error_filepath.exists(): - error_message = error_filepath.read_text() - self._cached_error_messages[kind] = error_message - return error_message + try: + error_message = error_filepath.read_text() + self._cached_error_messages[kind] = error_message + return error_message + except Exception as e: + log.error(f"Error reading error message file: {e}") + return None return None - def load_error_messages(self) -> None: - """Load the error messages from cache and file 
of previous run.""" + def load_and_print_saved_error_messages(self) -> None: + """Print the error messages saved in the cache.""" for kind in self._cached_error_messages: error_message = self.get_error_message(kind) self._cached_error_messages[kind] = error_message if error_message: log.error( template_error_message( - self.absolute_filepath, + self.relative_filepath, [kind], error_message, ) ) - def clear_error_messages(self, kind: str = "markdown") -> None: + def clear_error_messages(self, kind: Optional[str] = None) -> None: """Clear the error message from cache and file of previous run.""" def clear_error_message(kind: str) -> None: @@ -382,15 +453,14 @@ def clear_error_message(kind: str) -> None: if kind: clear_error_message(kind) - else: - for kind in self._cached_error_messages: - clear_error_message(kind) + return + for kind in self._cached_error_messages: + clear_error_message(kind) def add_errors_to_markdown( self, content: str, - clear_errors: bool = False, - ) -> Optional[str]: + ) -> str: """ Format the error message to include it in the Markdown output of the file, so that it is rendered nicely. The filepath is used to extract the @@ -400,10 +470,7 @@ def add_errors_to_markdown( # check if there are any error messages if all(value is None for value in self._cached_error_messages.values()): - return None - - if clear_errors: - self.clear_error_messages() + return content def format_error_message(kind: str) -> str: error_message = self.get_error_message(kind) @@ -423,18 +490,21 @@ def format_error_message(kind: str) -> str: if self._cached_error_messages[kind] ) - metadata = parse_front_matter(content) + _output = content + metadata = parse_front_matter(_output) if metadata: - end_index: int = content.find("---", 3) - front_matter: str = content[3:end_index].strip() - return ( + end_index: int = _output.find("---", 3) + front_matter: str = _output[3:end_index].strip() + _output = ( f"---\n" f"{front_matter}\n" f"---\n\n" f"{formatted_error_msgs}\n\n" - f"{content[end_index+3:]}" + f"{_output[end_index+3:]}" ) - return f"{formatted_error_msgs}\n\n{content or ''}" + else: + _output = f"{formatted_error_msgs}\n\n{content or ''}" + return _output # ------------------------------------------------------------ # Root Juvix Project Path @@ -460,7 +530,7 @@ def _run_juvix_root_project_path(self) -> Optional[Path]: self.save_error_message(str(e), "juvix_root") log.error( template_error_message( - self.absolute_filepath, + self.relative_filepath, self._build_juvix_root_project_path_command(), str(e), ) @@ -492,8 +562,7 @@ def _build_juvix_markdown_command(self) -> List[str]: "--no-colors", ] - @time_spent(message="> running juvix markdown") - def process_juvix_markdown(self) -> Optional[str]: + def _run_command_juvix_markdown(self, force: bool = False) -> Optional[str]: """ Run the Juvix Markdown command and return the output. If the command fails, save the error message and return None. 
@@ -503,7 +572,14 @@ def process_juvix_markdown(self) -> Optional[str]: if not is_juvix_markdown_file(self.absolute_filepath): return None + self._processed_juvix_markdown = ( + False if force else self._processed_juvix_markdown + ) + self.clear_error_messages("juvix_markdown") + module_name = ".".join(self.relative_filepath.parts[-2:]) + log.debug(f"> juvix markdown for {Fore.MAGENTA}{module_name}{Style.RESET_ALL}") + try: result = subprocess.run( self._build_juvix_markdown_command(), @@ -517,13 +593,6 @@ def process_juvix_markdown(self) -> Optional[str]: return result.stdout except subprocess.CalledProcessError as e: self.save_error_message(e.stderr, "juvix_markdown") - log.error( - template_error_message( - self.absolute_filepath, - self._build_juvix_markdown_command(), - e.stderr, - ) - ) return None except Exception as e: self.save_error_message(str(e), "juvix_markdown") @@ -540,6 +609,8 @@ def process_juvix_markdown(self) -> Optional[str]: def _build_juvix_html_command(self) -> List[str]: return [ self.env.JUVIX_BIN, + "--log-level=error", + "--no-colors", "html", "--strip-prefix", self.env.DOCS_DIRNAME, @@ -556,8 +627,7 @@ def _build_juvix_html_command(self) -> List[str]: self.absolute_filepath.as_posix(), ] - @time_spent(message="> generating html") - def process_juvix_html(self, update_assets: bool = False) -> None: + def _process_juvix_html(self, update_assets: bool = False) -> None: """ Generate the HTML output running the pipeline if the file has changed since the last time `juvix html` was run on it. Otherwise, it reads @@ -570,6 +640,10 @@ def process_juvix_html(self, update_assets: bool = False) -> None: self.clear_error_messages("juvix_html") try: + clear_line() + log.info( + f"> running juvix html on {Fore.MAGENTA}{self.relative_filepath}{Style.RESET_ALL}" + ) output = subprocess.run( self._build_juvix_html_command(), cwd=self.env.DOCS_ABSPATH, @@ -642,10 +716,11 @@ def _build_juvix_isabelle_command(self) -> List[str]: cmd = [ self.env.JUVIX_BIN, "--log-level=error", + "--no-colors", "isabelle", "--stdout", "--output-dir", - self.env.CACHE_ISABELLE_OUTPUT_PATH.as_posix(), + self.env.ISABELLE_OUTPUT_PATH.as_posix(), self.absolute_filepath.as_posix(), ] # TODO: remove this once that branch is merged @@ -653,7 +728,6 @@ def _build_juvix_isabelle_command(self) -> List[str]: cmd.insert(3, "--non-recursive") return cmd - @time_spent(message="> running juvix isabelle") def _run_juvix_isabelle(self) -> Optional[str]: """ Run the Juvix Isabelle command and return the output. 
If the command @@ -663,8 +737,12 @@ def _run_juvix_isabelle(self) -> Optional[str]: """ if not is_juvix_markdown_file(self.absolute_filepath): return None + self.clear_error_messages("juvix_isabelle") try: + log.debug( + f"> running juvix isabelle on {Fore.MAGENTA}{self.relative_filepath}{Style.RESET_ALL}" + ) result = subprocess.run( self._build_juvix_isabelle_command(), cwd=self.env.DOCS_ABSPATH, @@ -674,13 +752,6 @@ def _run_juvix_isabelle(self) -> Optional[str]: ) if result.returncode != 0: self.save_error_message(result.stderr, "juvix_isabelle") - log.error( - template_error_message( - self.absolute_filepath, - self._build_juvix_isabelle_command(), - result.stderr, - ) - ) else: self.clear_error_messages("juvix_isabelle") return result.stdout @@ -688,6 +759,7 @@ def _run_juvix_isabelle(self) -> Optional[str]: log.error(f"Error running Juvix Isabelle on {self}: {e}") return None + @time_spent(message="> processing isabelle translation") def process_isabelle_translation( self, content: str, modify_markdown_output: bool = True ) -> Optional[str]: @@ -723,7 +795,7 @@ def _save_isabelle_theory(self, result: str) -> Optional[str]: if not self.cached_isabelle_filepath: log.error( template_error_message( - self.absolute_filepath, + self.relative_filepath, self._build_juvix_isabelle_command(), "Could not determine the Isabelle file name for: " f"{self.relative_filepath}", @@ -779,25 +851,35 @@ def _fix_unclosed_snippet_annotations(isabelle_output: str) -> str: # Juvix Markdown files # ------------------------------------------------------------------------ - def generate_markdown_output(self, save: bool = True) -> Optional[str]: + def _skip_generation(self, process_tags: List[str] = []) -> bool: """ - Generate the markdown output running the pipeline if the file has changed - since the last time `juvix markdown` was run on it. Otherwise, it reads - the cached markdown output from the cache file. - - pipeline: - - run juvix markdown (cache if necessary) - - process isabelle (cache if necessary based on metadata) - - process images (cache if necessary) - - process snippets (cache if necessary) - - process wikilinks (cache if necessary) - - generate html - - Note: Do not call metadata before the markdown output is generated. + Skip the generation of the markdown output if the file has not changed + since the last time the pipeline was run on it. 
""" + for tag in process_tags: + if tag == "juvix_markdown" and not self._processed_juvix_markdown: + return False + if tag == "isabelle" and not self._processed_juvix_isabelle: + return False + if tag == "snippets" and not self._processed_snippets: + return False + if tag == "wikilinks" and not self._processed_wikilinks: + return False + if tag == "images" and not self._processed_images: + return False + if tag == "errors" and not self._processed_errors: + return False + return not self.changed_since_last_run() + + def skip_and_use_cache_for_process( + self, process_tag: str, force: bool = False + ) -> Optional[str]: + if force: + setattr(self, process_tag, False) try: - if not self.changed_since_last_run(): + if self._skip_generation(process_tags=[process_tag]): log.debug(f"Reading cached markdown from {self.cache_filepath}") + self.load_and_print_saved_error_messages() return self.cache_filepath.read_text() except Exception as e: log.error( @@ -805,147 +887,174 @@ def generate_markdown_output(self, save: bool = True) -> Optional[str]: f"{Fore.GREEN}{self.cache_filepath}{Style.RESET_ALL}:\n{e}" ) return None + return None - _markdown_output = self._read_original_markdown() - if _markdown_output is None: + def generate_original_markdown(self, save_markdown: bool = True) -> None: + """ + Save the original markdown output for the file for later use. + """ + _markdown_output = self.save_and_read_original_markdown_from_cache() + if save_markdown and _markdown_output: + try: + self.save_markdown_output(_markdown_output) + except Exception as e: + log.error(f"Failed to save markdown output, we however continue: {e}") + + def generate_juvix_markdown( + self, save_markdown: bool = True, force: bool = False + ) -> Optional[str]: + """ + Generate the markdown output for the file. 
+ """ + if not is_juvix_markdown_file(self.absolute_filepath): + log.debug( + f"> Skipping markdown generation for {Fore.GREEN}{self}{Style.RESET_ALL} " + f"because it is not a Juvix Markdown file" + ) return None - markdown_output: str = _markdown_output + if result := self.skip_and_use_cache_for_process( + force=force, + process_tag="juvix_markdown", + ): + log.debug( + f"> Returning cached markdown output for {Fore.GREEN}{self}{Style.RESET_ALL}" + ) + return result + + markdown_output: str = self.cache_filepath.read_text() metadata = parse_front_matter(markdown_output) or {} + preprocess = metadata.get("preprocess", {}) + _output = None + if preprocess.get("juvix", True) and ( + not self._processed_juvix_markdown or force + ): + _output = self._run_command_juvix_markdown() + if _output and save_markdown: + self._processed_juvix_markdown = True + self.save_markdown_output(_output) + else: + self._processed_juvix_markdown = False + return _output - # ------------------------------------------------------------ - # Process Juvix - # ------------------------------------------------------------ - needs_juvix_markdown = is_juvix_markdown_file(self.absolute_filepath) and ( - preprocess.get("juvix", True) - ) - juvix_succeeded = False - if needs_juvix_markdown: - _output = self.process_juvix_markdown() - self._processed_juvix_markdown = False - if _output: - markdown_output = _output - juvix_succeeded = True - self._processed_juvix_markdown = True - - # ------------------------------------------------------------ - # Process Images - # ------------------------------------------------------------ - needs_images = preprocess.get("images", True) - if needs_images: - self._processed_images = False - _output = process_images( - self.env, - markdown_output, - self.absolute_filepath, - ) - if _output: - markdown_output = _output - self._processed_images = True + @time_spent(message="> generating isabelle theories") + def generate_isabelle_theories( + self, save_markdown: bool = True, force: bool = False + ) -> Optional[str]: + """ + Process the Isabelle translation, saving the output to the cache folder. 
+ """ + if not is_juvix_markdown_file(self.absolute_filepath): + return None + + if result := self.skip_and_use_cache_for_process( + force=force, + process_tag="isabelle", + ): + return result - # ------------------------------------------------------------ - # Process Isabelle - # ------------------------------------------------------------ + markdown_output: str = self.cache_filepath.read_text() + metadata = parse_front_matter(markdown_output) or {} + preprocess = metadata.get("preprocess", {}) self._needs_isabelle = preprocess.get("isabelle", False) self._needs_isabelle_at_bottom = preprocess.get( "isabelle_at_bottom", self._needs_isabelle, ) - if (self._needs_isabelle or self._needs_isabelle_at_bottom) and juvix_succeeded: + _output = None + if ( + (self._needs_isabelle or self._needs_isabelle_at_bottom) + and not self._processed_juvix_isabelle + or force + ): _output = self.process_isabelle_translation( content=markdown_output, modify_markdown_output=self._needs_isabelle_at_bottom, ) - self._processed_isabelle = False - if _output: - markdown_output = _output - self._processed_isabelle = True - - # ------------------------------------------------------------ - # Process Snippets - # ------------------------------------------------------------ + if _output and save_markdown: + self._processed_juvix_isabelle = True + self.save_markdown_output(_output) + else: + self._processed_juvix_isabelle = False + return _output + + @time_spent(message="> extracting snippets") + def generate_snippets( + self, save_markdown: bool = True, force: bool = False + ) -> Optional[str]: + """ + Modify the markdown output by adding the snippets. This requires the + preprocess of Juvix and Isabelle to be ocurred before. + """ + _markdown_output: str = self.cache_filepath.read_text() + metadata = parse_front_matter(_markdown_output) or {} + preprocess = metadata.get("preprocess", {}) needs_snippets = preprocess.get("snippets", True) - if needs_snippets: - self._processed_snippets = False - _output = self.process_snippets(content=markdown_output) - self._processed_snippets = False - if _output: - markdown_output = _output + _output = None + if needs_snippets and (not self._processed_snippets or force): + _output = self.run_snippet_preprocessor(content=_markdown_output) + if _output and save_markdown: self._processed_snippets = True - - # ------------------------------------------------------------ - # Process Wikilinks - # ------------------------------------------------------------ - _output = self.process_wikilinks( - content=markdown_output, - modify_markdown_output=True, - ) - self._processed_wikilinks = False - if _output: - markdown_output = _output - self._processed_wikilinks = True - - # ------------------------------------------------------------ - # Add the error messages to the markdown output - # ------------------------------------------------------------ - _output = self.add_errors_to_markdown( - content=markdown_output, clear_errors=True - ) - self._processed_errors = False - if _output: - markdown_output = _output - self._processed_errors = True - - # ------------------------------------------------------------ - # Save the markdown output - # ------------------------------------------------------------ - if save: - try: - self.save_markdown_output(markdown_output) - except Exception as e: - log.error(f"Failed to save markdown output, we however continue: {e}") - # Continue even if saving fails - return markdown_output + self.save_markdown_output(_output) + return _output @time_spent(message="> 
processing snippets") - def process_snippets( + def run_snippet_preprocessor( self, content: Optional[str] = None, - base_path: List[Path] = [Path(".")], - restrict_base_path: bool = True, - encoding: str = "utf-8", - check_paths: bool = True, - auto_append: List[str] = [], - url_download: bool = True, - url_max_size: int = 32 * 1024 * 1024, - url_timeout: int = 10, - url_request_headers: dict = {}, - dedent_subsections: bool = True, - tab_length: int = 2, ) -> str: snippet_preprocessor = SnippetPreprocessor() - snippet_preprocessor.base_path = base_path - snippet_preprocessor.restrict_base_path = restrict_base_path - snippet_preprocessor.encoding = encoding - snippet_preprocessor.check_paths = check_paths - snippet_preprocessor.auto_append = auto_append - snippet_preprocessor.url_download = url_download - snippet_preprocessor.url_max_size = url_max_size - snippet_preprocessor.url_timeout = url_timeout - snippet_preprocessor.url_request_headers = url_request_headers - snippet_preprocessor.dedent_subsections = dedent_subsections - snippet_preprocessor.tab_length = tab_length + snippet_preprocessor.enhanced_mdfile = self + snippet_preprocessor.base_path = [ + self.cache_filepath.parent.resolve().absolute(), + self.env.CACHE_PROCESSED_MARKDOWN_PATH.resolve().absolute(), + ] if content: try: - content = "\n".join(snippet_preprocessor.run(content.split("\n"))) + _output = snippet_preprocessor.run( + content.split("\n"), file_name=self.cache_filepath + ) + if isinstance(_output, Exception): + raise _output + content = "\n".join(_output) except Exception as e: - log.error(f"Error processing snippets: {e}") + self.save_error_message(str(e), "snippets") + return content or "Something went wrong processing snippets" + @time_spent(message="> generating wikilinks") + def generate_wikilinks( + self, save_markdown: bool = True, force: bool = False + ) -> Optional[str]: + """ + Modify the markdown output by adding the wikilinks. This requires the + preprocess of Juvix and Isabelle to be ocurred before. + """ + _output = None + self._processed_wikilinks = False if force else self._processed_wikilinks + _markdown_output = self.markdown_output + if _markdown_output is None: + log.error( + f"Failed to read markdown output from {self.relative_filepath} when processing for wikilinks" + ) + return None + metadata = parse_front_matter(_markdown_output) or {} + preprocess = metadata.get("preprocess", {}) + needs_wikilinks = preprocess.get("wikilinks", True) + if needs_wikilinks and not self._processed_wikilinks: + _output = self.run_wikilinks_preprocessor( + content=_markdown_output, + modify_markdown_output=True, + ) + if _output and save_markdown: + self.save_markdown_output(_output) + self._processed_wikilinks = True + return _output + @time_spent(message="> processing wikilinks") - def process_wikilinks( + def run_wikilinks_preprocessor( self, content: str, modify_markdown_output: bool = True, @@ -962,14 +1071,52 @@ def process_wikilinks( content = wl_preprocessor._run(content) return content + "\n" + TOKEN_LIST_WIKILINKS + "\n" + def generate_images( + self, save_markdown: bool = True, force: bool = False + ) -> Optional[str]: + """ + Modify the markdown output by adding the images. This requires the + preprocess of Juvix and Isabelle to be ocurred before. 
+ """ + _output = None + _markdown_output = self.cache_filepath.read_text() + metadata = parse_front_matter(_markdown_output) or {} + preprocess = metadata.get("preprocess", {}) + needs_images = preprocess.get("images", True) + if needs_images and (not self._processed_images or force): + _output = process_images( + self.env, + _markdown_output, + self.absolute_filepath, + ) + if _output and save_markdown: + self._processed_images = True + self.save_markdown_output(_output) + else: + self._processed_images = False + return _output -SKIP_DIRS = [ - ".juvix-build", - ".git", - "images", - "assets", - "references", -] + @time_spent(message="> generating errors") + def write_errors_in_markdown( + self, save_markdown: bool = True, force: bool = False + ) -> Optional[str]: + """ + Modify the markdown output by adding the errors. This requires the + preprocess of Juvix and Isabelle to be ocurred before. + """ + _output = None + _markdown_output = self.cache_filepath.read_text() + metadata = parse_front_matter(_markdown_output) or {} + preprocess = metadata.get("preprocess", {}) + needs_errors = preprocess.get("errors", True) + if needs_errors and (not self._processed_errors or force): + _output = self.add_errors_to_markdown(content=_markdown_output) + if save_markdown: + self.save_markdown_output(_output) + self._processed_errors = True + else: + self._processed_errors = False + return _output class EnhancedMarkdownCollection: @@ -1010,7 +1157,7 @@ def __init__(self, config, env: ENV, docs: Optional[Path] = None): log.error(f"Error initializing JuvixMarkdownCollection: {e}") raise - @time_spent(message="> Storing original files in cache for faster lookup") + @time_spent(message="> storing original files in cache for faster lookup") def cache_orginals(self) -> List[EnhancedMarkdownFile]: """ Cache the original Juvix Markdown files in the cache folder for faster @@ -1018,20 +1165,39 @@ def cache_orginals(self) -> List[EnhancedMarkdownFile]: """ try: - log.info( - f"Collecting Markdown files for pre-processing from {Fore.GREEN}{self.docs_path}{Style.RESET_ALL}" + md_files = list(self.docs_path.rglob("*.md")) + log.debug( + f"Collecting {Fore.GREEN}{len(md_files)}{Style.RESET_ALL} " + f"Markdown files for pre-processing from " + f"{Fore.GREEN}{self.docs_path}{Style.RESET_ALL}" ) self.files = [] - md_files = self.docs_path.rglob("*.md") - for file in md_files: - if not set(file.parts) & set(SKIP_DIRS): + files_to_process = [ + file for file in md_files if not set(file.parts) & set(SKIP_DIRS) + ] + + with tqdm( + total=len(files_to_process), desc="> creating cache database" + ) as pbar: + for file in files_to_process: enhanced_file = EnhancedMarkdownFile(file, self.env, self.config) self.files.append(enhanced_file) - if enhanced_file.changed_since_last_run(): - if not enhanced_file.original_in_cache_filepath.exists(): - shutil.copy(file, enhanced_file.original_in_cache_filepath) + enhanced_file.original_in_cache_filepath.parent.mkdir( + parents=True, exist_ok=True + ) + current_file = enhanced_file.relative_filepath + pbar.set_postfix_str( + f"{Fore.MAGENTA}{current_file}{Style.RESET_ALL}" + ) + + log.debug( + f"Copying original content from {file} to {enhanced_file.original_in_cache_filepath} for safe content extraction" + ) + shutil.copy(file, enhanced_file.original_in_cache_filepath) + pbar.update(1) return self.files + except Exception as e: log.error(f"Error getting Markdown files in {self.docs_path}: {e}") return [] @@ -1055,7 +1221,6 @@ def get_enhanced_file_entry(self, filepath: Path) -> 
Optional[EnhancedMarkdownFi ) @property - @time_spent(message="> computing hash") def hash(self) -> str: """ Compute the hash of the folder containing the original Juvix Markdown @@ -1068,7 +1233,6 @@ def hash(self) -> str: log.error(f"Error computing hash: {e}") return "" - @time_spent(message="Did Markdown files change?", print_result=True) def has_changes(self) -> bool: """ Check if the markdown files, including Juvix Files, have changed since @@ -1093,15 +1257,14 @@ def update_cached_hash(self) -> Optional[str]: log.error(f"Error updating cached hash of the entire project: {e}") return None - @time_spent(message="> checking if html cache is empty", print_result=True) def is_html_cache_empty(self) -> bool: """Check if the folder containing the HTML cache is empty.""" return len(list(self.env.CACHE_HTML_PATH.glob("*"))) == 0 - @time_spent(message="> saving juvix modules json", print_result=True) def save_juvix_modules_json(self) -> Optional[Path]: """ Save the Juvix modules JSON file to the cache folder. + Return the path of the JSON file relative to the root of the project. """ if self.files is None: return None @@ -1122,10 +1285,13 @@ def save_juvix_modules_json(self) -> Optional[Path]: return None @time_spent() - def run_pipeline( + def run_pipeline_on_collection( self, - generate_markdown: bool = True, - generate_html: bool = True, + generate_juvix_markdown: bool = True, + generate_juvix_isabelle: bool = True, + generate_snippets: bool = True, + generate_wikilinks: bool = True, + generate_images: bool = True, ) -> None: """ Process the files pipeline. First, generate the markdown output for all @@ -1133,21 +1299,109 @@ def run_pipeline( Juvix Markdown files. """ if self.files is None: - log.error("No files to process") + log.debug("> no files to process") return + clear_screen() log.info( - f"Processing {Fore.GREEN}{len(self.files)}{Style.RESET_ALL} files... 
for Markdown={generate_markdown} and HTML={generate_html}"
+            f"> running pipeline on {Fore.GREEN}{len(self.files)}{Style.RESET_ALL} files"
         )
-        for file in self.files:
-            if generate_markdown:
-                file.generate_markdown_output(save=True)
+        files_to_process = [
+            file
+            for file in self.files
+            if file.changed_since_last_run() or file.has_error_message()
+        ]

-            if generate_html:
-                log.info("Adding auxiliary files to the HTML...")
-                self.generate_html()
+        if len(files_to_process) == 0:
+            log.debug(f"{Fore.YELLOW}no files to process{Style.RESET_ALL}")
+        else:
+            log.info(
+                f"> {Fore.GREEN}{len(files_to_process)}{Style.RESET_ALL} file{'s' if len(files_to_process) > 1 else ''} need{'s' if len(files_to_process) == 1 else ''} to be processed due to changes or errors in the previous run"
+            )
+            with tqdm(
+                total=len(files_to_process),
+                desc="> collecting original markdown for safe lookup",
+            ) as pbar:
+                for file in files_to_process:
+                    current_file = file.relative_filepath
+                    pbar.set_postfix_str(
+                        f"{Fore.MAGENTA}{current_file}{Style.RESET_ALL}"
+                    )
+                    file.generate_original_markdown()
+                    pbar.update(1)
+
+        clear_line()
+        juvix_files = [
+            file
+            for file in files_to_process
+            if is_juvix_markdown_file(file.absolute_filepath)
+        ]
+        if generate_juvix_markdown:
+            with tqdm(
+                total=len(juvix_files), desc="> processing Juvix markdown"
+            ) as pbar:
+                for file in juvix_files:
+                    current_file = file.relative_filepath
+                    pbar.set_postfix_str(
+                        f"{Fore.MAGENTA}{current_file}{Style.RESET_ALL}"
+                    )
+                    file.generate_juvix_markdown()
+                    pbar.update(1)
+
+        clear_line()
+        if generate_juvix_isabelle:
+            with tqdm(
+                total=len(juvix_files), desc="> processing Isabelle theories"
+            ) as pbar:
+                for file in juvix_files:
+                    current_file = file.relative_filepath
+                    pbar.set_postfix_str(
+                        f"{Fore.MAGENTA}{current_file}{Style.RESET_ALL}"
+                    )
+                    file.generate_isabelle_theories()
+                    pbar.update(1)
+
+        clear_line()
+        if generate_images:
+            with tqdm(
+                total=len(files_to_process), desc="> processing images"
+            ) as pbar:
+                for file in files_to_process:
+                    file.generate_images()
+                    current_file = file.relative_filepath
+                    pbar.set_postfix_str(
+                        f"{Fore.MAGENTA}{current_file}{Style.RESET_ALL}"
+                    )
+                    pbar.update(1)
+
+        clear_line()
+        if generate_wikilinks:
+            with tqdm(
+                total=len(files_to_process), desc="> processing wikilinks"
+            ) as pbar:
+                for file in files_to_process:
+                    current_file = file.relative_filepath
+                    pbar.set_postfix_str(
+                        f"{Fore.MAGENTA}{current_file}{Style.RESET_ALL}"
+                    )
+                    file.generate_wikilinks()
+                    pbar.update(1)
+
+        clear_line()
+        # snippets are generated on all the files
+        if generate_snippets:
+            with tqdm(total=len(self.files), desc="> extracting snippets") as pbar:
+                for file in self.files:
+                    current_file = file.relative_filepath
+                    pbar.set_postfix_str(
+                        f"{Fore.MAGENTA}{current_file}{Style.RESET_ALL}"
+                    )
+                    file.generate_snippets()
+                    pbar.update(1)
+
+        clear_line()
         self.update_cached_hash()
         self.save_juvix_modules_json()
@@ -1158,7 +1412,6 @@ def remove_html_cache(self) -> None:
         except Exception as e:
             log.error(f"Error removing HTML cache folder: {e}")

-    @time_spent(message="> generating html")
     def generate_html(self, force: bool = False) -> None:
         """
         Generate the HTML output for all the Juvix Markdown files. In case the
@@ -1167,35 +1420,28 @@
         every Juvix Markdown file individually (not recommended). 
""" - if self.files is None: - log.error("No files to process") - return - needs_to_generate_html = self.is_html_cache_empty() or self.has_changes() - - if not needs_to_generate_html and not force: - log.info("No changes detected, skipping HTML generation") + if self.files is None and not needs_to_generate_html and not force: + log.info("No files or changes detected, skipping HTML generation") return - if self.everything_html_file and needs_to_generate_html: - self.everything_html_file.process_juvix_html(update_assets=True) + log.info("> adding auxiliary HTML files...") + if self.everything_html_file and (needs_to_generate_html or force): + self.everything_html_file._process_juvix_html(update_assets=True) return - log.info( - f"{Fore.YELLOW}Generating HTML per file... (Recommend to create " - f"{Fore.GREEN}`everything.juvix.md`{Fore.YELLOW} file at the level " - f"of the docs folder){Style.RESET_ALL}" - ) - self.remove_html_cache() self.env.CACHE_HTML_PATH.mkdir(parents=True, exist_ok=True) - for file in self.files: - if ( - is_juvix_markdown_file(file.absolute_filepath) - and needs_to_generate_html - ): - file.process_juvix_html(update_assets=True) + @time_spent(message="> generating HTML for files") + def run_html_generation(files: List[EnhancedMarkdownFile]) -> None: + for file in files: + if is_juvix_markdown_file(file.absolute_filepath) and ( + needs_to_generate_html or force + ): + file._process_juvix_html(update_assets=True) + + run_html_generation(self.files) # -------------------------------------------------------------------------- # Juvix dependencies @@ -1255,9 +1501,10 @@ def update_juvix_dependencies(self) -> bool: class JuvixPlugin(BasePlugin): enhanced_collection: EnhancedMarkdownCollection wikilinks_plugin: WikilinksPlugin + first_run: bool = True def on_startup(self, *, command: str, dirty: bool) -> None: - pass + clear_screen() def on_config(self, config: MkDocsConfig) -> MkDocsConfig: self.env = ENV(config) @@ -1279,18 +1526,19 @@ def on_config(self, config: MkDocsConfig) -> MkDocsConfig: "\n- JUVIX_PATH" ) - if self.env.juvix_enabled: + if self.env.juvix_enabled and self.first_run: self.enhanced_collection = EnhancedMarkdownCollection( - env=self.env, config=config + env=self.env, + config=config, ) self.enhanced_collection.cache_orginals() - self.add_footer_css_file_to_extra_css() + if self.env.CLEAN_DEPS: self.enhanced_collection.clean_juvix_dependencies() if self.env.UPDATE_DEPS: self.enhanced_collection.update_juvix_dependencies() - self.env.FIRST_RUN = False + self.first_run = False return config @@ -1300,19 +1548,15 @@ def on_pre_build(self, config: MkDocsConfig) -> None: typechecking as part of the markdown processing, we include the error message as part of the content of the page """ - # if self.env.FIRST_RUN and self.env.juvix_enabled: self.wikilinks_plugin.on_pre_build(config) # This needs to be run first - self.enhanced_collection.run_pipeline( - generate_markdown=True, - generate_html=False, - ) - # self.env.FIRST_RUN = False + self.enhanced_collection.run_pipeline_on_collection() def on_files(self, files: Files, *, config: MkDocsConfig) -> Optional[Files]: """ List of the files to be included in the final build. These are copied to the site directory. 
""" + self.wikilinks_plugin.on_files(files, config) return Files( [ @@ -1341,15 +1585,15 @@ def on_page_read_source(self, page: Page, config: MkDocsConfig) -> Optional[str] self.enhanced_collection.get_enhanced_file_entry(abs_src_path) ) if file: - return file.markdown_output + return file.write_errors_in_markdown() else: log.error( - f"File not found in collection: {Fore.YELLOW}{abs_src_path}{Style.RESET_ALL}" + f"{Fore.RED}File not found in collection: " + f"{Fore.YELLOW}{abs_src_path}{Style.RESET_ALL}, Try rerun " + f"the build process." ) except Exception as e: log.error(f"Error getting file from collection: {e}") - - # exit(1) return None def on_page_markdown( @@ -1360,22 +1604,17 @@ def on_page_markdown( name. This is done to avoid having to change the file name in the navigation menu and to make the URLs consistent. """ - + config["current_page"] = page abs_src_str: Optional[str] = page.file.abs_src_path if not abs_src_str: return markdown - abs_src_path: Path = Path(abs_src_str) - if not is_juvix_markdown_file(abs_src_path): - return markdown - page.file.name = page.file.name.replace(".juvix", "") page.file.url = page.file.url.replace(".juvix", "") page.file.dest_uri = page.file.dest_uri.replace(".juvix", "") page.file.abs_dest_path = page.file.abs_dest_path.replace(".juvix", "") - config["current_page"] = page config["links_number"] = [] return markdown @@ -1388,17 +1627,21 @@ def on_post_page(self, output: str, page: Page, config: MkDocsConfig) -> str: soup = BeautifulSoup(output, "html.parser") for a in soup.find_all("a"): a["href"] = a["href"].replace(".juvix.html", ".html") - return str(soup) def on_post_build(self, config: MkDocsConfig) -> None: - log.info("> post build task: generating HTML for files") - self.enhanced_collection.run_pipeline( - generate_markdown=False, - generate_html=True, - ) + log.debug("> post build task: generating HTML for files") + self.enhanced_collection.generate_html() + self.enhanced_collection.update_cached_hash() + self.enhanced_collection.save_juvix_modules_json() self.move_html_cache_to_site_dir() - self.wikilinks_plugin.on_post_build(config) + # self.wikilinks_plugin.on_post_build(config) + + files_to_check = ( + self.enhanced_collection.files if self.enhanced_collection.files else [] + ) + for file in files_to_check: + file.load_and_print_saved_error_messages() def move_html_cache_to_site_dir(self) -> None: """ @@ -1412,6 +1655,7 @@ def move_html_cache_to_site_dir(self) -> None: log.error("No site directory specified. 
Skipping HTML cache move.") return + clear_line() log.info( f"> moving HTML cache to site directory: {Fore.GREEN}{self.env.SITE_DIR}{Style.RESET_ALL}" ) @@ -1466,21 +1710,43 @@ def wrapper(event: FileSystemEvent) -> None: ): return - log.info(f"Serving file: {fpathstr}") + # clear the console + print("\033[H\033[J", end="", flush=True) + fpath = Path(fpathstr) + if fpath.is_relative_to(self.env.DOCS_ABSPATH): + log.info( + f"> {Fore.CYAN}Serving file: {Fore.GREEN}{fpath.relative_to(self.env.DOCS_ABSPATH)}{Style.RESET_ALL}" + ) + else: + log.info( + f"> {Fore.CYAN}Serving file: {Fore.GREEN}{fpath}{Style.RESET_ALL}" + ) file: Optional[EnhancedMarkdownFile] = ( self.enhanced_collection.get_enhanced_file_entry(fpath) ) if file: if not file.changed_since_last_run(): - log.info( - f"(serving) No changes detected in {Fore.GREEN}{fpathstr}{Style.RESET_ALL}" - ) + if fpath.is_relative_to(self.env.DOCS_ABSPATH): + log.info( + f"{Fore.YELLOW}No changes detected in " + f"{Fore.GREEN}{fpath.relative_to(self.env.DOCS_ABSPATH) }{Style.RESET_ALL}" + ) + else: + log.info( + f"{Fore.YELLOW}No changes detected in " + f"{Fore.GREEN}{fpath}{Style.RESET_ALL}" + ) return else: - log.info( - f"(serving) Changes detected in {Fore.GREEN}{fpathstr}{Style.RESET_ALL}" - ) + if fpath.is_relative_to(self.env.DOCS_ABSPATH): + log.info( + f"> changes detected in {Fore.GREEN}{fpath.relative_to(self.env.DOCS_ABSPATH)}{Style.RESET_ALL}" + ) + else: + log.info( + f"> changes detected in {Fore.GREEN}{fpath}{Style.RESET_ALL}" + ) return callback(event) return wrapper @@ -1520,7 +1786,6 @@ def _generate_code_block_footer_css_file( log.error(f"Error writing to CSS file: {e}") return None - @time_spent(message="> generating codeblock footer CSS file") def add_footer_css_file_to_extra_css(self) -> MkDocsConfig: css_file = self.env.JUVIX_FOOTER_CSS_FILEPATH # Check if we need to create or update the codeblock footer CSS @@ -1530,7 +1795,7 @@ def add_footer_css_file_to_extra_css(self) -> MkDocsConfig: != Version.parse(self.env.JUVIX_VERSION) ) if needs_to_update_cached_juvix_version: - log.info( + log.debug( f"> Juvix version: {Back.WHITE}{Fore.BLACK}{self.env.JUVIX_VERSION.strip()}{Back.RESET}{Style.RESET_ALL}" ) self.env.CACHE_JUVIX_VERSION_FILEPATH.write_text(self.env.JUVIX_VERSION) @@ -1540,7 +1805,7 @@ def add_footer_css_file_to_extra_css(self) -> MkDocsConfig: css_file, self.env.JUVIX_VERSION ) if path: - log.info( + log.debug( f"> codeblock footer CSS file generated and saved to " f"{Fore.GREEN}{path.as_posix()}{Style.RESET_ALL}" ) @@ -1559,7 +1824,6 @@ def add_footer_css_file_to_extra_css(self) -> MkDocsConfig: # -------------------------------------------------------------------------- -@time_spent(message="> parsing front matter") def parse_front_matter(content: str) -> Optional[dict]: if not content.startswith("---"): return None diff --git a/mkdocs_juvix/snippets.py b/mkdocs_juvix/snippets.py index d6302a2..baa0b01 100644 --- a/mkdocs_juvix/snippets.py +++ b/mkdocs_juvix/snippets.py @@ -40,14 +40,11 @@ from colorama import Fore, Style # type: ignore from markdown import Extension # type: ignore from markdown.preprocessors import Preprocessor # type: ignore -from mkdocs.plugins import get_plugin_logger from mkdocs_juvix.env import ENV -from mkdocs_juvix.utils import find_file_in_subdirs # type: ignore +from mkdocs_juvix.logger import log from mkdocs_juvix.utils import time_spent as time_spent_decorator -log = get_plugin_logger(f"{Fore.BLUE}[juvix_mkdocs] (snippets) {Style.RESET_ALL}") - def time_spent(message: 
Optional[Any] = None, print_result: bool = False): return time_spent_decorator(log=log, message=message, print_result=print_result) @@ -99,18 +96,18 @@ class SnippetMissingError(Exception): class SnippetPreprocessor(Preprocessor): """Handle snippets in Markdown content.""" + base_path: List[Path] + restrict_base_path: bool + encoding: str + check_paths: bool + auto_append: List[str] + url_download: bool + url_max_size: int + url_timeout: float + url_request_headers: dict + dedent_subsections: bool + tab_length: int env: ENV - base_path: List[Path] = [Path("."), Path("includes")] - restrict_base_path: bool = True - encoding: str = "utf-8" - check_paths: bool = True - auto_append: List[str] = [] - url_download: bool = True - url_max_size: int = DEFAULT_URL_SIZE - url_timeout: float = DEFAULT_URL_TIMEOUT - url_request_headers: dict = DEFAULT_URL_REQUEST_HEADERS - dedent_subsections: bool = True - tab_length: int = 2 def __init__( self, @@ -120,6 +117,18 @@ def __init__( ): """Initialize.""" + self.base_path: List[Path] = [Path("."), Path("includes")] + self.restrict_base_path: bool = True + self.encoding: str = "utf-8" + self.check_paths: bool = True + self.auto_append: List[str] = [] + self.url_download: bool = True + self.url_max_size: int = DEFAULT_URL_SIZE + self.url_timeout: float = DEFAULT_URL_TIMEOUT + self.url_request_headers: dict = DEFAULT_URL_REQUEST_HEADERS + self.dedent_subsections: bool = True + self.tab_length: int = 2 + if env is None: self.env = ENV(config) else: @@ -144,6 +153,7 @@ def __init__( self.url_timeout = config["url_timeout"] self.url_request_headers = config["url_request_headers"] self.dedent_subsections = config["dedent_subsections"] + if md is not None and hasattr(md, "tab_length"): self.tab_length = md.tab_length else: @@ -156,8 +166,6 @@ def extract_section( self, section, lines, - is_juvix=False, - is_isabelle=False, backup_lines=None, backup_path=None, ): @@ -208,32 +216,18 @@ def extract_section( # We are currently in a section, so append the line if start: new_lines.append(ln) - showed_error = False if not found and self.check_paths: - if not is_juvix: - log.error( - f"[!] Snippet section {Fore.YELLOW}{section}{Style.RESET_ALL} could not be located" - ) - showed_error = True - # juvix - elif backup_lines is not None: + if backup_lines is not None: return self.extract_section( section, backup_lines, - is_juvix=False, - is_isabelle=False, backup_lines=None, backup_path=backup_path, ) - - if not showed_error: - log.error( - f"Snippet section {Fore.YELLOW}{section}{Style.RESET_ALL} not found. " - f"It might be inside a Juvix code block, unsupported in Juvix v0.6.6 or earlier. " - f"Consider using a section snippet. " - f"Error in file {Fore.GREEN}{backup_path}{Style.RESET_ALL} for section " - f"{Fore.YELLOW}{section}{Style.RESET_ALL}." - ) + new_lines.append( + f"\n!!! failure\n\n" + f" Snippet section '{section}' not found! 
Please report this issue on GitHub!\n" + ) return self.dedent(new_lines) if self.dedent_subsections else new_lines def dedent(self, lines): @@ -241,31 +235,6 @@ def dedent(self, lines): return textwrap.dedent("\n".join(lines)).split("\n") - def get_snippet_path(self, path) -> Optional[str]: - """Get snippet path.""" - snippet = None - for base in self.base_path: - base_path = Path(base) - if base_path.exists(): - if base_path.is_dir(): - if self.restrict_base_path: - filename = (base_path / path).resolve() - if not str(filename).startswith(str(base_path)): - continue - else: - filename = base_path / path - if filename.exists(): - snippet = str(filename) - break - else: - dirname = base_path.parent - filename = dirname / path - if filename.exists() and filename.samefile(base_path): - snippet = str(filename) - break - - return snippet - @functools.lru_cache() # noqa: B019 def download(self, url): """ @@ -305,9 +274,122 @@ def download(self, url): ln.decode(self.encoding).rstrip("\r\n") for ln in response.readlines() ] + def _get_snippet_path(self, base_paths: List[Path], path: Path): + snippet = None + for base in base_paths: + if Path(base).exists(): + if Path(base).is_dir(): + log.debug( + f"Base path is a directory: {Fore.MAGENTA}{base}{Style.RESET_ALL}" + ) + if self.restrict_base_path: + filename = Path(base).absolute() / path + log.debug( + f"Checking restricted base path: {Fore.MAGENTA}{filename}{Style.RESET_ALL}" + ) + if not filename.as_posix().startswith(base.as_posix()): + log.debug( + f"Rejected file not under base path: {Fore.MAGENTA}{filename}{Style.RESET_ALL}" + ) + continue + else: + if filename.exists(): + log.debug( + f"Accepted file under base path: {Fore.MAGENTA}{filename}{Style.RESET_ALL}" + ) + return filename + else: + log.debug( + f"File does not exist: {Fore.MAGENTA}{filename}{Style.RESET_ALL}" + ) + else: + filename = Path(base).absolute() / path + log.debug( + f"Checking unrestricted base path: {Fore.MAGENTA}{filename}{Style.RESET_ALL}" + ) + if filename.exists(): + log.debug( + f"Snippet found: {Fore.MAGENTA}{filename}{Style.RESET_ALL}" + ) + snippet = filename + break + else: + dirname = Path(base).parent + filename = dirname / path + log.debug( + f"Checking file in directory: {Fore.MAGENTA}{filename}{Style.RESET_ALL}" + ) + if filename.exists(): + log.debug( + f"Snippet found: {Fore.MAGENTA}{filename}{Style.RESET_ALL}" + ) + snippet = filename + break + return snippet + + def get_snippet_path(self, path: Path | str): + """Get snippet path.""" + log.debug(f"{Fore.CYAN}> getting snippet path for {path}{Style.RESET_ALL}") + if isinstance(path, str): + path = Path(path) + base_paths = self.base_path + just_raw = path and path.as_posix().endswith("!") + search_for_juvix_isabelle_output = False + + if path and path.as_posix().endswith(".juvix.md!thy"): + search_for_juvix_isabelle_output = True + log.debug( + f"Path ends with .juvix.md!thy: {Fore.MAGENTA}{path}{Style.RESET_ALL}" + ) + juvix_path = path.with_name(path.name.replace("!thy", "")) + log.debug(f"Juvix path: {Fore.MAGENTA}{juvix_path}{Style.RESET_ALL}") + # isabelle_path = juvix_path + isabelle_path = ( + self.env.compute_filepath_for_juvix_isabelle_output_in_cache(juvix_path) + ) + log.debug(f"Isabelle path: {Fore.MAGENTA}{isabelle_path}{Style.RESET_ALL}") + if isabelle_path is not None and isabelle_path.exists(): + path = isabelle_path + log.debug( + f"Changed path to Isabelle file: {Fore.MAGENTA}{path}{Style.RESET_ALL}" + ) + + if just_raw: + path = Path(path.as_posix()[:-1]) + log.debug(f"Requested raw 
snippet: {path}") + base_paths = [self.env.DOCS_ABSPATH] + + if path.is_relative_to(self.env.DOCS_ABSPATH): + path = path.relative_to(self.env.DOCS_ABSPATH) + if path.is_relative_to("docs"): + log.debug(f"Path is relative to docs: {path}") + path = path.relative_to("docs") + if path.is_relative_to("./docs"): + log.debug(f"Path is relative to ./docs: {path}") + path = path.relative_to("./docs") + + if path.is_relative_to(self.env.ISABELLE_OUTPUT_PATH): + log.debug(f"Path is relative to Isabelle output path: {path}") + path = path.relative_to(self.env.ISABELLE_OUTPUT_PATH) + if path.is_relative_to(self.env.ISABELLE_THEORIES_DIRNAME): + log.debug(f"Path is relative to Isabelle theories directory: {path}") + path = path.relative_to(self.env.ISABELLE_THEORIES_DIRNAME) + + if path.as_posix().endswith(".thy") or search_for_juvix_isabelle_output: + log.debug(f"Path is an Isabelle file: {path}") + base_paths = [self.env.ISABELLE_OUTPUT_PATH] + + if not just_raw and path.as_posix().endswith(".juvix.md"): + path = Path(path.as_posix().replace(".juvix.md", ".md")) + + return self._get_snippet_path(base_paths, path) + def parse_snippets( - self, lines, file_name=None, is_url=False, is_juvix=False, is_isabelle=False - ) -> list[str]: + self, + lines, + file_name: Optional[Path | str] = None, + is_url: bool = False, + ) -> list[str] | Exception: """Parse snippets snippet.""" if file_name: # Track this file. @@ -376,18 +458,20 @@ def parse_snippets( end = None start = None section = None + log.debug(f"{Fore.YELLOW}>>>>>> path: {path}{Style.RESET_ALL}") m = RE_SNIPPET_FILE.match(path) if m is None: + log.debug(f"{Fore.YELLOW}>>>>>> m is None{Style.RESET_ALL}") continue + path = m.group(1).strip() if not path: if self.check_paths: - raise SnippetMissingError( - "1. Snippet at path '{}' could not be found".format(path) - ) + return SnippetMissingError("No path specified for snippet") else: continue + ending = m.group(3) if ending and len(ending) > 1: end = int(ending[1:]) @@ -407,59 +491,31 @@ def parse_snippets( # Make sure we don't process `path` as a local file reference. url = self.url_download and is_link - # juvix.md with or without ! with or without thy - just_raw = path and path.endswith("!") - if just_raw: - path = path[:-1] - - is_isabelle = False - requires_thy = path and path.endswith("!thy") - if requires_thy: - path = path[:-4] - is_isabelle = True - - snippet = ( - find_file_in_subdirs( - self.env.ROOT_ABSPATH, - self.base_path, # type: ignore - Path(path), # type: ignore - ) - if not url - else path - ) + found_snippet = self.get_snippet_path(path) - is_juvix = False - if snippet: - original = snippet - if not just_raw and snippet.endswith(".juvix.md"): - snippet = self.env.compute_filepath_for_cached_output_of_juvix_markdown_file( - Path(snippet) - ) + if found_snippet is None: + if self.check_paths: + msg = f"Error type 3. Snippet at path '{Fore.MAGENTA}{path}{Style.RESET_ALL}' could not be found" + return SnippetMissingError(msg) - if not snippet.exists(): - log.warning( - f"Juvix Markdown file does not exist: {Fore.RED}{snippet}{Style.RESET_ALL}, report this issue on GitHub!" 
- ) - snippet = original + log.debug(f"{Fore.GREEN}Snippet found:{found_snippet}{Style.RESET_ALL}") - if requires_thy: - snippet = self.env.compute_filepath_for_juvix_isabelle_output_in_cache( - Path(original) - ) - if snippet is None: - snippet = original + if found_snippet is None: + log.debug( + f" Snippet not found in cache, using path: {path}" + ) + snippet = path + else: + log.debug( + f" Snippet found in cache, using filepath: {found_snippet}" + ) + snippet = ( + found_snippet.as_posix() if found_snippet and not url else path + ) - log.info( - f"The requested file is an Isabelle file: {Fore.GREEN}{snippet}{Style.RESET_ALL}" - ) - if snippet is not None and not Path(snippet).exists(): - log.warning( - f"Isabelle file does not exist: {Fore.RED}{snippet}{Style.RESET_ALL}, " - f"did you forget e.g. to add `isabelle: true` to the meta in the corresponding Juvix file?" - ) - snippet = original + if snippet: + original = snippet - is_juvix = True if isinstance(snippet, Path): snippet = snippet.as_posix() @@ -467,21 +523,10 @@ def parse_snippets( if snippet in self.seen: continue - original_lines = [] - - if is_juvix: - with codecs.open(original, "r", encoding=self.encoding) as f: - original_lines = [ln.rstrip("\r\n") for ln in f] - if start is not None or end is not None: - s = slice(start, end) - original_lines = ( - self.dedent(original_lines[s]) - if self.dedent_subsections - else original_lines[s] - ) - if not url: # Read file content + if isinstance(snippet, Path): + snippet = snippet.as_posix() with codecs.open(snippet, "r", encoding=self.encoding) as f: s_lines = [ln.rstrip("\r\n") for ln in f] if start is not None or end is not None: @@ -495,9 +540,6 @@ def parse_snippets( s_lines = self.extract_section( section, s_lines, - is_juvix, - is_isabelle, - original_lines, original, ) else: @@ -522,31 +564,28 @@ def parse_snippets( else s_lines[s] ) elif section: - s_lines = self.extract_section( - section, s_lines, is_juvix, is_isabelle - ) + s_lines = self.extract_section(section, s_lines) except SnippetMissingError: if self.check_paths: - raise + return SnippetMissingError( + f"Error type 2 while processing {Fore.MAGENTA}{file_name}{Style.RESET_ALL} when trying to extract snippet: {snippet}" + ) s_lines = [] # Process lines looking for more snippets - new_lines.extend( - [ - space + l2 - for l2 in self.parse_snippets( - s_lines, - file_name=snippet, - is_url=url, - is_juvix=is_juvix, - is_isabelle=is_isabelle, - ) - ] + parsed_snippets = self.parse_snippets( + s_lines, + file_name=snippet, + is_url=url, ) + if isinstance(parsed_snippets, Exception): + return parsed_snippets + new_lines.extend([space + l2 for l2 in parsed_snippets]) elif self.check_paths: - log.error("2. Snippet at path '{}' could not be found".format(path)) - exit(1) + raise SnippetMissingError( + f"3. Snippet at path '{path}' could not be found!" 
+ ) # Pop the current file name out of the cache if file_name: @@ -554,15 +593,16 @@ def parse_snippets( return new_lines - def run(self, lines: List[str]) -> List[str]: + def run( + self, lines: List[str], file_name: Optional[Path | str] = None + ) -> List[str] | Exception: """Process snippets.""" - - self.seen: set[str] = set() + self.seen: set[Path | str] = set() if self.auto_append: lines.extend( "\n\n-8<-\n{}\n-8<-\n".format("\n\n".join(self.auto_append)).split("\n") ) - return self.parse_snippets(lines) + return self.parse_snippets(lines, file_name=file_name) class SnippetExtension(Extension): diff --git a/mkdocs_juvix/utils.py b/mkdocs_juvix/utils.py index 9eb8f94..b277701 100644 --- a/mkdocs_juvix/utils.py +++ b/mkdocs_juvix/utils.py @@ -1,5 +1,4 @@ import hashlib -import logging import os import pickle import time @@ -9,9 +8,9 @@ from colorama import Fore, Style # type: ignore from mkdocs.config.defaults import MkDocsConfig -from mkdocs.plugins import PrefixedLogger -log = logging.getLogger("mkdocs") +from mkdocs_juvix.logger import PrefixedLogger, log + EXCLUDED_DIRS = { ".git", ".hooks", @@ -80,11 +79,14 @@ def find_file_in_subdirs( full_path = base_dir / filepath if full_path.exists(): return full_path.absolute().as_posix() - subdirs = [base_dir / "images"] + list(subdirs) + subdirs = [base_dir / "images"] + list(set(subdirs)) for subdir in subdirs: full_path = Path(subdir) / filepath.name if full_path.exists(): - return full_path.absolute().as_posix() + if full_path.is_file(): + return full_path.absolute().as_posix() + else: + log.error(f"Found directory {full_path} instead of file") return None @@ -172,6 +174,10 @@ def wrapper(*args, **kwargs): start_time = time.time() result = None exception = None + if message: + log_message = f"{Fore.YELLOW}{message} ...{Style.RESET_ALL}" + if os.getenv("DEBUG", False): + log.info(log_message) try: result = func(*args, **kwargs) except Exception as e: @@ -179,10 +185,13 @@ def wrapper(*args, **kwargs): end_time = time.time() elapsed_time = end_time - start_time - log_message = f"{Fore.BLUE}({elapsed_time:.3f}s){Style.RESET_ALL}" + log_message = ( + f"done in {Fore.BLUE}{elapsed_time:.3f} seconds{Style.RESET_ALL}" + ) if print_result and result: - log_message = f"{Fore.MAGENTA}{result}{Style.RESET_ALL} {log_message}" - log_message = f"{Fore.YELLOW}{message or func.__name__}{Style.RESET_ALL}: {log_message}" + log_message = f"`{func.__name__} =>` {result} - {log_message}" + if not print_result: + log_message = f"`{func.__name__}` - {log_message}" if os.getenv("DEBUG", False): log.info(log_message) if exception: diff --git a/pyproject.toml b/pyproject.toml index 851b866..b0f142b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "mkdocs-juvix-plugin" -version = "0.4.0" +version = "0.4.1" description = "MkDocs documentation with support for Juvix Markdown files" authors = ["Jonathan Prieto-Cubides, and GitHub contributors"] license = "MIT" diff --git a/src/cli.py b/src/cli.py index 180f04e..ea95d22 100644 --- a/src/cli.py +++ b/src/cli.py @@ -284,7 +284,7 @@ def new( Please upgrade Juvix and try again.""", fg="red", ) - return + exit(1) except subprocess.CalledProcessError: click.secho( @@ -784,7 +784,22 @@ def install_poetry_package(package_name, skip_flag=False, development_flag=False help="Path to the mkdocs configuration file", show_default=True, ) -def serve(project_path: Path, no_open: bool, quiet: bool, config_file: Path): +@click.option("--debug", is_flag=True, help="Set the environment variable DEBUG 
to 1") +@click.option( + "--remove-cache", "-r", is_flag=True, help="Remove the cache before serving" +) +@click.option( + "--verbose", "-v", is_flag=True, help="Set the environment variable VERBOSE to 1" +) +def serve( + project_path: Path, + no_open: bool, + quiet: bool, + config_file: Path, + debug: bool, + verbose: bool, + remove_cache: bool, +): """This is a wrapper around `poetry run mkdocs serve`. It is used to serve the project using mkdocs.""" @@ -797,12 +812,24 @@ def serve(project_path: Path, no_open: bool, quiet: bool, config_file: Path): fg="red", ) return - + previous_debug: str | None = os.environ.get("DEBUG") + if debug: + os.environ["DEBUG"] = "1" + if remove_cache: + try: + shutil.rmtree(project_path / ".cache-juvix-mkdocs") + except Exception: + click.secho("Failed to remove .cache-juvix-mkdocs folder.", fg="red") + if previous_debug: + os.environ["DEBUG"] = previous_debug + return mkdocs_serve_cmd = ["poetry", "run", "mkdocs", "serve", "--clean"] if not no_open: mkdocs_serve_cmd.append("--open") if quiet: mkdocs_serve_cmd.append("-q") + if verbose: + mkdocs_serve_cmd.append("-v") if config_file: mkdocs_serve_cmd.append(f"--config-file={config_file}") try: @@ -810,10 +837,15 @@ def serve(project_path: Path, no_open: bool, quiet: bool, config_file: Path): except subprocess.CalledProcessError as e: click.secho("Failed to start the server.", fg="red") click.secho(f"Error: {e}", fg="red") + if previous_debug: + os.environ["DEBUG"] = previous_debug except FileNotFoundError: click.secho("Failed to start the server.", fg="red") click.secho("Make sure Poetry is installed and in your system PATH.", fg="red") + if previous_debug: + os.environ["DEBUG"] = previous_debug + @cli.command() @click.option( @@ -831,8 +863,22 @@ def serve(project_path: Path, no_open: bool, quiet: bool, config_file: Path): help="Path to the mkdocs configuration file", show_default=True, ) +@click.option("--debug", is_flag=True, help="Set the environment variable DEBUG to 1") +@click.option( + "--remove-cache", "-r", is_flag=True, help="Remove the cache before building" +) @click.option("--quiet", "-q", is_flag=True, help="Run mkdocs build in quiet mode") -def build(project_path: Path, config_file: Path, quiet: bool): +@click.option( + "--verbose", "-v", is_flag=True, help="Set the environment variable VERBOSE to 1" +) +def build( + project_path: Path, + config_file: Path, + debug: bool, + remove_cache: bool, + quiet: bool, + verbose: bool, +): """This is a wrapper around `poetry run mkdocs build`.""" click.secho("Running in project path: ", nl=False) click.secho(f"{project_path}", fg="blue") @@ -843,17 +889,31 @@ def build(project_path: Path, config_file: Path, quiet: bool): fg="red", ) return - + previous_debug: str | None = os.environ.get("DEBUG") + if debug: + os.environ["DEBUG"] = "1" mkdocs_build_cmd = ["poetry", "run", "mkdocs", "build"] if config_file: mkdocs_build_cmd.append(f"--config-file={config_file}") if quiet: mkdocs_build_cmd.append("-q") + if verbose: + mkdocs_build_cmd.append("-v") + if remove_cache: + try: + shutil.rmtree(project_path / ".cache-juvix-mkdocs") + except Exception: + click.secho("Failed to remove .cache-juvix-mkdocs folder.", fg="red") + if previous_debug: + os.environ["DEBUG"] = previous_debug + return try: subprocess.run(mkdocs_build_cmd, cwd=project_path, check=True) except subprocess.CalledProcessError as e: click.secho("Failed to build the project.", fg="red") click.secho(f"Error: {e}", fg="red") + if previous_debug: + os.environ["DEBUG"] = previous_debug if __name__ == 
"__main__": diff --git a/src/fixtures/.gitignore b/src/fixtures/.gitignore index a3640a9..3c120b1 100644 --- a/src/fixtures/.gitignore +++ b/src/fixtures/.gitignore @@ -1,4 +1,4 @@ site/ .DS_Store .juvix-build/ -.hooks/ +.cache-juvix-mkdocs/ diff --git a/src/fixtures/everything.juvix.md b/src/fixtures/everything.juvix.md index d2e5703..9220c86 100644 --- a/src/fixtures/everything.juvix.md +++ b/src/fixtures/everything.juvix.md @@ -11,4 +11,3 @@ import test; import isabelle; import diagrams; ``` - diff --git a/src/fixtures/tutorial/hello.juvix.md b/src/fixtures/tutorial/hello.juvix.md new file mode 100644 index 0000000..b5ca00d --- /dev/null +++ b/src/fixtures/tutorial/hello.juvix.md @@ -0,0 +1,9 @@ +```juvix +module tutorial.hello; +``` + + +```juvix +axiom A : Type; +``` + diff --git a/src/fixtures/tutorial/snippets.md b/src/fixtures/tutorial/snippets.md index b6d9de9..298a30a 100644 --- a/src/fixtures/tutorial/snippets.md +++ b/src/fixtures/tutorial/snippets.md @@ -23,6 +23,10 @@ which provides the following output: --8<-- "docs/test.juvix.md:main" +You can also include relative paths: + +--8<-- "./hello.juvix.md:axiom" + !!! info