diff --git a/Tools/InitialiseForDev.py b/Tools/InitialiseForDev.py
index 6a7bbe3..7730568 100644
--- a/Tools/InitialiseForDev.py
+++ b/Tools/InitialiseForDev.py
@@ -35,7 +35,7 @@ def initialise_setup(registry_dir):
     # Set the local git exclude file so that all diffed_sources folders are ignored
     git_info_dir = os.path.join(registry_dir, ".git", "info")
     exclude_file_path = os.path.join(git_info_dir, "exclude")
-    exclude_pattern = 'diffed_sources/'
+    exclude_pattern = "diffed_sources/"
 
     # Ensure the .git/info directory exists
     if not os.path.exists(git_info_dir):
@@ -52,13 +52,13 @@ def initialise_setup(registry_dir):
     # Check if the pattern already exists in the file
     if any(exclude_pattern in line for line in lines):
         logging.info(f"'{exclude_pattern}' already exists in exclude file.")
-        return
+    else:
+        # Append the pattern to the file
+        with open(exclude_file_path, "a") as file:
+            file.write(f"\n{exclude_pattern}\n")
+        print(f"Added '{exclude_pattern}' to {exclude_file_path}")
 
-    # Append the pattern to the file
-    with open(exclude_file_path, "a") as file:
-        file.write(f"\n{exclude_pattern}\n")
-
-    print(f"Added '{exclude_pattern}' to {exclude_file_path}")
+    print("Initialization complete.")
 
 
 def create_folders(boost_lib_dirs):
diff --git a/Tools/helper.py b/Tools/helper.py
deleted file mode 100644
index 771b553..0000000
--- a/Tools/helper.py
+++ /dev/null
@@ -1,70 +0,0 @@
-import os
-import argparse
-
-file_extension = ".cc"
-
-
-def find_cpp_files(directory):
-    cpp_files = []
-    for root, dirs, files in os.walk(directory):
-        for file in files:
-            if file.endswith(file_extension):
-                # Calculate the relative path from the directory to the file
-                relative_path = os.path.relpath(os.path.join(root, file), directory)
-                cpp_files.append(relative_path)
-    return sorted(cpp_files)
-
-
-def generate_bazel_build(directory, library_name, extra_deps):
-    header = 'load("@boost.rules.tools//:tools.bzl", "boost_test_set")\n\n'
-    deps = ['    "@boost.' + library_name + "//:" + library_name + '",'] + [
-        '    "' + dep + '",' for dep in extra_deps
-    ]
-    deps_variable = "DEPS = [\n" + "\n".join(deps) + "\n]\n\n"
-    cpp_files = find_cpp_files(directory)
-
-    test_suite = (
-        "test_suite(\n"
-        '    name = "tests",\n'
-        "    tests = boost_test_set(\n"
-        '        file_extensions = ".cc",\n'
-        "        names = [\n"
-        + "\n".join(
-            [f'            "{file.replace(file_extension, "")}",' for file in cpp_files]
-        )
-        + '\n        ],\n        deps = DEPS,\n    ),\n)\n'
-    )
-
-    return header + deps_variable + test_suite
-
-
-def main():
-    parser = argparse.ArgumentParser(
-        description="Generate a BUILD.bazel file for Boost library tests."
-    )
-    parser.add_argument(
-        "directory",
-        type=str,
-        help="The path to the directory containing the test files.",
-    )
-    parser.add_argument(
-        "library", type=str, help='The name of the Boost library, e.g., "accumulators".'
-    )
-    parser.add_argument(
-        "--extra_deps",
-        nargs="*",
-        default=[],
-        help="Additional dependencies to be added to the DEPS variable. Space-separated if multiple.",
-    )
-    args = parser.parse_args()
-
-    build_content = generate_bazel_build(args.directory, args.library, args.extra_deps)
-
-    with open(os.path.join(args.directory, "BUILD.bazel"), "w") as file:
-        file.write(build_content)
-
-    print("BUILD.bazel file generated successfully in", args.directory)
-
-
-if __name__ == "__main__":
-    main()
diff --git a/old/boostGenerator.py b/old/boostGenerator.py
deleted file mode 100644
index 5643520..0000000
--- a/old/boostGenerator.py
+++ /dev/null
@@ -1,62 +0,0 @@
-import os
-import json
-import requests
-import subprocess
-import sys
-
-def generate_source_json(folder_path):
-    # Extract the library name from the folder path
-    library_name = folder_path.split("/")[-1].split(".")[1]
-
-    # Check if the specific subfolder exists
-    subfolder_path = os.path.join(folder_path, "1.83.0.bzl.1")
-    if not os.path.exists(subfolder_path):
-        print("Subfolder '1.83.0.bzl.1' not found.")
-        return
-
-    # Define the URL
-    url = f"https://github.com/boostorg/{library_name}/archive/refs/tags/boost-1.83.0.tar.gz"
-
-    # Download the file
-    response = requests.get(url)
-    if response.status_code == 200:
-        file_name = os.path.join(
-            "/Users/lukeaguilar/Downloads", f"{library_name}-boost-1.83.0.tar.gz"
-        )
-        with open(file_name, "wb") as file:
-            file.write(response.content)
-
-        # Compute SHA256 and encode in base64
-        integrity_cmd = f"openssl dgst -sha256 -binary {file_name} | openssl base64 -A"
-        integrity_result = subprocess.run(
-            integrity_cmd, shell=True, capture_output=True, text=True
-        )
-        integrity = "sha256-" + integrity_result.stdout.strip()
-
-        print(f"Calculated hash: {integrity}")
-
-        # Create the source.json content
-        source_data = {
-            "integrity": integrity,
-            "patch_strip": 0,
-            "patches": {},
-            "url": url,
-        }
-
-        # Write the source.json file
-        with open(os.path.join(subfolder_path, "source.json"), "w") as json_file:
-            json.dump(source_data, json_file, indent=2)
-
-        print(f"source.json created/updated successfully in {subfolder_path}")
-    else:
-        print("Failed to download the file.")
-
-
-# Usage
-if __name__ == "__main__":
-    if len(sys.argv) != 2:
-        print("Usage: python script_name.py ")
-        sys.exit(1)
-
-    folder_path = sys.argv[1]
-    generate_source_json(folder_path)
\ No newline at end of file
diff --git a/old/depsGen.py b/old/depsGen.py
deleted file mode 100644
index 4cc72d5..0000000
--- a/old/depsGen.py
+++ /dev/null
@@ -1,210 +0,0 @@
-import os
-import sys
-import subprocess
-from html.parser import HTMLParser
-
-
-# Function to clone the Git repository, if it doesn't exist
-def clone_repo(repo_url, dir_name):
-    if not os.path.exists(dir_name):
-        subprocess.run(["git", "clone", repo_url])
-
-
-# Clone the Git repository
-repo_url = "https://github.com/pdimov/boostdep-report.git"
-clone_repo(repo_url, "boostdep-report")
-
-# Set the boost version
-boostVersion = "boost-1.83.0"
-path_to_html = os.path.join("boostdep-report", boostVersion, "module-overview.html")
-
-
-# Define a parser to extract dependencies
-class BoostDependencyParser(HTMLParser):
-    def __init__(self, ignore_deps):
-        super().__init__()
-        self.ignore_deps = ignore_deps  # List of dependencies to ignore
-        self.current_library = None  # Store the current library name
-        self.dependencies = {}  # Dictionary to hold libraries and their dependencies
-        self.in_h2_tag = False  # Flag to indicate if we're inside an h2 tag
-
-    def handle_starttag(self, tag, attrs):
-        # Detect the start of a library name (h2 tag)
-        if tag == "h2":
-            self.in_h2_tag = True
-        elif tag == "p" and self.current_library:
-            # We're now in the dependencies list for the current library
-            self.in_h2_tag = False
-
-    def handle_endtag(self, tag):
-        # Detect the end of an h2 tag
-        if tag == "h2":
-            self.in_h2_tag = False
-
-    def handle_data(self, data):
-        if self.in_h2_tag:
-            # Capture the library name
-            self.current_library = data.strip()
-            self.dependencies[self.current_library] = []
-        elif self.current_library and not self.in_h2_tag:
-            # Filter and add dependencies
-            deps = [
-                dep.strip()
-                for dep in data.split()
-                if dep.strip() not in self.ignore_deps
-            ]
-            self.dependencies[self.current_library].extend(deps)
-            self.current_library = None  # Reset for next library
-
-
-# List of dependencies to ignore
-ignore_deps = ["(unknown)"]
-
-# Create an instance of the parser with the ignore dependencies
-parser = BoostDependencyParser(ignore_deps)
-with open(path_to_html, "r") as file:
-    parser.feed(file.read())
-
-# Post-process dependencies: replace '~' with '.'
-for lib, deps in list(parser.dependencies.items()):
-    # Replace '~' with '.' in library names
-    new_lib_name = lib.replace("~", ".")
-    parser.dependencies[new_lib_name] = [dep.replace("~", ".") for dep in deps]
-    if new_lib_name != lib:
-        del parser.dependencies[lib]
-
-# Print out each library and its dependencies
-# for lib, deps in parser.dependencies.items():
-#     print(f"{lib} = {deps}")
-
-
-# def detect_cycles(graph):
-#     """
-#     Detects cycles in a dependency graph.
-#     :param graph: Dictionary representing the dependency graph.
-#     :return: List of cycles found in the graph.
-#     """
-
-#     def visit(node, path):
-#         if node in path:
-#             return path[path.index(node) :]  # Return the cycle
-#         if node not in graph:
-#             return []  # No dependencies to explore
-
-#         path.append(node)
-#         for neighbour in graph[node]:
-#             cycle = visit(neighbour, path.copy())
-#             if cycle:
-#                 return cycle
-#         return []

-#     cycles = []
-#     for lib in graph:
-#         cycle = visit(lib, [])
-#         if cycle:
-#             cycles.append(cycle)
-
-#     return cycles
-
-
-# # Detect and print cyclic dependencies
-# cyclic_dependencies = detect_cycles(parser.dependencies)
-# for cycle in cyclic_dependencies:
-#     print(" -> ".join(cycle))
-
-
-# Step 1: Accept a folder path from the command line
-folder_path = sys.argv[
-    1
-]  # The folder path is expected as the first command line argument
-
-# Step 2: Extract the library name from the folder path
-# library_name = os.path.basename(folder_path).replace("boost.", "")
-library_name = os.path.basename(folder_path.rstrip("/\\")).replace("boost.", "")
-
-print(f"Library name: {library_name}")
-
-# Step 3: Modify the MODULE.bazel file
-module_bazel_path = os.path.join(folder_path, "1.83.0.bzl.1", "MODULE.bazel")
-deps_whitelist = ["boost.rules.tools", "platforms", "bazel_skylib", "bzip2", "lzma", "zlib", "zstd"]  # Whitelist of dependencies to keep
-
-new_lines = []  # List to store modified lines of the file
-
-with open(module_bazel_path, "r") as file:
-    for line in file:
-        # Keep lines that are not bazel_dep or are whitelisted
-        if not line.startswith("bazel_dep") or any(
-            whitelist in line for whitelist in deps_whitelist
-        ):
-            new_lines.append(line)
-
-# Step 4: Add bazel_dep for each dependency of the library
-if library_name in parser.dependencies:
-    for dep in parser.dependencies[library_name]:
-        new_lines.append(f'bazel_dep(name = "boost.{dep}", version = "1.83.0.bzl.1")\n')
-
-# Write the modified content back to the MODULE.bazel file
-with open(module_bazel_path, "w") as file:
-    file.writelines(new_lines)
-
-
-# Function to extract the last segment of a dependency name
-def last_segment(dep_name):
-    return dep_name.split(".")[-1]
-
-
-# Modify the BUILD.bazel file
-build_bazel_path = os.path.join(folder_path, "1.83.0.bzl.1", "BUILD.bazel")
-# print(f"Modifying BUILD.bazel at {build_bazel_path}")
-
-# Read the content of the BUILD.bazel file
-with open(build_bazel_path, "r") as file:
-    lines = file.readlines()
-
-inside_boost_library, correct_library = False, False
-new_lines = []  # List to store modified lines of the file
-
-for line in lines:
-    # print("line" + line)
-    if "boost_library(" in line:
-        inside_boost_library = True
-        # print(f"Starting boost_library block for {library_name}")
-        new_lines.append(line)
-        continue
-
-    if inside_boost_library:
-        if line.strip().startswith("deps = ["):
-            # print("Found deps section, starting to process dependencies.")
-            correct_library = True
-            new_lines.append(line)
-            continue
-
-        if correct_library and line.strip().startswith('"@boost.'):
-            # print(f"Skipping old dependency line: {line.strip()}")
-            continue  # Skip this line as it's an old dependency
-
-        if correct_library and line.strip().startswith("]"):
-            # End of deps section
-            # print("Found end of deps section. Adding new deps")
-
-            # Add new dependencies
-            if library_name in parser.dependencies:
-                for dep in parser.dependencies[library_name]:
-                    dep_line = f'        "@boost.{dep}//:{last_segment(dep)}",\n'
-                    new_lines.append(dep_line)
-                    # print(f"Added dependency: {dep_line.strip()}")
-
-            # Append end of deps section
-            new_lines.append("    ],\n")
-            continue
-
-    if ")" in line and inside_boost_library:
-        inside_boost_library = False
-        # print(f"Ending boost_library block for {library_name}")
-
-    new_lines.append(line)
-
-# Write the modified content back to the BUILD.bazel file
-with open(build_bazel_path, "w") as file:
-    file.writelines(new_lines)
-    # print("Finished modifying BUILD.bazel.")
diff --git a/old/diffCreator.py b/old/diffCreator.py
deleted file mode 100644
index c5b088e..0000000
--- a/old/diffCreator.py
+++ /dev/null
@@ -1,129 +0,0 @@
-import os
-import sys
-import json
-import shutil
-import requests
-import tarfile
-import subprocess
-from pathlib import Path
-
-
-def copy_directory(src, dst):
-    """
-    Recursively copies files from src to dst directory,
-    creating subdirectories if necessary.
-    """
-    copied_paths = []
-
-    if not dst.exists():
-        dst.mkdir(parents=True)
-    for item in src.iterdir():
-        dest_path = dst / item.name
-        if item.is_dir():
-            copy_directory(item, dest_path)
-        else:
-            shutil.copy2(item, dest_path)
-            dest_item = dest_path / item.name
-            copied_paths.append(dest_item)
-    return copied_paths
-
-
-def main(folder_path):
-    # Step 1: Path validation (Assuming folder_path is passed correctly)
-    folder = Path(folder_path)
-    if not folder.is_dir():
-        raise ValueError(f"The path {folder_path} is not a valid directory")
-
-    # Step 2: Create "patches" folder
-    patches_folder = folder / "1.83.0.bzl.1" / "patches"
-    patches_folder.mkdir(parents=True, exist_ok=True)
-
-    # Step 3: Parse source.json
-    with open(folder / "1.83.0.bzl.1" / "source.json", "r") as f:
-        source_data = json.load(f)
-        url = source_data["url"]
-
-    # Step 4: Create "diffed_sources" folder
-    diffed_sources_folder = folder / "diffed_sources"
-    diffed_sources_folder.mkdir(parents=True, exist_ok=True)
-
-    # Step 5: Download and extract the URL
-    # response = requests.get(url)
-    # tar_path = diffed_sources_folder / "downloaded.tar.gz"
-    # with open(tar_path, "wb") as f:
-    #     f.write(response.content)
-    # with tarfile.open(tar_path, "r:gz") as tar:
-    #     tar.extractall(path=diffed_sources_folder)  # TODO RE-ADD THIS!
-
-    # Find the first directory inside 'diffed_sources_folder'
-    first_directory_inside_diffed_sources = None
-    for item in diffed_sources_folder.iterdir():
-        if item.is_dir():
-            first_directory_inside_diffed_sources = item
-            break
-
-    if first_directory_inside_diffed_sources is None:
-        raise Exception("No directory found inside 'diffed_sources'")
-
-    # Keep track of copied files and directories
-    copied_paths = []
-
-    # Assuming folder and first_directory_inside_diffed_sources are defined
-    # Step 6: Copy files and directories
-    source_folder = folder / "1.83.0.bzl.1"
-    copied_paths = []
-    for item in source_folder.iterdir():
-        if item.name != "source.json" and item.name != "patches":
-            dest_path = first_directory_inside_diffed_sources / item.name
-            if item.is_file():
-                copied_paths.append(dest_path)
-                shutil.copy2(item, dest_path)  # Use copy2 to preserve metadata
-            elif item.is_dir():
-                copied_paths += copy_directory(
-                    item, dest_path
-                )  # Use the custom function for directories
-
-    # Step 7: Create git diff
-    subprocess.run(["git", "init"], cwd=first_directory_inside_diffed_sources)
-
-    # Convert copied paths to relative paths and add them to the staging area
-    for path in copied_paths:
-        relative_path = path.relative_to(first_directory_inside_diffed_sources)
-        subprocess.run(
-            ["git", "add", str(relative_path)],
-            cwd=first_directory_inside_diffed_sources,
-        )
-
-    # Create and write the git diff
-    diff_command = ["git", "diff", "--cached"]
-    diff = subprocess.check_output(
-        diff_command, cwd=first_directory_inside_diffed_sources
-    )
-    with open(patches_folder / "patch.diff", "wb") as f:
-        f.write(diff)
-
-    # Calculate SHA256 hash of 'patch.diff' and encode in base64
-    patch_diff_path = patches_folder / "patch.diff"
-    integrity_cmd = (
-        f"openssl dgst -sha256 -binary {patch_diff_path} | openssl base64 -A"
-    )
-    integrity_result = subprocess.run(
-        integrity_cmd, shell=True, capture_output=True, text=True
-    )
-    integrity = "sha256-" + integrity_result.stdout.strip()
-
-    print(f"Calculated hash: {integrity}")
-
-    # Update 'source.json' with the new patch information
-    source_json_path = folder / "1.83.0.bzl.1" / "source.json"
-    with open(source_json_path, "r") as f:
-        data = json.load(f)
-
-    data["patches"] = {"patch.diff": integrity}
-
-    with open(source_json_path, "w") as f:
-        json.dump(data, f, indent=2)
-
-
-if __name__ == "__main__":
-    main(sys.argv[1])
diff --git a/old/sourcePatch.py b/old/sourcePatch.py
deleted file mode 100644
index c4f4427..0000000
--- a/old/sourcePatch.py
+++ /dev/null
@@ -1,44 +0,0 @@
-import os
-import json
-import sys
-
-def update_source_json(folder_path):
-    # Extract the library name from the folder path
-    # library_name = folder_path.split("/")[-1].split(".")[1]
-    library_name = os.path.basename(folder_path.rstrip("/\\")).replace("boost.", "")
-
-    # Check if the specific subfolder exists
-    subfolder_path = os.path.join(folder_path, "1.83.0.bzl.1")
-    if not os.path.exists(subfolder_path):
-        print("Subfolder '1.83.0.bzl.1' not found.")
-        return
-
-    # Path to the source.json file
-    json_file_path = os.path.join(subfolder_path, "source.json")
-
-    # Check if the source.json file exists
-    if not os.path.exists(json_file_path):
-        print("source.json file not found.")
-        return
-
-    # Read the existing source.json
-    with open(json_file_path, "r") as json_file:
-        source_data = json.load(json_file)
-
-    # Add the strip_prefix key
-    source_data["strip_prefix"] = f"{library_name}-boost-1.83.0"
-
-    # Write the updated source.json file
-    with open(json_file_path, "w") as json_file:
-        json.dump(source_data, json_file, indent=2)
-
-    print(f"source.json updated successfully in {subfolder_path}")
-
-# Usage
-if __name__ == "__main__":
-    if len(sys.argv) != 2:
-        print("Usage: python script_name.py ")
-        sys.exit(1)
-
-    folder_path = sys.argv[1]
-    update_source_json(folder_path)