from __future__ import annotations

import json
import os
import sys
import textwrap
from typing import Dict
from typing import Tuple

from mdutils.mdutils import MdUtils

from scripts._docs.configuration_docs_common import BASE_PATH
from scripts._docs.configuration_docs_common import ExtractedParam
from scripts._docs.configuration_docs_common import extracted_filename
from scripts._docs.gen_docs_generic import ALL_DEBUGGERS
from scripts._docs.gen_docs_generic import strip_ansi_color
from scripts._docs.gen_docs_generic import update_files_simple
from scripts._docs.gen_docs_generic import verify_existence
from scripts._docs.gen_docs_generic import verify_files_simple


def convert_to_markdown(scope: str, debugger_to_params: Dict[str, list[ExtractedParam]]) -> str:
    """
    Returns:
        The contents of the file corresponding to the passed scope.
    """
    # filename doesn't matter since we only use mdFile.get_md_text()
    mdFile = MdUtils(scope)
    mdFile.new_header(level=1, title=scope)

    all_params: set[str] = set()
    for _, params in debugger_to_params.items():
        all_params.update([param.name for param in params])

    for param_name in sorted(all_params):
        # Make a (debugger name, parameter) list in case some
        # debuggers disagree on what some parameter should
        # display. We won't add debuggers that don't have the
        # parameter.
        param_variants: list[Tuple[str, ExtractedParam]] = []
        for debugger, dparams in debugger_to_params.items():
            # Slow but whatever
            for dparam in dparams:
                if param_name == dparam.name:
                    param_variants.append((debugger, dparam))
                    break
        assert param_variants

        mdFile.new_header(level=2, title="**" + param_name + "**")

        # Note about supported debuggers if the parameter isn't
        # available everywhere.
        if len(param_variants) != len(ALL_DEBUGGERS):
            supported_list = ", ".join([x[0].upper() for x in param_variants])
            md = ""
            md += f"(only in {supported_list})"
            md += "\n"
            mdFile.write(md)

        debuggers_agree = all(x[1] == param_variants[0][1] for x in param_variants)
        if debuggers_agree:
            mdFile.new_paragraph(param_variants[0][1].set_show_doc)
            mdFile.new_paragraph(param_variants[0][1].help_docstring)
        else:
            for debugger, dparam in sorted(param_variants):
                # Content tabs
                # https://squidfunk.github.io/mkdocs-material/reference/content-tabs/
                mdFile.write(f'\n=== "{debugger.upper()}"')
                indented_set_show_doc = textwrap.indent(dparam.set_show_doc, "    ")
                mdFile.new_paragraph(indented_set_show_doc)
                indented_help_docstring = textwrap.indent(dparam.help_docstring, "    ")
                mdFile.new_paragraph(indented_help_docstring)

        mdFile.write("\n\n----------\n")

    autogen_warning = ""
    return autogen_warning + "\n" + strip_ansi_color(mdFile.get_md_text())


def check_index(num_scopes: int):
    assert num_scopes == 3, (
        "It seems a new scope has been added, "
        f"please update the index file ({INDEX_PATH}) and bump this number accordingly."
    )


def convert_all_to_markdown(
    extracted: list[Tuple[str, Dict[str, list[ExtractedParam]]]],
) -> Dict[str, str]:
    result = {}

    # Enumerate all scopes we can see.
    all_scopes: set[str] = set()
    for _, data in extracted:
        for scope in data.keys():
            all_scopes.add(scope)

    # Check if the index is up to date while
    # we are here.
    check_index(len(all_scopes))

    # Generate markdown for those scopes.
    for scope in all_scopes:
        # We allow debuggers to disagree on exactly what the file
        # will look like, since a file corresponds to a scope.
        # The convert_to_markdown() function will check and forbid
        # per-parameter disagreements.
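        # Build the per-debugger parameter list for this scope;
        # debuggers that don't define the scope are simply skipped.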
        debugger_to_paramlist: Dict[str, list[ExtractedParam]] = {}
        for debugger, data in extracted:
            if scope in data:
                debugger_to_paramlist[debugger] = data[scope]

        filename = os.path.join(BASE_PATH, f"{scope}.md")
        result[filename] = convert_to_markdown(scope, debugger_to_paramlist)

    return result


def read_extracted() -> list[Tuple[str, Dict[str, list[ExtractedParam]]]]:
    """
    Read json files from disk.

    Returns:
        A list of tuples of the form:
        (debugger name, scope-mapped extracted parameters for that debugger).
    """
    result: list[Tuple[str, Dict[str, list[ExtractedParam]]]] = []
    for debugger in ALL_DEBUGGERS:
        filepath = extracted_filename(debugger)
        print(f"Consuming {filepath}..")

        with open(filepath, "r") as file:
            raw_data = json.loads(file.read())

        # Convert the dict objs to ExtractedParams
        data: Dict[str, list[ExtractedParam]] = {}
        for scope, param_list in raw_data.items():
            data[scope] = [ExtractedParam(**param_dict) for param_dict in param_list]

        result.append((debugger, data))

        # We consumed the temporary file, we can delete it now.
        os.remove(filepath)

    return result


# NOTE: the docs/configuration/index.md file is
# not autogenerated.
INDEX_PATH = os.path.join(BASE_PATH, "index.md")


def main():
    if len(sys.argv) > 1:
        print("This script doesn't accept any arguments.")
        print("See top of the file for usage.")
        sys.exit(1)

    just_verify = False
    if os.getenv("PWNDBG_DOCGEN_VERIFY"):
        just_verify = True

    print("\n==== Parameter Documentation ====")
    extracted = read_extracted()
    markdowned = convert_all_to_markdown(extracted)

    if just_verify:
        print("Checking if all files are in place..")
        missing, extra = verify_existence(list(markdowned.keys()) + [INDEX_PATH], BASE_PATH)
        if missing or extra:
            print("To add missing files please run ./scripts/generate-docs.sh.")
            print("To remove extra files please remove them manually.")
            sys.exit(2)
        print("Every file is where it should be!")

        print("Verifying contents...")
        err = verify_files_simple(markdowned, skip=[INDEX_PATH])
        if err:
            print("VERIFICATION FAILED. The files differ from what would be auto-generated.")
            print("Error:", err)
            print("Please run ./scripts/generate-docs.sh from project root and commit the changes.")
            sys.exit(3)
        print("Verification successful!")
    else:
        print("Updating files...")
        update_files_simple(markdowned)
        print("Update successful.")

        missing, extra = verify_existence(list(markdowned.keys()) + [INDEX_PATH], BASE_PATH)
        if len(missing) == 1 and missing[0] == INDEX_PATH:
            print(
                f"The index ({INDEX_PATH}) is missing. That is a hand-written file, please write it."
            )
            sys.exit(4)
        assert not missing, (
            "Some files (and not the index) are missing, which should be impossible."
        )
        if extra:
            sys.exit(5)


if __name__ == "__main__":
    main()