From d13484d282392d20f7445190264976543ce57b71 Mon Sep 17 00:00:00 2001 From: Emily Boudreaux Date: Sat, 9 Aug 2025 08:57:47 -0400 Subject: [PATCH] feat(fourdst-cli): cli --- assets/bundle/fourdst_bundle_icon.svg | 158 ++++++++++++++++++++++++++ fourdst/cli/bundle/create.py | 50 +++++--- fourdst/cli/bundle/fill.py | 2 +- fourdst/cli/bundle/inspect.py | 158 ++++++++++++++++---------- fourdst/cli/bundle/sign.py | 143 +++++++++++++++-------- fourdst/cli/bundle/validate.py | 98 +++++++++++++--- fourdst/cli/common/utils.py | 58 +++++++++- fourdst/cli/keys/generate.py | 53 ++++++--- fourdst/cli/plugin/init.py | 62 +++++++--- pyproject.toml | 3 +- 10 files changed, 605 insertions(+), 180 deletions(-) create mode 100644 assets/bundle/fourdst_bundle_icon.svg diff --git a/assets/bundle/fourdst_bundle_icon.svg b/assets/bundle/fourdst_bundle_icon.svg new file mode 100644 index 0000000..864ed4f --- /dev/null +++ b/assets/bundle/fourdst_bundle_icon.svg @@ -0,0 +1,158 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + fbundle + + + + + diff --git a/fourdst/cli/bundle/create.py b/fourdst/cli/bundle/create.py index 71d8e30..9bfc4e1 100644 --- a/fourdst/cli/bundle/create.py +++ b/fourdst/cli/bundle/create.py @@ -8,8 +8,7 @@ import datetime import yaml import zipfile from pathlib import Path - -from fourdst.cli.common.utils import get_platform_identifier, run_command +from fourdst.cli.common.utils import get_platform_identifier, get_macos_targeted_platform_identifier, run_command bundle_app = typer.Typer() @@ -19,7 +18,9 @@ def bundle_create( output_bundle: Path = typer.Option("bundle.fbundle", "--out", "-o", help="The path for the output bundle file."), bundle_name: str = typer.Option("MyPluginBundle", "--name", help="The name of the bundle."), bundle_version: str = typer.Option("0.1.0", "--ver", help="The version of the bundle."), - bundle_author: str = typer.Option("Unknown", "--author", help="The author of the bundle.") + bundle_author: str = typer.Option("Unknown", 
"--author", help="The author of the bundle."), + # --- NEW OPTION --- + target_macos_version: str = typer.Option(None, "--target-macos-version", help="The minimum macOS version to target (e.g., '12.0').") ): """ Builds and packages one or more plugin projects into a single .fbundle file. @@ -29,8 +30,22 @@ def bundle_create( shutil.rmtree(staging_dir) staging_dir.mkdir() - # Get the host platform identifier, triggering detection if needed. - host_platform = get_platform_identifier() + # --- MODIFIED LOGIC --- + # Prepare environment for the build + build_env = os.environ.copy() + + # Determine the host platform identifier based on the target + if sys.platform == "darwin" and target_macos_version: + typer.secho(f"Targeting macOS version: {target_macos_version}", fg=typer.colors.CYAN) + host_platform = get_macos_targeted_platform_identifier(target_macos_version) + + # Set environment variables for Meson to pick up + flags = f"-mmacosx-version-min={target_macos_version}" + build_env["CXXFLAGS"] = f"{build_env.get('CXXFLAGS', '')} {flags}".strip() + build_env["LDFLAGS"] = f"{build_env.get('LDFLAGS', '')} {flags}".strip() + else: + # Default behavior for Linux or non-targeted macOS builds + host_platform = get_platform_identifier() manifest = { "bundleName": bundle_name, @@ -46,12 +61,15 @@ def bundle_create( plugin_name = plugin_dir.name print(f"--> Processing plugin: {plugin_name}") - # 1. Build the plugin - print(f" - Compiling for host platform...") + # 1. 
Build the plugin using the prepared environment + print(f" - Compiling for target platform...") build_dir = plugin_dir / "builddir" - if not build_dir.exists(): - run_command(["meson", "setup", "builddir"], cwd=plugin_dir) - run_command(["meson", "compile", "-C", "builddir"], cwd=plugin_dir) + if build_dir.exists(): + shutil.rmtree(build_dir) # Reconfigure every time to apply env vars + + # Pass the modified environment to the Meson commands + run_command(["meson", "setup", "builddir"], cwd=plugin_dir, env=build_env) + run_command(["meson", "compile", "-C", "builddir"], cwd=plugin_dir, env=build_env) # 2. Find the compiled artifact compiled_lib = next(build_dir.glob("lib*.so"), None) or next(build_dir.glob("lib*.dylib"), None) @@ -63,16 +81,13 @@ def bundle_create( print(" - Packaging source code (respecting .gitignore)...") sdist_path = staging_dir / f"{plugin_name}_src.zip" - # Use git to list files, which automatically respects .gitignore git_check = run_command(["git", "rev-parse", "--is-inside-work-tree"], cwd=plugin_dir, check=False) files_to_include = [] if git_check.returncode == 0: - # This is a git repo, use git to list files result = run_command(["git", "ls-files", "--cached", "--others", "--exclude-standard"], cwd=plugin_dir) files_to_include = [plugin_dir / f for f in result.stdout.strip().split('\n') if f] else: - # Not a git repo, fall back to os.walk and warn the user typer.secho(f" - Warning: '{plugin_dir.name}' is not a git repository. 
Packaging all files.", fg=typer.colors.YELLOW) for root, _, files in os.walk(plugin_dir): if 'builddir' in root: @@ -89,9 +104,8 @@ def bundle_create( binaries_dir = staging_dir / "bin" binaries_dir.mkdir(exist_ok=True) - # Construct new filename with arch, os, and ABI tag - base_name = compiled_lib.stem # e.g., "libplugin_a" - ext = compiled_lib.suffix # e.g., ".so" + base_name = compiled_lib.stem + ext = compiled_lib.suffix triplet = host_platform["triplet"] abi_signature = host_platform["abi_signature"] tagged_filename = f"{base_name}.{triplet}.{abi_signature}{ext}" @@ -109,7 +123,9 @@ def bundle_create( "binaries": [{ "platform": { "triplet": host_platform["triplet"], - "abi_signature": host_platform["abi_signature"] + "abi_signature": host_platform["abi_signature"], + # Adding arch separately for clarity, matching 'fill' command + "arch": host_platform["arch"] }, "path": staged_lib_path.relative_to(staging_dir).as_posix(), "compiledOn": datetime.datetime.now().isoformat() diff --git a/fourdst/cli/bundle/fill.py b/fourdst/cli/bundle/fill.py index fa758a9..3bc719b 100644 --- a/fourdst/cli/bundle/fill.py +++ b/fourdst/cli/bundle/fill.py @@ -168,4 +168,4 @@ def bundle_fill(bundle_path: Path = typer.Argument(..., help="The .fbundle file finally: if staging_dir.exists(): - shutil.rmtree(staging_dir) + shutil.rmtree(staging_dir) \ No newline at end of file diff --git a/fourdst/cli/bundle/inspect.py b/fourdst/cli/bundle/inspect.py index 664ec5d..2090097 100644 --- a/fourdst/cli/bundle/inspect.py +++ b/fourdst/cli/bundle/inspect.py @@ -8,20 +8,65 @@ import zipfile import hashlib from pathlib import Path -try: - from cryptography.hazmat.primitives import serialization, hashes - from cryptography.hazmat.primitives.asymmetric import padding, rsa, ed25519 - from cryptography.exceptions import InvalidSignature -except ImportError: - print("Error: This CLI now requires 'cryptography'. 
Please install it.", file=sys.stderr) - print("Run: pip install cryptography", file=sys.stderr) - sys.exit(1) +from cryptography.hazmat.primitives import serialization, hashes +from cryptography.hazmat.primitives.asymmetric import padding, rsa, ed25519 +from cryptography.exceptions import InvalidSignature from fourdst.cli.common.config import LOCAL_TRUST_STORE_PATH from fourdst.cli.common.utils import get_platform_identifier, calculate_sha256, is_abi_compatible bundle_app = typer.Typer() +def _reconstruct_canonical_checksum_list(staging_dir: Path, manifest: dict) -> tuple[str, list[str], list[str]]: + """ + Reconstructs the canonical checksum list from the files on disk + and compares them against the checksums listed in the manifest. + + Returns a tuple containing: + 1. The canonical string of actual checksums to verify against the signature. + 2. A list of files with checksum mismatches. + 3. A list of files that are listed in the manifest but missing from the disk. + """ + checksum_map = {} + mismatch_errors = [] + missing_files = [] + + all_files_in_manifest = [] + # Gather all file paths from the manifest + for plugin_data in manifest.get('bundlePlugins', {}).values(): + if 'sdist' in plugin_data and 'path' in plugin_data['sdist']: + all_files_in_manifest.append(plugin_data['sdist']) + if 'binaries' in plugin_data: + all_files_in_manifest.extend(plugin_data['binaries']) + + for file_info in all_files_in_manifest: + path_str = file_info.get('path') + if not path_str: + continue + + file_path = staging_dir / path_str + expected_checksum = file_info.get('checksum') + + if not file_path.exists(): + missing_files.append(path_str) + continue + + # Calculate actual checksum from the file on disk + actual_checksum = "sha256:" + calculate_sha256(file_path) + checksum_map[path_str] = actual_checksum + + # Compare with the checksum listed in the manifest + if expected_checksum and actual_checksum != expected_checksum: + mismatch_errors.append(path_str) + + # Create the 
canonical string for signature verification from the actual file checksums + sorted_paths = sorted(checksum_map.keys()) + canonical_list = [f"{path}:{checksum_map[path]}" for path in sorted_paths] + data_to_verify = "\n".join(canonical_list) + + return data_to_verify, mismatch_errors, missing_files + + @bundle_app.command("inspect") def bundle_inspect(bundle_path: Path = typer.Argument(..., help="The .fbundle file to inspect.", exists=True)): """ @@ -32,12 +77,9 @@ def bundle_inspect(bundle_path: Path = typer.Argument(..., help="The .fbundle fi shutil.rmtree(staging_dir) try: - # Get current system info first host_platform = get_platform_identifier() - # 1. Unpack and load manifest with zipfile.ZipFile(bundle_path, 'r') as bundle_zip: - archive_files = set(bundle_zip.namelist()) bundle_zip.extractall(staging_dir) manifest_path = staging_dir / "manifest.yaml" @@ -48,8 +90,8 @@ def bundle_inspect(bundle_path: Path = typer.Argument(..., help="The .fbundle fi with open(manifest_path, 'r') as f: manifest = yaml.safe_load(f) - # 2. Print Header typer.secho(f"--- Bundle Inspection Report for: {bundle_path.name} ---", bold=True) + # ... (header printing code is unchanged) ... typer.echo(f"Name: {manifest.get('bundleName', 'N/A')}") typer.echo(f"Version: {manifest.get('bundleVersion', 'N/A')}") typer.echo(f"Author: {manifest.get('bundleAuthor', 'N/A')}") @@ -58,6 +100,7 @@ def bundle_inspect(bundle_path: Path = typer.Argument(..., help="The .fbundle fi typer.secho(f"Host Arch: {host_platform['triplet']}", dim=True) typer.echo("-" * 50) + # 3. 
Signature and Trust Verification fingerprint = manifest.get('bundleAuthorKeyFingerprint') sig_path = staging_dir / "manifest.sig" @@ -65,76 +108,65 @@ def bundle_inspect(bundle_path: Path = typer.Argument(..., help="The .fbundle fi if not fingerprint or not sig_path.exists(): typer.secho("Trust Status: 🟔 UNSIGNED", fg=typer.colors.YELLOW) else: - # Find the key in the local trust store trusted_key_path = None if LOCAL_TRUST_STORE_PATH.exists(): - for key_file in LOCAL_TRUST_STORE_PATH.rglob("*.pub"): - pub_key = serialization.load_ssh_public_key(key_file.read_bytes()) - pub_key_bytes = pub_key.public_bytes( - encoding = serialization.Encoding.OpenSSH, - format = serialization.PublicFormat.OpenSSH - ) - pub_key_fingerprint = "sha256:" + hashlib.sha256(pub_key_bytes).hexdigest() - if pub_key_fingerprint == fingerprint: - trusted_key_path = key_file - break + # Find the key in the local trust store + # ... (key finding logic is unchanged) ... + for key_file in LOCAL_TRUST_STORE_PATH.rglob("*.pem"): + try: + pub_der = (serialization.load_pem_public_key(key_file.read_bytes()) + .public_bytes( + encoding=serialization.Encoding.DER, + format=serialization.PublicFormat.SubjectPublicKeyInfo + )) + pub_key_fingerprint = "sha256:" + hashlib.sha256(pub_der).hexdigest() + if pub_key_fingerprint == fingerprint: + trusted_key_path = key_file + break + except Exception: + continue if not trusted_key_path: typer.secho(f"Trust Status: āš ļø SIGNED but UNTRUSTED AUTHOR ({fingerprint})", fg=typer.colors.YELLOW) else: + # --- MODIFIED VERIFICATION LOGIC --- try: - pub_key_obj = serialization.load_ssh_public_key(trusted_key_path.read_bytes()) + pub_key_obj = serialization.load_pem_public_key(trusted_key_path.read_bytes()) signature = sig_path.read_bytes() - manifest_content = manifest_path.read_bytes() + + # Reconstruct the data that was originally signed + data_to_verify, checksum_errors, missing_files = _reconstruct_canonical_checksum_list(staging_dir, manifest) + with 
open("data_to_verify.bin", "wb") as f: + f.write(data_to_verify.encode('utf-8')) + # Verify the signature against the reconstructed data if isinstance(pub_key_obj, ed25519.Ed25519PublicKey): - pub_key_obj.verify(signature, manifest_content) + pub_key_obj.verify(signature, data_to_verify.encode('utf-8')) elif isinstance(pub_key_obj, rsa.RSAPublicKey): pub_key_obj.verify( signature, - manifest_content, + data_to_verify.encode('utf-8'), padding.PKCS1v15(), hashes.SHA256() ) - typer.secho(f"Trust Status: āœ… SIGNED and TRUSTED ({trusted_key_path.relative_to(LOCAL_TRUST_STORE_PATH)})", fg=typer.colors.GREEN) + + # If we reach here, the signature is cryptographically valid. + # Now we check if the manifest's checksums match the actual file checksums. + if checksum_errors or missing_files: + typer.secho(f"Trust Status: āŒ INVALID - Files have been tampered with after signing.", fg=typer.colors.RED) + for f in missing_files: + typer.echo(f" - Missing file listed in manifest: {f}") + for f in checksum_errors: + typer.echo(f" - Checksum mismatch for: {f}") + else: + typer.secho(f"Trust Status: āœ… SIGNED and TRUSTED ({trusted_key_path.relative_to(LOCAL_TRUST_STORE_PATH)})", fg=typer.colors.GREEN) + except InvalidSignature: - typer.secho(f"Trust Status: āŒ INVALID SIGNATURE ({fingerprint})", fg=typer.colors.RED) + typer.secho(f"Trust Status: āŒ INVALID SIGNATURE - The bundle's integrity is compromised.", fg=typer.colors.RED) typer.echo("-" * 50) - - # 4. 
Content Validation - typer.echo("Validating bundle contents...") - missing_files = [] - checksum_errors = [] - - for plugin_name, plugin_data in manifest.get('bundlePlugins', {}).items(): - sdist_path = plugin_data.get('sdist', {}).get('path') - if sdist_path and sdist_path not in archive_files: - missing_files.append(sdist_path) - - for binary in plugin_data.get('binaries', []): - binary_path_str = binary.get('path') - if binary_path_str and binary_path_str not in archive_files: - missing_files.append(binary_path_str) - elif binary_path_str: - # Verify checksum if present - expected_checksum = binary.get('checksum') - if expected_checksum: - actual_checksum = "sha256:" + calculate_sha256(staging_dir / binary_path_str) - if actual_checksum != expected_checksum: - checksum_errors.append(binary_path_str) - - if not missing_files and not checksum_errors: - typer.secho("Content Validation: āœ… OK", fg=typer.colors.GREEN) - else: - typer.secho("Content Validation: āŒ FAILED", fg=typer.colors.RED) - for f in missing_files: - typer.echo(f" - Missing file from archive: {f}") - for f in checksum_errors: - typer.echo(f" - Checksum mismatch for: {f}") - - # 5. Plugin Details - typer.echo("-" * 50) + + # ... (Plugin Details section is unchanged) ... typer.secho("Available Plugins:", bold=True) for plugin_name, plugin_data in manifest.get('bundlePlugins', {}).items(): typer.echo(f"\n Plugin: {plugin_name}") diff --git a/fourdst/cli/bundle/sign.py b/fourdst/cli/bundle/sign.py index 578265b..da51059 100644 --- a/fourdst/cli/bundle/sign.py +++ b/fourdst/cli/bundle/sign.py @@ -7,26 +7,67 @@ import zipfile import hashlib from pathlib import Path import sys - -try: - from cryptography.hazmat.primitives import serialization, hashes - from cryptography.hazmat.primitives.asymmetric import padding, rsa, ed25519 -except ImportError: - print("Error: This CLI now requires 'cryptography'. 
Please install it.", file=sys.stderr) - print("Run: pip install cryptography", file=sys.stderr) - sys.exit(1) +import subprocess from fourdst.cli.common.utils import calculate_sha256 bundle_app = typer.Typer() +def _create_canonical_checksum_list(staging_dir: Path, manifest: dict) -> str: + """ + Creates a deterministic, sorted string of all file paths and their checksums. + This string is the actual data that will be signed. + """ + checksum_map = {} + + # Iterate through all plugins to find all files to be checksummed + for plugin_data in manifest.get('bundlePlugins', {}).values(): + # Add sdist (source code zip) to the list + sdist_info = plugin_data.get('sdist', {}) + if 'path' in sdist_info: + file_path = staging_dir / sdist_info['path'] + if file_path.exists(): + checksum = "sha256:" + calculate_sha256(file_path) + # Also update the manifest with the sdist checksum + sdist_info['checksum'] = checksum + checksum_map[sdist_info['path']] = checksum + else: + # This case should ideally be caught by a validation step + typer.secho(f"Warning: sdist file not found: {sdist_info['path']}", fg=typer.colors.YELLOW) + + + # Add all binaries to the list + for binary in plugin_data.get('binaries', []): + if 'path' in binary: + file_path = staging_dir / binary['path'] + if file_path.exists(): + checksum = "sha256:" + calculate_sha256(file_path) + # Update the manifest with the binary checksum + binary['checksum'] = checksum + checksum_map[binary['path']] = checksum + else: + typer.secho(f"Warning: Binary file not found: {binary['path']}", fg=typer.colors.YELLOW) + + # Sort the file paths to ensure a deterministic order + sorted_paths = sorted(checksum_map.keys()) + + # Create the final canonical string (e.g., "path1:checksum1\npath2:checksum2") + canonical_list = [f"{path}:{checksum_map[path]}" for path in sorted_paths] + + return "\n".join(canonical_list) + + @bundle_app.command("sign") def bundle_sign( bundle_path: Path = typer.Argument(..., help="The .fbundle file to 
sign.", exists=True), private_key: Path = typer.Option(..., "--key", "-k", help="Path to the author's private signing key.", exists=True) ): """ - Signs a bundle with an author's private key, adding checksums and a signature. + Signs a bundle with an author's private key. + + This process calculates checksums for all source and binary files, + adds them to the manifest, and then signs a canonical list of these + checksums to ensure the integrity of the entire bundle. """ print(f"Signing bundle: {bundle_path}") staging_dir = Path("temp_sign_staging") @@ -42,57 +83,65 @@ def bundle_sign( print("Error: manifest.yaml not found in bundle.", file=sys.stderr) raise typer.Exit(code=1) - # 2. Load private key and derive public key to get fingerprint - with open(private_key, "rb") as key_file: - priv_key_obj = serialization.load_ssh_private_key(key_file.read(), password=None) - - pub_key_obj = priv_key_obj.public_key() - pub_key_bytes = pub_key_obj.public_bytes( - encoding=serialization.Encoding.OpenSSH, - format=serialization.PublicFormat.OpenSSH - ) - fingerprint = "sha256:" + hashlib.sha256(pub_key_bytes).hexdigest() - print(f" - Signing with key fingerprint: {fingerprint}") + # 2. Ensure PEM private key and derive public key fingerprint via openssl + if private_key.suffix.lower() != ".pem": + typer.secho("Error: Private key must be a .pem file.", fg=typer.colors.RED) + raise typer.Exit(code=1) + typer.echo(" - Deriving public key fingerprint via openssl...") + try: + proc = subprocess.run( + ["openssl", "pkey", "-in", str(private_key), "-pubout", "-outform", "DER"], + capture_output=True, check=True + ) + pub_der = proc.stdout + fingerprint = "sha256:" + hashlib.sha256(pub_der).hexdigest() + typer.echo(f" - Signing with key fingerprint: {fingerprint}") + except subprocess.CalledProcessError as e: + typer.secho(f"Error extracting public key: {e.stderr.decode().strip()}", fg=typer.colors.RED) + raise typer.Exit(code=1) - # 3. 
Update manifest with checksums and fingerprint + # 3. Load manifest and generate the canonical checksum list with open(manifest_path, 'r') as f: manifest = yaml.safe_load(f) + print(" - Calculating checksums for all source and binary files...") + # This function now also modifies the manifest in-place to add the checksums + data_to_sign = _create_canonical_checksum_list(staging_dir, manifest) + + # Add the key fingerprint to the manifest manifest['bundleAuthorKeyFingerprint'] = fingerprint - for plugin in manifest['bundlePlugins'].values(): - for binary in plugin.get('binaries', []): - binary_path = staging_dir / binary['path'] - if binary_path.exists(): - binary['checksum'] = "sha256:" + calculate_sha256(binary_path) - else: - binary['checksum'] = "MISSING_FILE" - + # 4. Write the updated manifest back to the staging directory with open(manifest_path, 'w') as f: yaml.dump(manifest, f, sort_keys=False) print(" - Added file checksums and key fingerprint to manifest.") - # 4. Sign the manifest - manifest_content = manifest_path.read_bytes() - - if isinstance(priv_key_obj, ed25519.Ed25519PrivateKey): - signature = priv_key_obj.sign(manifest_content) - elif isinstance(priv_key_obj, rsa.RSAPrivateKey): - signature = priv_key_obj.sign( - manifest_content, - padding.PKCS1v15(), - hashes.SHA256() + # 5. 
Sign the canonical checksum list
+    typer.echo(" - Signing the canonical checksum list...")
+    canonical_temp_data_file = staging_dir / "canonical_checksums.txt"
+    canonical_temp_data_file.write_text(data_to_sign, encoding='utf-8')
+    sig_path = staging_dir / "manifest.sig"
+    try:
+        # We sign the string data directly, not the manifest file
+        cmd_list = [
+            "openssl",
+            "pkeyutl",
+            "-sign",
+            "-in", str(canonical_temp_data_file),
+            "-inkey", str(private_key),
+            "-out", str(sig_path)
+        ]
+        subprocess.run(
+            cmd_list,
+            check=True,
+            capture_output=True
         )
-    else:
-        print("Error: Unsupported private key type for signing.", file=sys.stderr)
+        typer.echo(" - Created manifest.sig.")
+    except subprocess.CalledProcessError as e:
+        typer.secho(f"Error signing manifest: {e.stderr.decode().strip()}", fg=typer.colors.RED)
         raise typer.Exit(code=1)
-
-    sig_path = staging_dir / "manifest.sig"
-    sig_path.write_bytes(signature)
-    print(" - Created manifest.sig.")
-
-    # 5. Repackage the bundle
+    # 6. 
Repackage the bundle with zipfile.ZipFile(bundle_path, 'w', zipfile.ZIP_DEFLATED) as bundle_zip: for file_path in staging_dir.rglob('*'): if file_path.is_file(): diff --git a/fourdst/cli/bundle/validate.py b/fourdst/cli/bundle/validate.py index b2e04e4..9565ea2 100644 --- a/fourdst/cli/bundle/validate.py +++ b/fourdst/cli/bundle/validate.py @@ -9,6 +9,7 @@ import hashlib from rich.console import Console from rich.panel import Panel from rich.text import Text +from rich.table import Table console = Console() @@ -20,14 +21,18 @@ def _calculate_sha256(file_path: Path) -> str: sha256_hash.update(byte_block) return sha256_hash.hexdigest() -def _validate_bundle_directory(path: Path, is_temp: bool = False): +def _validate_bundle_directory(path: Path, is_temp: bool = False, display_name: str = None): """Validates a directory that is structured like an unpacked bundle.""" title = "Validating Pre-Bundle Directory" if not is_temp else "Validating Bundle Contents" - console.print(Panel(f"{title}: [bold]{path.name}[/bold]", border_style="blue")) - + name = display_name or path.name + console.print(Panel(f"{title}: [bold]{name}[/bold]", border_style="blue")) + errors = 0 warnings = 0 + # Section 1: Manifest file check + console.print(Panel("1. Manifest File Check", border_style="cyan")) + def check(condition, success_msg, error_msg, is_warning=False): nonlocal errors, warnings if condition: @@ -55,6 +60,7 @@ def _validate_bundle_directory(path: Path, is_temp: bool = False): raise typer.Exit(code=1) # 2. Check manifest content + console.print(Panel("2. 
Manifest Content Validation", border_style="cyan")) check(manifest is not None, "Manifest is not empty.", "Manifest file is empty.", is_warning=True) check('bundleName' in manifest, "Manifest contains 'bundleName'.", "Manifest is missing 'bundleName'.") check('bundleVersion' in manifest, "Manifest contains 'bundleVersion'.", "Manifest is missing 'bundleVersion'.") @@ -62,9 +68,25 @@ def _validate_bundle_directory(path: Path, is_temp: bool = False): plugins = manifest.get('bundlePlugins', {}) check(plugins, "Manifest contains 'bundlePlugins' section.", "Manifest is missing 'bundlePlugins' section.") + # Build Manifest Validation table + manifest_table = Table(title="Manifest Validation") + manifest_table.add_column("Check") + manifest_table.add_column("Status") + manifest_table.add_row("manifest.yaml exists", "āœ…" if manifest_file.is_file() else "āŒ") + # YAML parse status already captured by exception above + manifest_table.add_row("Manifest parses as YAML", "āœ…") + manifest_table.add_row("Manifest not empty", "āœ…" if manifest is not None else "āš ļø") + manifest_table.add_row("bundleName present", "āœ…" if 'bundleName' in manifest else "āŒ") + manifest_table.add_row("bundleVersion present", "āœ…" if 'bundleVersion' in manifest else "āŒ") + has_plugins = bool(manifest.get('bundlePlugins')) + manifest_table.add_row("bundlePlugins section", "āœ…" if has_plugins else "āŒ") + console.print(manifest_table) + plugins = manifest.get('bundlePlugins', {}) + # 3. Check files listed in manifest + console.print(Panel("3. 
Plugin Validation", border_style="magenta")) for name, data in plugins.items(): - console.print(f"\n--- Validating plugin: [bold cyan]{name}[/bold cyan] ---") + console.print(Panel(f"Plugin: [bold cyan]{name}[/bold cyan]", border_style="magenta")) sdist_info = data.get('sdist', {}) sdist_path_str = sdist_info.get('path') @@ -88,23 +110,69 @@ def _validate_bundle_directory(path: Path, is_temp: bool = False): f"Checksum mismatch for {bin_path_str}.\n Expected: {expected_checksum}\n Actual: {actual_checksum}" ) + # Build Plugin Validation table + plugin_table = Table(title="Plugin Validation") + plugin_table.add_column("Plugin") + plugin_table.add_column("Sdist Defined") + plugin_table.add_column("Sdist Exists") + plugin_table.add_column("Binaries OK") + plugin_table.add_column("Checksums OK") + for name, data in plugins.items(): + # sdist checks + sdist_path_str = data.get('sdist', {}).get('path') + sdist_defined = bool(sdist_path_str) + sdist_exists = sdist_defined and (path/ sdist_path_str).exists() + # binary & checksum checks + binaries = data.get('binaries', []) + binaries_ok = all(b.get('path') and (path/ b['path']).exists() for b in binaries) + checksums_ok = all(('checksum' in b and ("sha256:"+_calculate_sha256(path/ b['path']))==b['checksum']) for b in binaries) + plugin_table.add_row( + name, + "āœ…" if sdist_defined else "āŒ", + "āœ…" if sdist_exists else "āŒ", + "āœ…" if binaries_ok else "āŒ", + "āœ…" if checksums_ok else "āŒ" + ) + console.print(plugin_table) + # 4. Check for signature + console.print(Panel("4. 
Signature Check", border_style="yellow")) check((path / "manifest.sig").exists(), "Signature file 'manifest.sig' found.", "Signature file 'manifest.sig' is missing.", is_warning=True) + # Build Signature Check table + sig_table = Table(title="Signature Validation") + sig_table.add_column("Item") + sig_table.add_column("Status") + sig_exists = (path / "manifest.sig").exists() + sig_table.add_row( + "manifest.sig", + "āœ…" if sig_exists else "āš ļø" + ) + console.print(sig_table) + # Final summary console.print("-" * 40) + # Display summary in a table + + summary_table = Table(title="Validation Summary") + summary_table.add_column("Result") + summary_table.add_column("Errors", justify="right") + summary_table.add_column("Warnings", justify="right") + if errors == 0: - console.print(Panel( - f"[bold green]Validation Passed[/bold green]\nWarnings: {warnings}", - title="Result", - border_style="green" - )) + result = "Passed" + style = "green" else: - console.print(Panel( - f"[bold red]Validation Failed[/bold red]\nErrors: {errors}\nWarnings: {warnings}", - title="Result", - border_style="red" - )) + result = "Failed" + style = "red" + + summary_table.add_row( + f"[bold {style}]{result}[/bold {style}]", + str(errors), + str(warnings) + ) + console.print(summary_table) + if errors != 0: raise typer.Exit(code=1) def _validate_bundle_file(bundle_path: Path): @@ -114,7 +182,7 @@ def _validate_bundle_file(bundle_path: Path): try: with zipfile.ZipFile(bundle_path, 'r') as bundle_zip: bundle_zip.extractall(temp_dir) - _validate_bundle_directory(temp_dir, is_temp=True) + _validate_bundle_directory(temp_dir, is_temp=True, display_name=bundle_path.name) except zipfile.BadZipFile: console.print(Panel(f"[red]Error: '{bundle_path.name}' is not a valid zip file.[/red]", title="Validation Error")) raise typer.Exit(code=1) diff --git a/fourdst/cli/common/utils.py b/fourdst/cli/common/utils.py index 30cff8d..e90501e 100644 --- a/fourdst/cli/common/utils.py +++ 
b/fourdst/cli/common/utils.py @@ -33,12 +33,13 @@ def get_template_content(template_name: str) -> str: print(f"Error: Template file '{template_name}' not found.", file=sys.stderr) sys.exit(1) -def run_command(command: list[str], cwd: Path = None, check=True, display_output: bool = False): - """Runs a command, optionally displaying its output in a formatted box.""" +def run_command(command: list[str], cwd: Path = None, check=True, display_output: bool = False, env: dict = None): + """Runs a command, optionally displaying its output and using a custom environment.""" command_str = ' '.join(command) try: - result = subprocess.run(command, check=check, capture_output=True, text=True, cwd=cwd) + # Pass the env dictionary to subprocess.run + result = subprocess.run(command, check=check, capture_output=True, text=True, cwd=cwd, env=env) if display_output and (result.stdout or result.stderr): output_text = "" @@ -72,7 +73,7 @@ def run_command(command: list[str], cwd: Path = None, check=True, display_output raise typer.Exit(code=1) return e -def _detect_and_cache_abi(cross_file: Path = None): +def _detect_and_cache_abi(): """ Compiles and runs a C++ program to detect the compiler ABI, then caches it. """ @@ -103,11 +104,23 @@ def _detect_and_cache_abi(cross_file: Path = None): compiler = abi_details.get('compiler', 'unk_compiler') stdlib = abi_details.get('stdlib', 'unk_stdlib') - stdlib_version = abi_details.get('stdlib_version', 'unk_stdlib_version') + + # --- MODIFIED LOGIC FOR MACOS VERSIONING --- + # On macOS, the OS version is more useful than the internal libc++ version. + # But for the generic host detection, we still use the detected version. + # The targeting logic will override this. 
+ if sys.platform == "darwin": + # The C++ detector provides the internal _LIBCPP_VERSION + stdlib_version = abi_details.get('stdlib_version', 'unk_stdlib_version') + detected_os = "macos" + else: + # On Linux, this will be the glibc version + stdlib_version = abi_details.get('stdlib_version', 'unk_stdlib_version') + detected_os = abi_details.get("os", "linux") + abi = abi_details.get('abi', 'unk_abi') abi_string = f"{compiler}-{stdlib}-{stdlib_version}-{abi}" - detected_os = abi_details.get("os", "unknown_os") arch = platform.machine() platform_identifier = { @@ -140,6 +153,39 @@ def get_platform_identifier() -> dict: else: return _detect_and_cache_abi() +def get_macos_targeted_platform_identifier(target_version: str) -> dict: + """ + Generates a platform identifier for a specific target macOS version. + This bypasses host detection for the version string. + """ + # We still need the host's compiler info, so we run detection if not cached. + host_platform = get_platform_identifier() + host_details = host_platform['details'] + + compiler = host_details.get('compiler', 'clang') + stdlib = host_details.get('stdlib', 'libc++') + abi = host_details.get('abi', 'libc++_abi') + arch = platform.machine() + + abi_string = f"{compiler}-{stdlib}-{target_version}-{abi}" + + return { + "triplet": f"{arch}-macos", + "abi_signature": abi_string, + "details": { + "os": "macos", + "compiler": compiler, + "compiler_version": host_details.get('compiler_version'), + "stdlib": stdlib, + "stdlib_version": target_version, # The key change is here + "abi": abi, + }, + "is_native": True, + "cross_file": None, + "docker_image": None, + "arch": arch + } + def get_available_build_targets() -> list: """Gets native, cross-compilation, and Docker build targets.""" targets = [get_platform_identifier()] diff --git a/fourdst/cli/keys/generate.py b/fourdst/cli/keys/generate.py index 4d57c86..e2d454c 100644 --- a/fourdst/cli/keys/generate.py +++ b/fourdst/cli/keys/generate.py @@ -3,33 +3,58 @@ import 
typer import sys from pathlib import Path -from fourdst.cli.common.utils import run_command +from cryptography.hazmat.primitives.asymmetric import ed25519, rsa +from cryptography.hazmat.primitives import serialization keys_app = typer.Typer() @keys_app.command("generate") def keys_generate( - key_name: str = typer.Option("author_key", "--name", "-n", help="The base name for the generated key files.") + key_name: str = typer.Option("author_key", "--name", "-n", help="The base name for the generated key files."), + key_type: str = typer.Option("ed25519", "--type", "-t", help="Type of key to generate (ed25519|rsa).", case_sensitive=False) ): """ - Generates a new Ed25519 key pair for signing bundles. + Generates a new Ed25519 or RSA key pair for signing bundles. """ - private_key_path = Path(f"{key_name}") - public_key_path = Path(f"{key_name}.pub") + # Define PEM-formatted key file paths + private_key_path = Path(f"{key_name}.pem") + public_key_path = Path(f"{key_name}.pub.pem") if private_key_path.exists() or public_key_path.exists(): print(f"Error: Key files '{private_key_path}' or '{public_key_path}' already exist.", file=sys.stderr) raise typer.Exit(code=1) - print("Generating Ed25519 key pair...") - run_command([ - "ssh-keygen", - "-t", "ed25519", - "-f", str(private_key_path), - "-N", "", # No passphrase - "-C", "fourdst bundle signing key" - ]) - print("\nāœ… Keys generated successfully!") + # Generate key based on requested type + if key_type.lower() == "ed25519": + typer.echo("Generating Ed25519 key pair in PEM format via cryptography...") + private_key_obj = ed25519.Ed25519PrivateKey.generate() + elif key_type.lower() == "rsa": + typer.echo("Generating RSA-2048 key pair in PEM format via cryptography...") + private_key_obj = rsa.generate_private_key(public_exponent=65537, key_size=2048) + else: + typer.secho(f"Unsupported key type: {key_type}", fg=typer.colors.RED) + raise typer.Exit(code=1) + # Serialize private key to PEM + priv_pem = 
private_key_obj.private_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PrivateFormat.PKCS8, + encryption_algorithm=serialization.NoEncryption() + ) + private_key_path.write_bytes(priv_pem) + # Derive and serialize public key to PEM + public_key_obj = private_key_obj.public_key() + pub_pem = public_key_obj.public_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PublicFormat.SubjectPublicKeyInfo + ) + public_key_path.write_bytes(pub_pem) + # Also write OpenSSH-compatible public key + openssh_pub = public_key_obj.public_bytes( + encoding=serialization.Encoding.OpenSSH, + format=serialization.PublicFormat.OpenSSH + ) + Path(f"{key_name}.pub").write_bytes(openssh_pub) + print("\nāœ… PEM and OpenSSH-compatible keys generated successfully!") print(f" -> Private Key (KEEP SECRET): {private_key_path.resolve()}") print(f" -> Public Key (SHARE): {public_key_path.resolve()}") print("\nShare the public key with users who need to trust your bundles.") diff --git a/fourdst/cli/plugin/init.py b/fourdst/cli/plugin/init.py index 610e8ea..8154d05 100644 --- a/fourdst/cli/plugin/init.py +++ b/fourdst/cli/plugin/init.py @@ -130,6 +130,8 @@ def parse_cpp_header(header_path: Path): if not cindex.Config.loaded: try: # Attempt to find libclang automatically. This may need to be configured by the user. + # On systems like macOS, you might need to point to the specific version, e.g.: + # cindex.Config.set_library_path('/opt/homebrew/opt/llvm/lib') cindex.Config.set_library_file(cindex.conf.get_filename()) except cindex.LibclangError as e: print(f"Error: libclang library not found. Please ensure it's installed and in your system's path.", file=sys.stderr) @@ -137,25 +139,53 @@ def parse_cpp_header(header_path: Path): raise typer.Exit(code=1) index = cindex.Index.create() - translation_unit = index.parse(str(header_path)) + # Pass standard C++ arguments to the parser. This improves reliability. 
+ args = ['-x', 'c++', '-std=c++17'] + translation_unit = index.parse(str(header_path), args=args) + + if not translation_unit: + print(f"Error: Unable to parse the translation unit {header_path}", file=sys.stderr) + raise typer.Exit(code=1) interfaces = {} - for node in translation_unit.cursor.get_children(): - if node.kind == cindex.CursorKind.CLASS_DECL and node.is_pure_virtual(): - # Found a class with pure virtual methods, likely an interface - interface_name = node.spelling - print(f"Found interface: {interface_name}") + # --- Recursive function to walk the AST --- + def walk_ast(node): + # We are looking for class definitions, not just declarations. + if node.kind == cindex.CursorKind.CLASS_DECL and node.is_definition(): + # Collect pure virtual methods within this class + pv_methods = [m for m in node.get_children() + if m.kind == cindex.CursorKind.CXX_METHOD and m.is_pure_virtual_method()] + + # If it has pure virtual methods, it's an interface we care about + if pv_methods: + interface_name = node.spelling + methods = [] + print(f"Found interface: '{interface_name}'") + for method in pv_methods: + # Get the string representation of all argument types + args_str = ', '.join([arg.type.spelling for arg in method.get_arguments()]) + + # Reconstruct the signature from its parts. This is much more reliable. 
+ sig = f"{method.result_type.spelling} {method.spelling}({args_str})" + + # Append 'const' if the method is a const method + if method.is_const_method(): + sig += " const" - methods = [] - for method in node.get_children(): - if method.kind == cindex.CursorKind.CXX_METHOD and method.is_pure_virtual(): - # Only consider pure virtual methods - method_signature = f"{method.return_type.spelling} {method.spelling}({', '.join([arg.type.spelling for arg in method.get_arguments()])})" - method_body = "// TODO: Implement this method" - methods.append({"signature": method_signature, "body": method_body}) - print(f" Found pure virtual method: {method_signature}") + methods.append({"signature": sig, "body": " // TODO: Implement this method"}) + print(f" -> Found pure virtual method: {sig}") + + interfaces[interface_name] = methods + + interfaces[interface_name] = methods - interfaces[interface_name] = methods + # --- The recursive step --- + # Recurse for children of this node + for child in node.get_children(): + walk_ast(child) - return interfaces + # Start the traversal from the root of the AST + walk_ast(translation_unit.cursor) + + return interfaces \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml index e840ae2..9a89798 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -26,7 +26,8 @@ dependencies = [ "questionary", "rich", "pyyaml", - "cryptography" + "cryptography", + "pyOpenSSL" ] [project.scripts]