From 1aadd0173251e8b47da451888d0150fa2d2231ff Mon Sep 17 00:00:00 2001 From: Emily Boudreaux Date: Wed, 6 Aug 2025 08:25:03 -0400 Subject: [PATCH] feat(cli): added much more robust bundle handling --- build-python/meson.build | 60 ++ cli.md | 343 ++++++++ fourdst/cli/bundle/__init__.py | 0 fourdst/cli/bundle/clear.py | 69 ++ fourdst/cli/bundle/create.py | 132 +++ fourdst/cli/bundle/diff.py | 121 +++ fourdst/cli/bundle/fill.py | 171 ++++ fourdst/cli/bundle/inspect.py | 174 ++++ fourdst/cli/bundle/sign.py | 102 +++ fourdst/cli/bundle/validate.py | 143 +++ fourdst/cli/cache/__init__.py | 0 fourdst/cli/cache/clear.py | 20 + fourdst/cli/common/__init__.py | 0 fourdst/cli/common/config.py | 16 + fourdst/cli/common/templates.py | 129 +++ fourdst/cli/common/utils.py | 424 +++++++++ fourdst/cli/keys/__init__.py | 0 fourdst/cli/keys/add.py | 23 + fourdst/cli/keys/generate.py | 35 + fourdst/cli/keys/list.py | 23 + fourdst/cli/keys/remote/__init__.py | 1 + fourdst/cli/keys/remote/add.py | 31 + fourdst/cli/keys/remote/list.py | 24 + fourdst/cli/keys/remote/remove.py | 30 + fourdst/cli/keys/remove.py | 61 ++ fourdst/cli/keys/sync.py | 66 ++ fourdst/cli/main.py | 1268 ++------------------------- fourdst/cli/plugin/__init__.py | 0 fourdst/cli/plugin/diff.py | 105 +++ fourdst/cli/plugin/extract.py | 82 ++ fourdst/cli/plugin/init.py | 161 ++++ fourdst/cli/plugin/pack.py | 100 +++ fourdst/cli/plugin/validate.py | 74 ++ pyproject.toml | 1 + 34 files changed, 2788 insertions(+), 1201 deletions(-) create mode 100644 cli.md create mode 100644 fourdst/cli/bundle/__init__.py create mode 100644 fourdst/cli/bundle/clear.py create mode 100644 fourdst/cli/bundle/create.py create mode 100644 fourdst/cli/bundle/diff.py create mode 100644 fourdst/cli/bundle/fill.py create mode 100644 fourdst/cli/bundle/inspect.py create mode 100644 fourdst/cli/bundle/sign.py create mode 100644 fourdst/cli/bundle/validate.py create mode 100644 fourdst/cli/cache/__init__.py create mode 100644 
fourdst/cli/cache/clear.py create mode 100644 fourdst/cli/common/__init__.py create mode 100644 fourdst/cli/common/config.py create mode 100644 fourdst/cli/common/templates.py create mode 100644 fourdst/cli/common/utils.py create mode 100644 fourdst/cli/keys/__init__.py create mode 100644 fourdst/cli/keys/add.py create mode 100644 fourdst/cli/keys/generate.py create mode 100644 fourdst/cli/keys/list.py create mode 100644 fourdst/cli/keys/remote/__init__.py create mode 100644 fourdst/cli/keys/remote/add.py create mode 100644 fourdst/cli/keys/remote/list.py create mode 100644 fourdst/cli/keys/remote/remove.py create mode 100644 fourdst/cli/keys/remove.py create mode 100644 fourdst/cli/keys/sync.py create mode 100644 fourdst/cli/plugin/__init__.py create mode 100644 fourdst/cli/plugin/diff.py create mode 100644 fourdst/cli/plugin/extract.py create mode 100644 fourdst/cli/plugin/init.py create mode 100644 fourdst/cli/plugin/pack.py create mode 100644 fourdst/cli/plugin/validate.py diff --git a/build-python/meson.build b/build-python/meson.build index 44c813b..3c99163 100644 --- a/build-python/meson.build +++ b/build-python/meson.build @@ -37,3 +37,63 @@ py_installation.install_sources( ), subdir: 'fourdst/cli/templates', ) +py_installation.install_sources( + files( + meson.project_source_root() + '/fourdst/cli/bundle/__init__.py', + meson.project_source_root() + '/fourdst/cli/bundle/create.py', + meson.project_source_root() + '/fourdst/cli/bundle/fill.py', + meson.project_source_root() + '/fourdst/cli/bundle/inspect.py', + meson.project_source_root() + '/fourdst/cli/bundle/sign.py', + meson.project_source_root() + '/fourdst/cli/bundle/clear.py', + meson.project_source_root() + '/fourdst/cli/bundle/diff.py', + meson.project_source_root() + '/fourdst/cli/bundle/validate.py', + ), + subdir: 'fourdst/cli/bundle' +) +py_installation.install_sources( + files( + meson.project_source_root() + '/fourdst/cli/cache/__init__.py', + meson.project_source_root() + 
'/fourdst/cli/cache/clear.py' + ), + subdir: 'fourdst/cli/cache' +) +py_installation.install_sources( + files( + meson.project_source_root() + '/fourdst/cli/common/__init__.py', + meson.project_source_root() + '/fourdst/cli/common/config.py', + meson.project_source_root() + '/fourdst/cli/common/templates.py', + meson.project_source_root() + '/fourdst/cli/common/utils.py', + ), + subdir: 'fourdst/cli/common' +) +py_installation.install_sources( + files( + meson.project_source_root() + '/fourdst/cli/keys/__init__.py', + meson.project_source_root() + '/fourdst/cli/keys/generate.py', + meson.project_source_root() + '/fourdst/cli/keys/sync.py', + meson.project_source_root() + '/fourdst/cli/keys/add.py', + meson.project_source_root() + '/fourdst/cli/keys/list.py', + meson.project_source_root() + '/fourdst/cli/keys/remove.py', + ), + subdir: 'fourdst/cli/keys' +) +py_installation.install_sources( + files( + meson.project_source_root() + '/fourdst/cli/keys/remote/__init__.py', + meson.project_source_root() + '/fourdst/cli/keys/remote/add.py', + meson.project_source_root() + '/fourdst/cli/keys/remote/list.py', + meson.project_source_root() + '/fourdst/cli/keys/remote/remove.py', + ), + subdir: 'fourdst/cli/keys/remote' +) +py_installation.install_sources( + files( + meson.project_source_root() + '/fourdst/cli/plugin/__init__.py', + meson.project_source_root() + '/fourdst/cli/plugin/init.py', + meson.project_source_root() + '/fourdst/cli/plugin/pack.py', + meson.project_source_root() + '/fourdst/cli/plugin/extract.py', + meson.project_source_root() + '/fourdst/cli/plugin/diff.py', + meson.project_source_root() + '/fourdst/cli/plugin/validate.py', + ), + subdir: 'fourdst/cli/plugin' +) diff --git a/cli.md b/cli.md new file mode 100644 index 0000000..bc727cf --- /dev/null +++ b/cli.md @@ -0,0 +1,343 @@ +# `fourdst-cli` Documentation + +`fourdst`'s goal is to provide a single source for installation and utility for all lib* utilities. 
This includes python bindings and a command line interface for common tasks. This document covers the design and usage of that command line interface, `fourdst-cli` + +At the moment `fourdst-cli` only includes subprograms and commands related to plugin management for the 4D-STAR/libplugin library. [libplugin](https://github.com/4D-STAR/libplugin) is a small plugin library written for use by the 4D-STAR collaboration. Its goal is to allow researchers to easily share plugins to extend code in a low friction and reproducible manner. + +> **IMPORTANT:** Plugins, by their nature, allow arbitrary code to run on your system, libplugin does not perform any kind of sandboxing so this has the potential to be *very* unsafe. We have built in a rudimentary signing system to provide at least some security. However, the developers are researchers and not security professionals. As a user, please listen to the warnings you get. Self signed bundles are fine for you to use for yourself; however, be very very wary of unsigned or untrusted bundles when you are not the author. libplugin was developed for scientific use and with a focus on ease of use for scientists (and with the paradigm of collaborators sharing plugin files). This means that the security model does have some implicit level of trust assumed. Use this library at your own risk and **be very careful** about using unknown plugins. + +## Installation +`fourdst-cli` can be installed from pip + +```bash +pip install fourdst +``` + +or from source + +``` +git clone https://github.com/4D-STAR/fourdst +cd fourdst +pip install . +``` + +note that in order to install from source you will need `meson`, `ninja`, and `cmake` installed. If you do not have these, they can all be installed with pip + +```bash +pip install meson ninja cmake +``` + +Installing `fourdst` will make the `fourdst-cli` command line program available. Note you may need to restart or re-source your terminal for changes to take effect. 
+ + +## Core Concepts +There are a few concepts `fourdst-cli` defines; these are outlined below. + +### Plugins +A **Plugin** is a self-contained C++ project, built as a shared library (`.so`, `.dylib`), that implements a specific interface defined in a C++ header file. The `fourdst-cli` helps scaffold, build, and manage these projects. Generally, plugin consumers (programs that can make use of user plugins) must provide interface header files which can then be targeted by plugin authors. + +### Bundles (`.fbundle`) +A **Bundle** is a single `.fbundle` file (which is just a standard `.zip` archive using a different extension) that contains everything needed to distribute one or more plugins. A key design principle is that a bundle can contain both pre-compiled binaries for various platforms *and* the full source code. This hybrid approach provides the best of both worlds: +- **Convenience**: Users can often use a pre-compiled binary directly if one matches their system, which is fast and requires no local toolchain. +- **Flexibility & Longevity**: If a compatible binary isn't available, users can compile the plugin from the included source code. This ensures the plugin remains usable on future platforms. + +### ABI (Application Binary Interface) +The C++ ABI is a complex and often frustrating aspect of cross-platform development. It defines how compiled code interacts at a binary level. Different compilers (GCC, Clang, MSVC), standard libraries (libstdc++, libc++), and even compiler versions can produce ABI-incompatible binaries. + +**Why it matters:** You cannot simply mix and match C++ binaries compiled with different toolchains. `fourdst-cli` tackles this head-on by performing a one-time **ABI detection** on the host system. It compiles and runs a small C++ program to determine the exact compiler, standard library, and ABI flags. 
This generates a unique **ABI Signature** (e.g., `clang-libc++-1500.3.9.4-libc++_abi`) which is used to tag binaries, ensuring that only compatible libraries are loaded. + +### The Trust Store +To ensure security, `fourdst-cli` uses a cryptographic trust model based on public-key cryptography. +- **Signing**: Plugin authors can sign their bundles with a private key. This process adds the author's public key fingerprint to the bundle's manifest and creates a digital signature (`manifest.sig`). +- **Verification**: Consumers of the bundle add the author's public key to their local **Trust Store** (`~/.config/fourdst/keys`). When they inspect the bundle, the CLI checks if the signature is valid and if the key that created it is in the local trust store. This confirms both the bundle's integrity (it hasn't been tampered with) and its authenticity (it comes from a trusted author). + +--- + +## Command Reference +All commands and subprograms can display more detailed cli api by passing the `--help` flag. + +### `plugin` +Subprogram for managing individual plugin projects. + +#### `plugin init` +Initializes a new Meson-based C++ plugin project from a C++ header file that defines an interface. + +```bash +fourdst-cli plugin init --header +``` + +> The init command automates the tedious setup of a C++ project. It parses the header to find abstract classes (those with pure virtual methods) and generates a C++ source file with stubs for all the methods you need to implement. It also sets up a complete meson.build file with the fourdst/libplugin dependency, and initializes a Git repository, so you can start coding immediately. + +#### `plugin validate` + +Validates the structure of a plugin directory. + +```bash +fourdst-cli plugin validate [PLUGIN_PATH] +``` + +> This is a crucial command for quality assurance and continuous integration (CI). 
It quickly checks for common project structure errors, like a missing meson.build file or the absence of a shared_library() definition, before you attempt a full build. + +#### `plugin extract` + +Extracts a plugin's source code from a bundle. + +```bash +fourdst-cli plugin extract --out +``` + +> This allows developers to easily inspect the source code of a plugin contained within a bundle without having to manually unzip multiple archives. + +#### `plugin diff` + +Compares the source code of a specific plugin between two different bundles. + +```bash +fourdst-cli plugin diff +``` + +> This is an essential tool for understanding what changed between two versions of a plugin bundle, providing a "git diff" like experience for the plugin's source code. + +### `bundle` + +Commands for creating, signing, and managing distributable .fbundle files. + +#### `bundle create` + +Builds and packages one or more plugin projects into a single .fbundle file. + +```bash +fourdst-cli bundle create [PLUGIN_DIR_2...] --out my_bundle.fbundle +``` + +> Why it's designed this way: This is the primary command for authors. It automates the entire packaging process: +> +> 1. Compiles the plugin(s) for the host system. +> +> 2. Tags the resulting binary with the host's platform and ABI signature. +> +> 3. Packages the complete source code (respecting .gitignore) into a source distribution (sdist). +> +> 4. Creates a manifest.yaml file describing the contents. +> +> 5. Zips everything into a single, portable .fbundle file. + +#### `bundle inspect` + +Inspects a bundle, validating its contents and cryptographic signature. + +```bash +fourdst-cli bundle inspect +``` + +> This is the most important command for consumers. It provides a complete "report card" for a bundle: +> +> - Trust Status: It checks for a signature, verifies it against the manifest, and checks if the signing key is in your local trust store. 
It will clearly state if the bundle is SIGNED and TRUSTED, SIGNED but UNTRUSTED, or UNSIGNED. +> +> - Content Validation: It verifies that all files listed in the manifest actually exist in the archive and checks their checksums if the bundle is signed. +> +> - Compatibility: It lists all available binaries and highlights whether one is compatible with your current system's platform and ABI. + +#### `bundle sign` + +Signs a bundle with an author's private key. + +```bash +fourdst-cli bundle sign --key /path/to/private_key +``` + +> Security and authenticity are paramount. This command adds a cryptographic layer of trust to the bundle. It calculates checksums for all binary files, adds them to the manifest, and then signs the entire manifest. This makes the bundle tamper-proof; any modification to the manifest or the binary files will invalidate the signature. + +#### `bundle validate` + +Performs a strict validation of a bundle file or a pre-bundle directory. + +```bash +fourdst-cli bundle validate +``` + +> While inspect provides a user-friendly report, validate is a stricter check suitable for automated scripts. It returns a non-zero exit code if any error is found (missing files, checksum mismatches, invalid manifest), making it ideal for CI/CD pipelines. + +#### `bundle clear` + +Removes all compiled binaries from a bundle, leaving only the source distributions. + +```bash +fourdst-cli bundle clear +``` + +> This is useful for creating a "source-only" distribution. It reduces the file size and removes any potentially untrusted pre-compiled code, forcing the consumer to build from source. + +#### `bundle diff` + +Compares two bundle files, showing differences in their manifests, signatures, and file contents. + +```bash +fourdst-cli bundle diff +``` + +> Provides a high-level overview of what has changed between two bundle releases, including changes to the manifest, the signature, and which files have been added, removed, or modified. 
+ +#### `bundle fill` + +Builds new binaries for missing targets from the bundle's source. + +```bash +fourdst-cli bundle fill +``` + +> This is the magic that makes cross-platform distribution feasible. If a user receives a bundle without a binary for their specific platform (e.g., they are on aarch64-linux and the bundle only has an x86_64-linux binary), they can run bundle fill. The command will: +> +> - Detect available build targets (native, cross-compilation files, Docker). +> +> - Prompt the user to select which missing binaries they want to build. +> +> - Unpack the source, compile it using the selected target, and add the newly compiled, correctly tagged binary back into the bundle. +> +> - This empowers end-users to create binaries for their own platform without needing to be a C++ expert. + +### `keys` + +Commands for managing cryptographic keys and the trust store. + +- `keys generate`: Creates a new Ed25519 key pair for signing. + +- `keys add `: Adds a public key to the local trust store. + +- `keys remove [KEY_PATH]`: Removes a public key from the trust store. + +- `keys list`: Lists all trusted public keys. + +#### `keys sync` + +Syncs the local trust store with all configured remote Git repositories. + +> Manually adding keys can be cumbersome. This command allows you to point the CLI to one or more Git repositories that contain public keys. Running keys sync will pull the latest keys from all remotes, making it easy to keep your trust store up-to-date with keys from your team or community. + +### `keys remote` + +Manages the list of remote key repositories. + +- `keys remote add `: Adds a new remote Git repository. + +- `keys remote list`: Lists configured remotes. + +- `keys remote remove `: Removes a remote. + +> The usage of `keys remote` is intended to allow for remote source of trust to be established. There are risks associated with this as it shifts the expectation of trust onto the repository maintainers. 
Users should use remote public key stores at their own risk. + +> **Note:** We intend to establish a public key store on GitHub where plugin authors, officially vetted by the 4D-STAR collaboration, can register their public keys. **Any other public key stores for libplugin are unofficial and should be treated with extreme caution**. + +### `cache` + +Commands for managing the local cache. + +#### `cache clear` + +Clears all cached data, including the detected ABI signature. + +> The ABI signature is cached for performance. If you update your system's C++ compiler or toolchain, the cached ABI might become stale. cache clear deletes the cache, forcing a re-detection on the next run, ensuring your builds are always using the correct ABI signature. + +## Workflows + +### Workflow 1: Plugin Author (From Idea to Signed Bundle) + +#### 1. Define the Interface: +Create a C++ header file, my_interface.h, with a class containing pure virtual methods. + +#### 2. Initialize the Project: + +```bash +fourdst-cli plugin init my_awesome_plugin --header my_interface.h + +cd my_awesome_plugin +``` + +#### 3. Implement the Logic: +Open src/my_awesome_plugin.cpp and fill in the `TODO` sections with your plugin's implementation. + +#### 4. Build and Test Locally: + +```bash +meson setup builddir +meson compile -C builddir +``` + +#### 5. Create the Bundle: +From the parent directory of my_awesome_plugin run + +```bash +fourdst-cli bundle create my_awesome_plugin --out my_plugin_v1.fbundle +``` + +#### 6. Generate a Signing Key (only needs to be done once): + +```bash +fourdst-cli keys generate --name my_author_key +``` + +#### 7. Install your self signing key (only needs to be done once) +```bash +fourdst-cli keys add my_author_key.pub +``` +This will install the key you generated to your fourdst config (`$HOME/.config/fourdst/keys`). This lets you easily use self signed bundles. Note that you should **not** simply self sign bundles from the internet. + +#### 8. 
Sign the Bundle: + +```bash +fourdst-cli bundle sign my_plugin_v1.fbundle --key my_author_key +``` + +#### 9. Distribute: +As said at the top of this document the intended usage case of libplugin is something like "I am a researcher and I have this set of plugins I wrote for this code, I want my student, or collaborator, to be able to run the same code with the same set of plugins". Because of this we anticipate that usage will look like individual researchers building plugins and bundles and sharing them directly with others (i.e. not through some central distribution server). + +Plugin authors can register their public signing keys with us to establish a trusted set of authors (Note: that each author must be validated manually by current authors). This lets plugin consumers compare signed bundles to trusted sources. + +> We want to remind readers of the note at the top. The authors of this library are researchers and not security professionals. libplugin has been developed with the goal of making plugins easy for scientists and with a limited amount of security tooling built in; however, plugins should be treated as untrusted code and should only be used if you are **very** confident that you trust the author and that the plugin you have is actually from that author (and has not been modified or had malicious code injected into it). + +### Workflow 2: Plugin Consumer (Verifying and Using a Bundle) + +#### 1. Receive Files +You get my_plugin_v1.fbundle from an author. Either the author is a trusted author (at which point you can call fourdst-cli keys sync to synchronize trusted keys with the 4D-STAR github keychain) or they are not. If they are not they can choose to share the public key of the key pair used to sign the bundle. + +> Note: You must be **very** sure you trust the plugin author if you accept their public key. Arbitrary code signed with the private key of this pair will be able to run on your computer. 
**Do not accept random public keys from plugin authors on the internet**. This is intended to be used by trusted collaborators, advisors, or other personal / professional connections. + +Assuming you do trust the author... + +#### 2. Add the author's public key to your local trust store. + +```bash +fourdst-cli keys add /path/to/my_author_key.pub +``` + +#### 3. Inspect the Bundle: +Verify the signature and check for a compatible binary. + +```bash +fourdst-cli bundle inspect my_plugin_v1.fbundle +``` + +- **Scenario A:** Compatible binary exists. The output shows Trust Status: ✅ SIGNED and TRUSTED and highlights a compatible binary in green. You are ready to use the plugin. + +- **Scenario B:** No compatible binary. The output shows the bundle is trusted, but warns that no binary matches your system's ABI. + +- **Scenario C:** Untrusted or unsigned + +#### 4a. (Optional) Fill the Bundle: +If you are in Scenario B, build the binary from the included source + +```bash +fourdst-cli bundle fill my_plugin_v1.fbundle +``` + +- The CLI will prompt you to build for your native platform. After it finishes, running bundle inspect again will show a new, compatible binary. The bundle is now ready to use. + +#### 4b. (Optional) Sign the Bundle: +If you are in Scenario C, and **if you trust the author and are confident that the author you think sent you the plugin *actually* sent you the plugin** you may choose to self sign the plugin. + +Note that this increases your risk of running malicious code, you are effectively saying "I trust this code regardless of the fact that no chain of trust can be established". If you accept that risk you can choose to sign the code yourself. Follow the instructions in the above workflow to do this. + + +#### 5. Use the bundle +Whatever code you have that uses libplugin for plugins will expect the bundle path to be provided somehow (this might be through a command line option, config file, or some other manner). 
Pass the filled, signed, and validated bundle to this program. + diff --git a/fourdst/cli/bundle/__init__.py b/fourdst/cli/bundle/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/fourdst/cli/bundle/clear.py b/fourdst/cli/bundle/clear.py new file mode 100644 index 0000000..e5f59dd --- /dev/null +++ b/fourdst/cli/bundle/clear.py @@ -0,0 +1,69 @@ +# fourdst/cli/bundle/clear.py +import typer +import yaml +import zipfile +from pathlib import Path +import tempfile +import shutil + +def bundle_clear( + bundle_path: Path = typer.Argument(..., help="The path to the .fbundle file to clear.", exists=True, readable=True, writable=True) +): + """ + Removes all compiled binaries from a bundle, leaving only the source distributions. + """ + typer.echo(f"--- Clearing binaries from bundle: {bundle_path.name} ---") + + try: + with tempfile.TemporaryDirectory() as temp_dir_str: + temp_dir = Path(temp_dir_str) + + # 1. Unpack the bundle + with zipfile.ZipFile(bundle_path, 'r') as bundle_zip: + bundle_zip.extractall(temp_dir) + + # 2. Read the manifest + manifest_path = temp_dir / "manifest.yaml" + if not manifest_path.is_file(): + typer.secho("Error: Bundle is invalid. Missing manifest.yaml.", fg=typer.colors.RED) + raise typer.Exit(code=1) + + with open(manifest_path, 'r') as f: + manifest = yaml.safe_load(f) + + # 3. Clear binaries and signatures + typer.echo("Clearing binaries and signature information...") + manifest.pop('bundleAuthorKeyFingerprint', None) + manifest.pop('checksums', None) + + for plugin_name, plugin_data in manifest.get('bundlePlugins', {}).items(): + if 'binaries' in plugin_data: + plugin_data['binaries'] = [] + + # 4. Delete the binaries directory and signature file + bin_dir = temp_dir / "bin" + if bin_dir.is_dir(): + shutil.rmtree(bin_dir) + typer.echo(" - Removed 'bin/' directory.") + + sig_file = temp_dir / "manifest.sig" + if sig_file.is_file(): + sig_file.unlink() + typer.echo(" - Removed 'manifest.sig'.") + + # 5. 
Write the updated manifest + with open(manifest_path, 'w') as f: + yaml.dump(manifest, f, sort_keys=False) + + # 6. Repack the bundle + typer.echo("Repacking the bundle...") + with zipfile.ZipFile(bundle_path, 'w', zipfile.ZIP_DEFLATED) as bundle_zip: + for file_path in temp_dir.rglob('*'): + if file_path.is_file(): + bundle_zip.write(file_path, file_path.relative_to(temp_dir)) + + typer.secho(f"\n✅ Bundle '{bundle_path.name}' has been cleared of all binaries.", fg=typer.colors.GREEN) + + except Exception as e: + typer.secho(f"An unexpected error occurred: {e}", fg=typer.colors.RED) + raise typer.Exit(code=1) diff --git a/fourdst/cli/bundle/create.py b/fourdst/cli/bundle/create.py new file mode 100644 index 0000000..71d8e30 --- /dev/null +++ b/fourdst/cli/bundle/create.py @@ -0,0 +1,132 @@ +# fourdst/cli/bundle/create.py + +import typer +import os +import sys +import shutil +import datetime +import yaml +import zipfile +from pathlib import Path + +from fourdst.cli.common.utils import get_platform_identifier, run_command + +bundle_app = typer.Typer() + +@bundle_app.command("create") +def bundle_create( + plugin_dirs: list[Path] = typer.Argument(..., help="A list of plugin project directories to include.", exists=True, file_okay=False), + output_bundle: Path = typer.Option("bundle.fbundle", "--out", "-o", help="The path for the output bundle file."), + bundle_name: str = typer.Option("MyPluginBundle", "--name", help="The name of the bundle."), + bundle_version: str = typer.Option("0.1.0", "--ver", help="The version of the bundle."), + bundle_author: str = typer.Option("Unknown", "--author", help="The author of the bundle.") +): + """ + Builds and packages one or more plugin projects into a single .fbundle file. + """ + staging_dir = Path("temp_bundle_staging") + if staging_dir.exists(): + shutil.rmtree(staging_dir) + staging_dir.mkdir() + + # Get the host platform identifier, triggering detection if needed. 
+ host_platform = get_platform_identifier() + + manifest = { + "bundleName": bundle_name, + "bundleVersion": bundle_version, + "bundleAuthor": bundle_author, + "bundleComment": "Created with fourdst-cli", + "bundledOn": datetime.datetime.now().isoformat(), + "bundlePlugins": {} + } + + print("Creating bundle...") + for plugin_dir in plugin_dirs: + plugin_name = plugin_dir.name + print(f"--> Processing plugin: {plugin_name}") + + # 1. Build the plugin + print(f" - Compiling for host platform...") + build_dir = plugin_dir / "builddir" + if not build_dir.exists(): + run_command(["meson", "setup", "builddir"], cwd=plugin_dir) + run_command(["meson", "compile", "-C", "builddir"], cwd=plugin_dir) + + # 2. Find the compiled artifact + compiled_lib = next(build_dir.glob("lib*.so"), None) or next(build_dir.glob("lib*.dylib"), None) + if not compiled_lib: + print(f"Error: Could not find compiled library for {plugin_name} (expected lib*.so or lib*.dylib)", file=sys.stderr) + raise typer.Exit(code=1) + + # 3. Package source code (sdist), respecting .gitignore + print(" - Packaging source code (respecting .gitignore)...") + sdist_path = staging_dir / f"{plugin_name}_src.zip" + + # Use git to list files, which automatically respects .gitignore + git_check = run_command(["git", "rev-parse", "--is-inside-work-tree"], cwd=plugin_dir, check=False) + + files_to_include = [] + if git_check.returncode == 0: + # This is a git repo, use git to list files + result = run_command(["git", "ls-files", "--cached", "--others", "--exclude-standard"], cwd=plugin_dir) + files_to_include = [plugin_dir / f for f in result.stdout.strip().split('\n') if f] + else: + # Not a git repo, fall back to os.walk and warn the user + typer.secho(f" - Warning: '{plugin_dir.name}' is not a git repository. 
Packaging all files.", fg=typer.colors.YELLOW) + for root, _, files in os.walk(plugin_dir): + if 'builddir' in root: + continue + for file in files: + files_to_include.append(Path(root) / file) + + with zipfile.ZipFile(sdist_path, 'w', zipfile.ZIP_DEFLATED) as sdist_zip: + for file_path in files_to_include: + if file_path.is_file(): + sdist_zip.write(file_path, file_path.relative_to(plugin_dir)) + + # 4. Stage artifacts with ABI-tagged filenames and update manifest + binaries_dir = staging_dir / "bin" + binaries_dir.mkdir(exist_ok=True) + + # Construct new filename with arch, os, and ABI tag + base_name = compiled_lib.stem # e.g., "libplugin_a" + ext = compiled_lib.suffix # e.g., ".so" + triplet = host_platform["triplet"] + abi_signature = host_platform["abi_signature"] + tagged_filename = f"{base_name}.{triplet}.{abi_signature}{ext}" + staged_lib_path = binaries_dir / tagged_filename + + print(f" - Staging binary as: {tagged_filename}") + shutil.copy(compiled_lib, staged_lib_path) + + manifest["bundlePlugins"][plugin_name] = { + "sdist": { + "path": sdist_path.name, + "sdistBundledOn": datetime.datetime.now().isoformat(), + "buildable": True + }, + "binaries": [{ + "platform": { + "triplet": host_platform["triplet"], + "abi_signature": host_platform["abi_signature"] + }, + "path": staged_lib_path.relative_to(staging_dir).as_posix(), + "compiledOn": datetime.datetime.now().isoformat() + }] + } + + # 5. 
# fourdst/cli/bundle/diff.py

import difflib
import tempfile
import zipfile
from pathlib import Path

import typer
import yaml
from rich.console import Console
from rich.panel import Panel
from rich.table import Table
from rich.text import Text

console = Console()


def _get_file_content(directory: Path, filename: str):
    """Return the raw bytes of *filename* under *directory*, or None if absent."""
    file_path = directory / filename
    if not file_path.exists():
        return None
    return file_path.read_bytes()


def _collect_binary_checksums(manifest: dict) -> dict:
    """Map binary path -> checksum across ALL plugins in a bundle manifest.

    BUGFIX: the previous implementation only inspected the *first* plugin
    (via ``next(iter(manifest['bundlePlugins']))``), so binaries belonging
    to every other plugin were silently excluded from the comparison.
    """
    checksums = {}
    for plugin_data in (manifest or {}).get('bundlePlugins', {}).values():
        for binary in plugin_data.get('binaries', []):
            path = binary.get('path')
            if path:
                checksums[path] = binary.get('checksum')
    return checksums


def bundle_diff(
    bundle_a_path: Path = typer.Argument(..., help="The first bundle to compare.", exists=True, readable=True),
    bundle_b_path: Path = typer.Argument(..., help="The second bundle to compare.", exists=True, readable=True),
):
    """
    Compares two bundle files, showing differences in their manifests,
    signatures, and contents.
    """
    console.print(Panel(f"Comparing [bold blue]{bundle_a_path.name}[/bold blue] with [bold blue]{bundle_b_path.name}[/bold blue]"))

    with tempfile.TemporaryDirectory() as temp_a_str, tempfile.TemporaryDirectory() as temp_b_str:
        temp_a = Path(temp_a_str)
        temp_b = Path(temp_b_str)

        # Unpack both bundles into throw-away directories.
        with zipfile.ZipFile(bundle_a_path, 'r') as z:
            z.extractall(temp_a)
        with zipfile.ZipFile(bundle_b_path, 'r') as z:
            z.extractall(temp_b)

        # --- 1. Compare Signatures ---
        sig_a = _get_file_content(temp_a, "manifest.sig")
        sig_b = _get_file_content(temp_b, "manifest.sig")

        if sig_a == sig_b and sig_a is not None:
            sig_status, sig_panel_style = "[green]UNCHANGED[/green]", "green"
        elif sig_a and not sig_b:
            sig_status, sig_panel_style = "[yellow]REMOVED[/yellow]", "yellow"
        elif not sig_a and sig_b:
            sig_status, sig_panel_style = "[yellow]ADDED[/yellow]", "yellow"
        elif sig_a and sig_b and sig_a != sig_b:
            sig_status, sig_panel_style = "[bold red]CHANGED[/bold red]", "red"
        else:
            sig_status, sig_panel_style = "[dim]Both Unsigned[/dim]", "dim"

        console.print(Panel(f"Signature Status: {sig_status}", title="[bold]Signature Verification[/bold]", border_style=sig_panel_style, expand=False))

        # --- 2. Compare Manifests (textual unified diff) ---
        manifest_a_content = (temp_a / "manifest.yaml").read_text()
        manifest_b_content = (temp_b / "manifest.yaml").read_text()

        if manifest_a_content != manifest_b_content:
            diff = difflib.unified_diff(
                manifest_a_content.splitlines(keepends=True),
                manifest_b_content.splitlines(keepends=True),
                fromfile=f"{bundle_a_path.name}/manifest.yaml",
                tofile=f"{bundle_b_path.name}/manifest.yaml",
            )

            diff_text = Text()
            for line in diff:
                if line.startswith('+'):
                    diff_text.append(line, style="green")
                elif line.startswith('-'):
                    diff_text.append(line, style="red")
                elif line.startswith('^'):
                    diff_text.append(line, style="blue")
                else:
                    diff_text.append(line)

            console.print(Panel(diff_text, title="[bold]Manifest Differences[/bold]", border_style="yellow"))
        else:
            console.print(Panel("[green]Manifests are identical.[/green]", title="[bold]Manifest[/bold]", border_style="green"))

        # --- 3. Compare File Contents (via manifest checksums, all plugins) ---
        manifest_a = yaml.safe_load(manifest_a_content)
        manifest_b = yaml.safe_load(manifest_b_content)

        files_a = _collect_binary_checksums(manifest_a)
        files_b = _collect_binary_checksums(manifest_b)

        table = Table(title="File Content Comparison")
        table.add_column("File Path", style="cyan")
        table.add_column("Status", style="magenta")
        table.add_column("Details", style="yellow")

        all_files = sorted(set(files_a) | set(files_b))
        has_content_changes = False

        for file in all_files:
            in_a = file in files_a
            in_b = file in files_b

            if in_a and not in_b:
                table.add_row(file, "[red]REMOVED[/red]", "")
                has_content_changes = True
            elif not in_a and in_b:
                table.add_row(file, "[green]ADDED[/green]", "")
                has_content_changes = True
            elif files_a[file] != files_b[file]:
                table.add_row(file, "[yellow]MODIFIED[/yellow]", f"Checksum changed from {files_a.get(file, 'N/A')} to {files_b.get(file, 'N/A')}")
                has_content_changes = True

        if has_content_changes:
            console.print(table)
        else:
            console.print(Panel("[green]All file contents are identical.[/green]", title="[bold]File Contents[/bold]", border_style="green"))
# fourdst/cli/bundle/fill.py

import datetime
import shutil
import subprocess
import traceback
import zipfile
from pathlib import Path

import questionary
import typer
import yaml

try:
    import docker
except ImportError:
    docker = None  # Docker is an optional dependency for the 'fill' command

from rich.console import Console
from rich.panel import Panel

from fourdst.cli.common.utils import get_available_build_targets, _build_plugin_in_docker, _build_plugin_for_target

console = Console()

bundle_app = typer.Typer()


@bundle_app.command("fill")
def bundle_fill(bundle_path: Path = typer.Argument(..., help="The .fbundle file to fill with new binaries.", exists=True)):
    """
    Builds new binaries for the current host or cross-targets from the bundle's source.

    Unpacks the bundle, offers one build option per (plugin, target) pair that
    has an sdist but no binary for that target's ABI, builds the selection,
    stages the results under ``bin/``, and repackages the bundle in place.
    Any existing signature is invalidated because the manifest changes.
    """
    staging_dir = Path(f"temp_fill_{bundle_path.stem}")
    if staging_dir.exists():
        shutil.rmtree(staging_dir)

    try:
        # 1. Unpack and load manifest.
        with zipfile.ZipFile(bundle_path, 'r') as bundle_zip:
            bundle_zip.extractall(staging_dir)

        manifest_path = staging_dir / "manifest.yaml"
        if not manifest_path.exists():
            typer.secho("Error: Bundle is invalid. Missing manifest.yaml.", fg=typer.colors.RED)
            raise typer.Exit(code=1)

        with open(manifest_path, 'r') as f:
            manifest = yaml.safe_load(f)

        # 2. Find available targets and missing binaries.
        available_targets = get_available_build_targets()
        build_options = []

        for plugin_name, plugin_data in manifest.get('bundlePlugins', {}).items():
            if "sdist" not in plugin_data:
                continue  # Cannot build without source.

            existing_abis = {b['platform']['abi_signature'] for b in plugin_data.get('binaries', [])}

            for target in available_targets:
                # Use a more descriptive name for the choice.
                if target.get('docker_image'):
                    display_name = f"Docker: {target['docker_image']}"
                elif target.get('cross_file'):
                    display_name = f"Cross: {Path(target['cross_file']).name}"
                else:
                    display_name = f"Native: {target['abi_signature']} (Local System)"

                if target['abi_signature'] not in existing_abis:
                    build_options.append({
                        "name": f"Build '{plugin_name}' for {display_name}",
                        "plugin_name": plugin_name,
                        "target": target
                    })

        if not build_options:
            typer.secho("✅ Bundle is already full for all available build targets.", fg=typer.colors.GREEN)
            raise typer.Exit()

        # 3. Prompt user to select which targets to build.
        choices = [opt['name'] for opt in build_options]
        selected_builds = questionary.checkbox(
            "Select which missing binaries to build:",
            choices=choices
        ).ask()

        if not selected_builds:
            typer.echo("No binaries selected to build. Exiting.")
            raise typer.Exit()

        # 4. Build selected targets.
        for build_name in selected_builds:
            build_job = next(opt for opt in build_options if opt['name'] == build_name)
            plugin_name = build_job['plugin_name']
            target = build_job['target']

            typer.secho(f"\nBuilding {plugin_name} for target '{build_name}'...", bold=True)

            sdist_zip_path = staging_dir / manifest['bundlePlugins'][plugin_name]['sdist']['path']
            build_temp_dir = staging_dir / f"build_{plugin_name}"

            try:
                # BUGFIX: .get() instead of raw indexing — cross-file targets
                # set docker_image=None but cached/foreign target dicts may
                # omit the key entirely.
                if target.get('docker_image'):
                    if not docker:
                        typer.secho("Error: Docker is not installed. Please install Docker to build this target.", fg=typer.colors.RED)
                        continue
                    compiled_lib, final_target = _build_plugin_in_docker(sdist_zip_path, build_temp_dir, target, plugin_name)
                else:
                    compiled_lib, final_target = _build_plugin_for_target(sdist_zip_path, build_temp_dir, target)

                # Stage the new binary under bin/ with an ABI-tagged name.
                abi_tag = final_target["abi_signature"]
                base_name = compiled_lib.stem
                ext = compiled_lib.suffix
                triplet = final_target["triplet"]
                tagged_filename = f"{base_name}.{triplet}.{abi_tag}{ext}"

                binaries_dir = staging_dir / "bin"
                binaries_dir.mkdir(exist_ok=True)
                staged_lib_path = binaries_dir / tagged_filename
                shutil.move(compiled_lib, staged_lib_path)

                # Update manifest.
                # BUGFIX: cross-file targets carry no 'arch' key; fall back to
                # the leading component of the triplet rather than KeyError-ing.
                new_binary_entry = {
                    "platform": {
                        "triplet": final_target["triplet"],
                        "abi_signature": abi_tag,
                        "arch": final_target.get("arch", final_target["triplet"].split("-")[0])
                    },
                    "path": staged_lib_path.relative_to(staging_dir).as_posix(),
                    "compiledOn": datetime.datetime.now().isoformat()
                }
                # BUGFIX: a plugin entry fresh from 'bundle create' may lack a
                # 'binaries' list if it was hand-edited; setdefault is safe.
                manifest['bundlePlugins'][plugin_name].setdefault('binaries', []).append(new_binary_entry)
                typer.secho(f" -> Successfully built and staged {tagged_filename}", fg=typer.colors.GREEN)

            # BUGFIX: run_command() reports failures by raising typer.Exit, so
            # the original (FileNotFoundError, CalledProcessError) clause let a
            # single failed target abort the whole fill. Catch it here too and
            # move on to the next selected build.
            except (FileNotFoundError, subprocess.CalledProcessError, typer.Exit) as e:
                typer.secho(f" -> Failed to build {plugin_name} for target '{build_name}': {e}", fg=typer.colors.RED)

                tb_str = traceback.format_exc()
                console.print(Panel(
                    tb_str,
                    title="Traceback",
                    border_style="yellow",
                    expand=False
                ))

            finally:
                if build_temp_dir.exists():
                    shutil.rmtree(build_temp_dir)

        # 5. Repackage the bundle, invalidating any old signature.
        if "bundleAuthorKeyFingerprint" in manifest:
            del manifest["bundleAuthorKeyFingerprint"]
        if (staging_dir / "manifest.sig").exists():
            (staging_dir / "manifest.sig").unlink()
            typer.secho("\n⚠️ Bundle signature has been invalidated by this operation. Please re-sign the bundle.", fg=typer.colors.YELLOW)

        with open(manifest_path, 'w') as f:
            yaml.dump(manifest, f, sort_keys=False)

        with zipfile.ZipFile(bundle_path, 'w', zipfile.ZIP_DEFLATED) as bundle_zip:
            for file_path in staging_dir.rglob('*'):
                if file_path.is_file():
                    bundle_zip.write(file_path, file_path.relative_to(staging_dir))

        typer.secho(f"\n✅ Bundle '{bundle_path.name}' has been filled successfully.", fg=typer.colors.GREEN)

    finally:
        if staging_dir.exists():
            shutil.rmtree(staging_dir)
# fourdst/cli/bundle/inspect.py

import hashlib
import shutil
import sys
import zipfile
from pathlib import Path

import typer
import yaml

try:
    from cryptography.hazmat.primitives import serialization, hashes
    from cryptography.hazmat.primitives.asymmetric import padding, rsa, ed25519
    from cryptography.exceptions import InvalidSignature
except ImportError:
    print("Error: This CLI now requires 'cryptography'. Please install it.", file=sys.stderr)
    print("Run: pip install cryptography", file=sys.stderr)
    sys.exit(1)

from fourdst.cli.common.config import LOCAL_TRUST_STORE_PATH
from fourdst.cli.common.utils import calculate_sha256, get_platform_identifier

bundle_app = typer.Typer()


@bundle_app.command("inspect")
def bundle_inspect(bundle_path: Path = typer.Argument(..., help="The .fbundle file to inspect.", exists=True)):
    """
    Inspects a bundle, validating its contents and cryptographic signature.

    Reports: header metadata, signature/trust status against the local trust
    store, per-file presence and checksum validation, and per-plugin binary
    compatibility with the current host ABI.
    """
    staging_dir = Path(f"temp_inspect_{bundle_path.stem}")
    if staging_dir.exists():
        shutil.rmtree(staging_dir)

    try:
        # Get current system info first (may trigger one-time ABI detection).
        host_platform = get_platform_identifier()

        # 1. Unpack and load manifest.
        with zipfile.ZipFile(bundle_path, 'r') as bundle_zip:
            archive_files = set(bundle_zip.namelist())
            bundle_zip.extractall(staging_dir)

        manifest_path = staging_dir / "manifest.yaml"
        if not manifest_path.exists():
            typer.secho("Error: Bundle is invalid. Missing manifest.yaml.", fg=typer.colors.RED)
            raise typer.Exit(code=1)

        with open(manifest_path, 'r') as f:
            manifest = yaml.safe_load(f)

        # 2. Print header.
        typer.secho(f"--- Bundle Inspection Report for: {bundle_path.name} ---", bold=True)
        typer.echo(f"Name: {manifest.get('bundleName', 'N/A')}")
        typer.echo(f"Version: {manifest.get('bundleVersion', 'N/A')}")
        typer.echo(f"Author: {manifest.get('bundleAuthor', 'N/A')}")
        typer.echo(f"Bundled: {manifest.get('bundledOn', 'N/A')}")
        typer.secho(f"Host ABI: {host_platform['abi_signature']}", dim=True)
        typer.secho(f"Host Arch: {host_platform['triplet']}", dim=True)
        typer.echo("-" * 50)

        # 3. Signature and trust verification.
        fingerprint = manifest.get('bundleAuthorKeyFingerprint')
        sig_path = staging_dir / "manifest.sig"

        if not fingerprint or not sig_path.exists():
            typer.secho("Trust Status: 🟡 UNSIGNED", fg=typer.colors.YELLOW)
        else:
            # Find the signing key in the local trust store by fingerprint.
            trusted_key_path = None
            if LOCAL_TRUST_STORE_PATH.exists():
                for key_file in LOCAL_TRUST_STORE_PATH.rglob("*.pub"):
                    # BUGFIX: one malformed .pub used to abort the entire scan;
                    # skip unparsable keys instead.
                    try:
                        pub_key = serialization.load_ssh_public_key(key_file.read_bytes())
                    except (ValueError, serialization.UnsupportedAlgorithm):
                        continue
                    pub_key_bytes = pub_key.public_bytes(
                        encoding=serialization.Encoding.OpenSSH,
                        format=serialization.PublicFormat.OpenSSH
                    )
                    pub_key_fingerprint = "sha256:" + hashlib.sha256(pub_key_bytes).hexdigest()
                    if pub_key_fingerprint == fingerprint:
                        trusted_key_path = key_file
                        break

            if not trusted_key_path:
                typer.secho(f"Trust Status: ⚠️ SIGNED but UNTRUSTED AUTHOR ({fingerprint})", fg=typer.colors.YELLOW)
            else:
                try:
                    pub_key_obj = serialization.load_ssh_public_key(trusted_key_path.read_bytes())
                    signature = sig_path.read_bytes()
                    manifest_content = manifest_path.read_bytes()

                    if isinstance(pub_key_obj, ed25519.Ed25519PublicKey):
                        pub_key_obj.verify(signature, manifest_content)
                    elif isinstance(pub_key_obj, rsa.RSAPublicKey):
                        pub_key_obj.verify(
                            signature,
                            manifest_content,
                            padding.PKCS1v15(),
                            hashes.SHA256()
                        )
                    else:
                        # BUGFIX: previously an unsupported key type fell
                        # through BOTH branches — nothing was verified — and
                        # the bundle was still reported as TRUSTED.
                        raise TypeError(type(pub_key_obj).__name__)
                    typer.secho(f"Trust Status: ✅ SIGNED and TRUSTED ({trusted_key_path.relative_to(LOCAL_TRUST_STORE_PATH)})", fg=typer.colors.GREEN)
                except InvalidSignature:
                    typer.secho(f"Trust Status: ❌ INVALID SIGNATURE ({fingerprint})", fg=typer.colors.RED)
                except TypeError as exc:
                    typer.secho(f"Trust Status: ⚠️ SIGNED but key type is unsupported ({exc}) — NOT verified", fg=typer.colors.YELLOW)

        typer.echo("-" * 50)

        # 4. Content validation: every manifest-listed file must be present,
        #    and binaries with checksums must match on disk.
        typer.echo("Validating bundle contents...")
        missing_files = []
        checksum_errors = []

        for plugin_name, plugin_data in manifest.get('bundlePlugins', {}).items():
            sdist_path = plugin_data.get('sdist', {}).get('path')
            if sdist_path and sdist_path not in archive_files:
                missing_files.append(sdist_path)

            for binary in plugin_data.get('binaries', []):
                binary_path_str = binary.get('path')
                if binary_path_str and binary_path_str not in archive_files:
                    missing_files.append(binary_path_str)
                elif binary_path_str:
                    expected_checksum = binary.get('checksum')
                    if expected_checksum:
                        actual_checksum = "sha256:" + calculate_sha256(staging_dir / binary_path_str)
                        if actual_checksum != expected_checksum:
                            checksum_errors.append(binary_path_str)

        if not missing_files and not checksum_errors:
            typer.secho("Content Validation: ✅ OK", fg=typer.colors.GREEN)
        else:
            typer.secho("Content Validation: ❌ FAILED", fg=typer.colors.RED)
            for f in missing_files:
                typer.echo(f"  - Missing file from archive: {f}")
            for f in checksum_errors:
                typer.echo(f"  - Checksum mismatch for: {f}")

        # 5. Plugin details, flagging host-ABI-compatible binaries.
        typer.echo("-" * 50)
        typer.secho("Available Plugins:", bold=True)
        for plugin_name, plugin_data in manifest.get('bundlePlugins', {}).items():
            typer.echo(f"\n  Plugin: {plugin_name}")
            typer.echo(f"    Source Dist: {plugin_data.get('sdist', {}).get('path', 'N/A')}")
            binaries = plugin_data.get('binaries', [])

            host_compatible_binary_found = False
            if not binaries:
                typer.echo("    Binaries: None")
            else:
                typer.echo("    Binaries:")
                for b in binaries:
                    plat = b.get('platform', {})
                    is_compatible = (plat.get('triplet') == host_platform['triplet'] and
                                     plat.get('abi_signature') == host_platform['abi_signature'])

                    color = typer.colors.GREEN if is_compatible else None
                    if is_compatible:
                        host_compatible_binary_found = True

                    typer.secho(f"      - Path: {b.get('path', 'N/A')}", fg=color)
                    typer.secho(f"        ABI: {plat.get('abi_signature', 'N/A')}", fg=color, dim=True)
                    typer.secho(f"        Arch: {plat.get('triplet', 'N/A')}", fg=color, dim=True)

            if not host_compatible_binary_found:
                typer.secho(
                    f"    Note: No compatible binary found for the current system ({host_platform['triplet']}).",
                    fg=typer.colors.YELLOW
                )
                typer.secho(
                    "          Run 'fourdst-cli bundle fill' to build one.",
                    fg=typer.colors.YELLOW
                )

    finally:
        if staging_dir.exists():
            shutil.rmtree(staging_dir)
# fourdst/cli/bundle/sign.py

import hashlib
import shutil
import sys
import zipfile
from pathlib import Path

import typer
import yaml

try:
    from cryptography.hazmat.primitives import serialization, hashes
    from cryptography.hazmat.primitives.asymmetric import padding, rsa, ed25519
except ImportError:
    print("Error: This CLI now requires 'cryptography'. Please install it.", file=sys.stderr)
    print("Run: pip install cryptography", file=sys.stderr)
    sys.exit(1)

from fourdst.cli.common.utils import calculate_sha256

bundle_app = typer.Typer()


@bundle_app.command("sign")
def bundle_sign(
    bundle_path: Path = typer.Argument(..., help="The .fbundle file to sign.", exists=True),
    private_key: Path = typer.Option(..., "--key", "-k", help="Path to the author's private signing key.", exists=True)
):
    """
    Signs a bundle with an author's private key, adding checksums and a signature.

    Writes a sha256 checksum for every binary into the manifest, records the
    signing key's fingerprint, signs the manifest (Ed25519 or RSA/PKCS1v15 +
    SHA256), and repackages the bundle in place.
    """
    print(f"Signing bundle: {bundle_path}")
    staging_dir = Path("temp_sign_staging")
    if staging_dir.exists():
        shutil.rmtree(staging_dir)

    # BUGFIX: wrap the whole operation so a failure mid-sign no longer leaks
    # the temp_sign_staging/ directory on disk.
    try:
        # 1. Unpack the bundle.
        with zipfile.ZipFile(bundle_path, 'r') as bundle_zip:
            bundle_zip.extractall(staging_dir)

        manifest_path = staging_dir / "manifest.yaml"
        if not manifest_path.exists():
            print("Error: manifest.yaml not found in bundle.", file=sys.stderr)
            raise typer.Exit(code=1)

        # 2. Load private key and derive public key to get fingerprint.
        with open(private_key, "rb") as key_file:
            key_data = key_file.read()
        try:
            priv_key_obj = serialization.load_ssh_private_key(key_data, password=None)
        except TypeError:
            # BUGFIX: cryptography raises TypeError for an encrypted key with
            # no password; previously this surfaced as a raw traceback.
            print("Error: The private key is passphrase-protected. Decrypt it (e.g. ssh-keygen -p) or provide an unencrypted key.", file=sys.stderr)
            raise typer.Exit(code=1)

        pub_key_obj = priv_key_obj.public_key()
        pub_key_bytes = pub_key_obj.public_bytes(
            encoding=serialization.Encoding.OpenSSH,
            format=serialization.PublicFormat.OpenSSH
        )
        fingerprint = "sha256:" + hashlib.sha256(pub_key_bytes).hexdigest()
        print(f"  - Signing with key fingerprint: {fingerprint}")

        # 3. Update manifest with checksums and fingerprint.
        with open(manifest_path, 'r') as f:
            manifest = yaml.safe_load(f)

        manifest['bundleAuthorKeyFingerprint'] = fingerprint
        # BUGFIX: .get() — a manifest without any plugins used to KeyError.
        for plugin in manifest.get('bundlePlugins', {}).values():
            for binary in plugin.get('binaries', []):
                binary_path = staging_dir / binary['path']
                if binary_path.exists():
                    binary['checksum'] = "sha256:" + calculate_sha256(binary_path)
                else:
                    binary['checksum'] = "MISSING_FILE"

        with open(manifest_path, 'w') as f:
            yaml.dump(manifest, f, sort_keys=False)
        print("  - Added file checksums and key fingerprint to manifest.")

        # 4. Sign the manifest bytes exactly as written.
        manifest_content = manifest_path.read_bytes()

        if isinstance(priv_key_obj, ed25519.Ed25519PrivateKey):
            signature = priv_key_obj.sign(manifest_content)
        elif isinstance(priv_key_obj, rsa.RSAPrivateKey):
            signature = priv_key_obj.sign(
                manifest_content,
                padding.PKCS1v15(),
                hashes.SHA256()
            )
        else:
            print("Error: Unsupported private key type for signing.", file=sys.stderr)
            raise typer.Exit(code=1)

        sig_path = staging_dir / "manifest.sig"
        sig_path.write_bytes(signature)
        print("  - Created manifest.sig.")

        # 5. Repackage the bundle in place.
        with zipfile.ZipFile(bundle_path, 'w', zipfile.ZIP_DEFLATED) as bundle_zip:
            for file_path in staging_dir.rglob('*'):
                if file_path.is_file():
                    bundle_zip.write(file_path, file_path.relative_to(staging_dir))

        print("\n✅ Bundle signed successfully!")

    finally:
        if staging_dir.exists():
            shutil.rmtree(staging_dir)
Check for manifest + manifest_file = path / "manifest.yaml" + if not check(manifest_file.is_file(), "Found manifest.yaml.", "Missing manifest.yaml file."): + raise typer.Exit(code=1) + + try: + manifest = yaml.safe_load(manifest_file.read_text()) + check(True, "Manifest file is valid YAML.", "") + except yaml.YAMLError as e: + check(False, "", f"Manifest file is not valid YAML: {e}") + raise typer.Exit(code=1) + + # 2. Check manifest content + check(manifest is not None, "Manifest is not empty.", "Manifest file is empty.", is_warning=True) + check('bundleName' in manifest, "Manifest contains 'bundleName'.", "Manifest is missing 'bundleName'.") + check('bundleVersion' in manifest, "Manifest contains 'bundleVersion'.", "Manifest is missing 'bundleVersion'.") + + plugins = manifest.get('bundlePlugins', {}) + check(plugins, "Manifest contains 'bundlePlugins' section.", "Manifest is missing 'bundlePlugins' section.") + + # 3. Check files listed in manifest + for name, data in plugins.items(): + console.print(f"\n--- Validating plugin: [bold cyan]{name}[/bold cyan] ---") + sdist_info = data.get('sdist', {}) + sdist_path_str = sdist_info.get('path') + + if check(sdist_path_str, "sdist path is defined.", f"sdist path not defined for plugin '{name}'."): + sdist_path = path / sdist_path_str + check(sdist_path.exists(), f"sdist file found: {sdist_path_str}", f"sdist file not found: {sdist_path_str}") + + for binary in data.get('binaries', []): + bin_path_str = binary.get('path') + if not check(bin_path_str, "Binary path is defined.", "Binary entry is missing a 'path'."): + continue + + bin_path = path / bin_path_str + if check(bin_path.exists(), f"Binary file found: {bin_path_str}", f"Binary file not found: {bin_path_str}"): + expected_checksum = binary.get('checksum') + if check(expected_checksum, "Checksum is defined.", f"Checksum not defined for binary '{bin_path_str}'.", is_warning=True): + actual_checksum = "sha256:" + _calculate_sha256(bin_path) + check( + 
actual_checksum == expected_checksum, + f"Checksum matches for {bin_path_str}", + f"Checksum mismatch for {bin_path_str}.\n Expected: {expected_checksum}\n Actual: {actual_checksum}" + ) + + # 4. Check for signature + check((path / "manifest.sig").exists(), "Signature file 'manifest.sig' found.", "Signature file 'manifest.sig' is missing.", is_warning=True) + + # Final summary + console.print("-" * 40) + if errors == 0: + console.print(Panel( + f"[bold green]Validation Passed[/bold green]\nWarnings: {warnings}", + title="Result", + border_style="green" + )) + else: + console.print(Panel( + f"[bold red]Validation Failed[/bold red]\nErrors: {errors}\nWarnings: {warnings}", + title="Result", + border_style="red" + )) + raise typer.Exit(code=1) + +def _validate_bundle_file(bundle_path: Path): + """Unpacks a .fbundle file and runs directory validation on its contents.""" + with tempfile.TemporaryDirectory() as temp_dir_str: + temp_dir = Path(temp_dir_str) + try: + with zipfile.ZipFile(bundle_path, 'r') as bundle_zip: + bundle_zip.extractall(temp_dir) + _validate_bundle_directory(temp_dir, is_temp=True) + except zipfile.BadZipFile: + console.print(Panel(f"[red]Error: '{bundle_path.name}' is not a valid zip file.[/red]", title="Validation Error")) + raise typer.Exit(code=1) + +def bundle_validate( + path: Path = typer.Argument( + ".", + help="The path to the .fbundle file or pre-bundle directory to validate.", + exists=True, + resolve_path=True + ) +): + """ + Validates a packed .fbundle or a directory ready to be packed. + + - If a directory is provided, it checks for a valid manifest and that all referenced files exist. + - If a .fbundle file is provided, it unpacks it and runs the same validation checks. 
+ """ + if path.is_dir(): + _validate_bundle_directory(path) + elif path.is_file(): + _validate_bundle_file(path) + else: + # This case should not be reached due to `exists=True` + console.print(Panel("[red]Error: Path is not a file or directory.[/red]", title="Validation Error")) + raise typer.Exit(code=1) diff --git a/fourdst/cli/cache/__init__.py b/fourdst/cli/cache/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/fourdst/cli/cache/clear.py b/fourdst/cli/cache/clear.py new file mode 100644 index 0000000..0b2f9c9 --- /dev/null +++ b/fourdst/cli/cache/clear.py @@ -0,0 +1,20 @@ +# fourdst/cli/cache/clear.py + +import typer +import shutil +from fourdst.cli.common.config import CACHE_PATH +import typer + +cache_app = typer.Typer() + +@cache_app.command("clear") +def cache_clear(): + """ + Clears all cached data, including the ABI signature. + Run this if you have updated your C++ compiler. + """ + if CACHE_PATH.exists(): + shutil.rmtree(CACHE_PATH) + print("✅ Local cache cleared.") + else: + print("No cache found to clear.") diff --git a/fourdst/cli/common/__init__.py b/fourdst/cli/common/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/fourdst/cli/common/config.py b/fourdst/cli/common/config.py new file mode 100644 index 0000000..3aa8240 --- /dev/null +++ b/fourdst/cli/common/config.py @@ -0,0 +1,16 @@ +# fourdst/cli/common/config.py + +from pathlib import Path + +FOURDST_CONFIG_DIR = Path.home() / ".config" / "fourdst" +LOCAL_TRUST_STORE_PATH = FOURDST_CONFIG_DIR / "keys" +CROSS_FILES_PATH = FOURDST_CONFIG_DIR / "cross" +CACHE_PATH = FOURDST_CONFIG_DIR / "cache" +ABI_CACHE_FILE = CACHE_PATH / "abi_identifier.json" +DOCKER_BUILD_IMAGES = { + "x86_64 (manylinux_2_28)": "quay.io/pypa/manylinux_2_28_x86_64", + "aarch64 (manylinux_2_28)": "quay.io/pypa/manylinux_2_28_aarch64", + "i686 (manylinux_2_28)" : "quay.io/pypa/manylinux_2_28_i686", + "ppc64le (manylinux_2_28)" : "quay.io/pypa/manylinux_2_28_ppc64le", + "s390x 
# fourdst/cli/common/templates.py

# C++ probe compiled once on the host to fingerprint the compiler / stdlib ABI.
# BUGFIX: the header names inside the #include directives had been stripped
# (angle-bracketed text lost, leaving bare `#include` lines and an empty
# `__has_include()`), so the probe could not compile. Restored <iostream>,
# <string> and the guarded <gnu/libc-version.h>.
ABI_DETECTOR_CPP_SRC = """
#include <iostream>
#include <string>

#ifdef __GNUC__
#if __has_include(<gnu/libc-version.h>)
#include <gnu/libc-version.h>
#endif
#endif

int main() {
    std::string os;
    std::string compiler;
    std::string compiler_version;
    std::string stdlib;
    std::string stdlib_version;
    std::string abi;

#if defined(__APPLE__) && defined(__MACH__)
    os = "macos";
#elif defined(__linux__)
    os = "linux";
#elif defined(_WIN32)
    os = "windows";
#else
    os = "unknown_os";
#endif

#if defined(__clang__)
    compiler = "clang";
    compiler_version = __clang_version__;
#elif defined(__GNUC__)
    compiler = "gcc";
    compiler_version = std::to_string(__GNUC__) + "." + std::to_string(__GNUC_MINOR__) + "." + std::to_string(__GNUC_PATCHLEVEL__);
#elif defined(_MSC_VER)
    compiler = "msvc";
    compiler_version = std::to_string(_MSC_VER);
#else
    compiler = "unknown_compiler";
    compiler_version = "0";
#endif

#if defined(_LIBCPP_VERSION)
    stdlib = "libc++";
    stdlib_version = std::to_string(_LIBCPP_VERSION);
    abi = "libc++_abi"; // On libc++, the ABI is tightly coupled with the library itself.
#elif defined(__GLIBCXX__)
    stdlib = "libstdc++";
    #if defined(_GLIBCXX_USE_CXX11_ABI)
        abi = _GLIBCXX_USE_CXX11_ABI == 1 ? "cxx11_abi" : "pre_cxx11_abi";
    #else
        abi = "pre_cxx11_abi";
    #endif
    #if __has_include(<gnu/libc-version.h>)
        stdlib_version = gnu_get_libc_version();
    #else
        stdlib_version = "unknown";
    #endif
#else
    stdlib = "unknown_stdlib";
    abi = "unknown_abi";
#endif

    std::cout << "os=" << os << std::endl;
    std::cout << "compiler=" << compiler << std::endl;
    std::cout << "compiler_version=" << compiler_version << std::endl;
    std::cout << "stdlib=" << stdlib << std::endl;
    if (!stdlib_version.empty()) {
        std::cout << "stdlib_version=" << stdlib_version << std::endl;
    }
    // Always print the ABI key for consistent parsing
    std::cout << "abi=" << abi << std::endl;

    return 0;
}
"""

# Minimal Meson project that builds the probe above.
ABI_DETECTOR_MESON_SRC = """
project('abi-detector', 'cpp', default_options : ['cpp_std=c++23'])
executable('detector', 'main.cpp')
"""

# .gitignore dropped into newly initialized plugin projects.
GITIGNORE_CONTENT = """# General
*.swp
*~
.DS_Store

# Python
__pycache__/
*.pyc
*.pyo
*.pyd
.Python
.venv/
venv/
env/
*.egg-info/
dist/

# C++ Build Artifacts
*.o
*.a
*.so
*.dylib
*.dll
*.lib
*.exe

# Meson Build System
# Ignore any directory containing meson-private, which is a reliable marker
**/meson-private/
# Also ignore common build directory names
build/
builddir/

# Subprojects - ignore all subdirectories except 'packagefiles' and root .wrap files
/subprojects/*
!/subprojects/packagefiles
!/subprojects/*.wrap

# Editor specific
.vscode/
.idea/
*.sublime-project
*.sublime-workspace
"""
Panel + +console = Console() + +from fourdst.cli.common.config import CACHE_PATH, ABI_CACHE_FILE, CROSS_FILES_PATH, DOCKER_BUILD_IMAGES +from fourdst.cli.common.templates import ABI_DETECTOR_CPP_SRC, ABI_DETECTOR_MESON_SRC + +def get_template_content(template_name: str) -> str: + """Safely reads content from a template file packaged with the CLI.""" + try: + return importlib.resources.files('fourdst.cli.templates').joinpath(template_name).read_text() + except FileNotFoundError: + print(f"Error: Template file '{template_name}' not found.", file=sys.stderr) + sys.exit(1) + +def run_command(command: list[str], cwd: Path = None, check=True, display_output: bool = False): + """Runs a command, optionally displaying its output in a formatted box.""" + command_str = ' '.join(command) + + try: + result = subprocess.run(command, check=check, capture_output=True, text=True, cwd=cwd) + + if display_output and (result.stdout or result.stderr): + output_text = "" + if result.stdout: + output_text += result.stdout.strip() + if result.stderr: + output_text += f"\n[yellow]{result.stderr.strip()}[/yellow]" + + console.print(Panel( + output_text, + title=f"Output from: `{command_str}`", + border_style="blue", + expand=False + )) + + return result + except subprocess.CalledProcessError as e: + if check: + output_text = "" + if e.stdout: + output_text += f"[bold]--- STDOUT ---[/bold]\n{e.stdout.strip()}" + if e.stderr: + output_text += f"\n[bold]--- STDERR ---[/bold]\n{e.stderr.strip()}" + + console.print(Panel( + output_text, + title=f"Error running: `{command_str}`", + border_style="red", + expand=False + )) + raise typer.Exit(code=1) + return e + +def _detect_and_cache_abi(cross_file: Path = None): + """ + Compiles and runs a C++ program to detect the compiler ABI, then caches it. 
+ """ + print("Performing one-time native C++ ABI detection...") + temp_dir = CACHE_PATH / "abi_detector" + if temp_dir.exists(): + shutil.rmtree(temp_dir) + temp_dir.mkdir(parents=True) + + try: + (temp_dir / "main.cpp").write_text(ABI_DETECTOR_CPP_SRC) + (temp_dir / "meson.build").write_text(ABI_DETECTOR_MESON_SRC) + + print(" - Configuring detector...") + run_command(["meson", "setup", "build"], cwd=temp_dir, display_output=True) + print(" - Compiling detector...") + run_command(["meson", "compile", "-C", "build"], cwd=temp_dir, display_output=True) + + detector_exe = temp_dir / "build" / "detector" + print(" - Running detector...") + proc = subprocess.run([str(detector_exe)], check=True, capture_output=True, text=True) + + abi_details = {} + for line in proc.stdout.strip().split('\n'): + if '=' in line: + key, value = line.split('=', 1) + abi_details[key.strip()] = value.strip() + + compiler = abi_details.get('compiler', 'unk_compiler') + stdlib = abi_details.get('stdlib', 'unk_stdlib') + stdlib_version = abi_details.get('stdlib_version', 'unk_stdlib_version') + abi = abi_details.get('abi', 'unk_abi') + abi_string = f"{compiler}-{stdlib}-{stdlib_version}-{abi}" + + detected_os = abi_details.get("os", "unknown_os") + arch = platform.machine() + + platform_identifier = { + "triplet": f"{arch}-{detected_os}", + "abi_signature": abi_string, + "details": abi_details, + "is_native": True, + "cross_file": None, + "docker_image": None, + "arch": arch + } + + with open(ABI_CACHE_FILE, 'w') as f: + json.dump(platform_identifier, f, indent=2) + + print(f"✅ Native ABI detected and cached: {abi_string}") + return platform_identifier + + finally: + if temp_dir.exists(): + shutil.rmtree(temp_dir) + +def get_platform_identifier() -> dict: + """ + Gets the native platform identifier, using a cached value if available. 
def get_platform_identifier() -> dict:
    """
    Gets the native platform identifier, using a cached value if available.
    """
    if not ABI_CACHE_FILE.exists():
        return _detect_and_cache_abi()
    with open(ABI_CACHE_FILE, 'r') as f:
        return json.load(f)

def get_available_build_targets() -> list:
    """Gets native, cross-compilation, and Docker build targets."""
    targets = [get_platform_identifier()]

    # Cross-file targets: one per *.cross file in the user's config directory.
    # NOTE(review): unlike the native and Docker entries, these dicts carry no
    # "arch" key — confirm downstream consumers tolerate its absence.
    CROSS_FILES_PATH.mkdir(exist_ok=True)
    targets.extend(
        {
            "triplet": cross_file.stem,
            "abi_signature": f"cross-{cross_file.stem}",
            "is_native": False,
            "cross_file": str(cross_file.resolve()),
            "docker_image": None,
        }
        for cross_file in CROSS_FILES_PATH.glob("*.cross")
    )

    # Docker targets: only when the docker SDK imported AND the daemon answers a ping.
    if docker:
        try:
            client = docker.from_env()
            client.ping()
        except Exception:
            typer.secho("Warning: Docker is installed but the daemon is not running. Docker targets are unavailable.", fg=typer.colors.YELLOW)
        else:
            for name, image in DOCKER_BUILD_IMAGES.items():
                arch = name.split(' ')[0]
                targets.append({
                    "triplet": f"linux-{arch}",
                    "abi_signature": f"docker-{image}",
                    "is_native": False,
                    "cross_file": None,
                    "docker_image": image,
                    "arch": arch,
                })

    return targets
Docker targets are unavailable.", fg=typer.colors.YELLOW) + + return targets + +def _build_plugin_for_target(sdist_path: Path, build_dir: Path, target: dict): + """Builds a plugin natively or with a cross file.""" + source_dir = build_dir / "src" + if source_dir.exists(): + shutil.rmtree(source_dir) + + with zipfile.ZipFile(sdist_path, 'r') as sdist_zip: + sdist_zip.extractall(source_dir) + + + setup_cmd = ["meson", "setup"] + if target["cross_file"]: + setup_cmd.extend(["--cross-file", target["cross_file"]]) + setup_cmd.append("build") + + run_command(setup_cmd, cwd=source_dir, display_output=True) + run_command(["meson", "compile", "-C", "build"], cwd=source_dir, display_output=True) + + meson_build_dir = source_dir / "build" + compiled_lib = next(meson_build_dir.rglob("lib*.so"), None) or next(meson_build_dir.rglob("lib*.dylib"), None) + if not compiled_lib: + raise FileNotFoundError("Could not find compiled library after build.") + + return compiled_lib, target # Return target as ABI is pre-determined + +def _build_plugin_in_docker(sdist_path: Path, build_dir: Path, target: dict, plugin_name: str): + """Builds a plugin inside a Docker container.""" + client = docker.from_env() + image_name = target["docker_image"] + + # Find arch from DOCKER_BUILD_IMAGES to create a clean triplet later + arch = "unknown_arch" + for name, img in DOCKER_BUILD_IMAGES.items(): + if img == image_name: + arch = name.split(' ')[0] + break + + typer.echo(f" - Pulling Docker image '{image_name}' (if necessary)...") + client.images.pull(image_name) + + source_dir = build_dir / "src" + if source_dir.exists(): + shutil.rmtree(source_dir) + + with zipfile.ZipFile(sdist_path, 'r') as sdist_zip: + sdist_zip.extractall(source_dir) + + # This script will be run inside the container + build_script = f""" + set -e + echo "--- Installing build dependencies ---" + export PATH="/opt/python/cp313-cp313/bin:$PATH" + pip install meson ninja cmake + + echo " -> ℹ meson version: $(meson --version) 
[$(which meson)]" + echo " -> ℹ ninja version: $(ninja --version) [$(which ninja)]" + echo " -> ℹ cmake version: $(cmake --version) [$(which cmake)]" + + echo "--- Configuring with Meson ---" + meson setup /build/meson_build + echo "--- Compiling with Meson ---" + meson compile -C /build/meson_build + echo "--- Running ABI detector ---" + # We need to build and run the ABI detector inside the container too + mkdir /tmp/abi && cd /tmp/abi + echo "{ABI_DETECTOR_CPP_SRC.replace('"', '\\"')}" > main.cpp + echo "{ABI_DETECTOR_MESON_SRC.replace('"', '\\"')}" > meson.build + meson setup build && meson compile -C build + ./build/detector > /build/abi_details.txt + """ + + container_build_dir = Path("/build") + + typer.echo(" - Running build container...") + container = client.containers.run( + image=image_name, + command=["/bin/sh", "-c", build_script], + volumes={str(source_dir.resolve()): {'bind': str(container_build_dir), 'mode': 'rw'}}, + working_dir=str(container_build_dir), + detach=True + ) + + # Stream logs + for line in container.logs(stream=True, follow=True): + typer.echo(f" [docker] {line.decode('utf-8').strip()}") + + result = container.wait() + if result["StatusCode"] != 0: + # The container is stopped, but we can still inspect its filesystem by restarting it briefly. + log_output = container.logs() + container.remove() # Clean up before raising + typer.secho(f"Build failed inside Docker. Full log:\n{log_output.decode('utf-8')}", fg=typer.colors.RED) + raise subprocess.CalledProcessError(result["StatusCode"], "Build inside Docker failed.") + + # Retrieve artifacts by searching inside the container's filesystem + typer.echo(" - Locating compiled library in container...") + meson_build_dir_str = (container_build_dir / "meson_build").as_posix() + expected_lib_name = f"lib{plugin_name}.so" + + find_cmd = f"find {meson_build_dir_str} -name {expected_lib_name}" + + # We need to run the find command in the now-stopped container. 
+ # We can't use exec_run on a stopped container, but we can create a new + # one that uses the same filesystem (volume) to find the file. + try: + find_output = client.containers.run( + image=image_name, + command=["/bin/sh", "-c", find_cmd], + volumes={str(source_dir.resolve()): {'bind': str(container_build_dir), 'mode': 'ro'}}, + remove=True, # Clean up the find container immediately + detach=False + ) + found_path_str = find_output.decode('utf-8').strip() + if not found_path_str: + raise FileNotFoundError("Find command returned no path.") + compiled_lib = Path(found_path_str) + typer.echo(f" - Found library at: {compiled_lib}") + + except Exception as e: + typer.secho(f" - Error: Could not locate '{expected_lib_name}' inside the container.", fg=typer.colors.RED) + typer.secho(f" Details: {e}", fg=typer.colors.RED) + raise FileNotFoundError("Could not find compiled library in container after a successful build.") + + # Get the ABI details from the container + abi_details_content = "" + bits, _ = container.get_archive(str(container_build_dir / "abi_details.txt")) + for chunk in bits: + abi_details_content += chunk.decode('utf-8') + + # We need to find the actual file content within the tar stream + # This is a simplification; a real implementation would use the `tarfile` module + actual_content = abi_details_content.split('\n', 1)[1] if '\n' in abi_details_content else abi_details_content + actual_content = actual_content.split('main.cpp')[1].strip() if 'main.cpp' in actual_content else actual_content + actual_content = actual_content.rsplit('0755', 1)[0].strip() if '0755' in actual_content else actual_content + + + abi_details = {} + for line in actual_content.strip().split('\n'): + if '=' in line: + key, value = line.split('=', 1) + abi_details[key.strip()] = value.strip() + + compiler = abi_details.get('compiler', 'unk_compiler') + stdlib = abi_details.get('stdlib', 'unk_stdlib') + stdlib_version = abi_details.get('stdlib_version', 'unk_stdlib_version') + abi 
= abi_details.get('abi', 'unk_abi') + abi_string = f"{compiler}-{stdlib}-{stdlib_version}-{abi}" + + final_target = { + "triplet": f"{abi_details.get('os', 'linux')}-{arch}", + "abi_signature": abi_string, + "is_native": False, + "cross_file": None, + "docker_image": image_name, + "arch": arch + } + + # Copy the binary out + local_lib_path = build_dir / compiled_lib.name + bits, _ = container.get_archive(str(compiled_lib)) + with open(local_lib_path, 'wb') as f: + for chunk in bits: + f.write(chunk) + + container.remove() + + return local_lib_path, final_target + + +def calculate_sha256(file_path: Path) -> str: + """Calculates the SHA256 checksum of a file.""" + sha256_hash = hashlib.sha256() + with open(file_path, "rb") as f: + for byte_block in iter(lambda: f.read(4096), b""): + sha256_hash.update(byte_block) + return sha256_hash.hexdigest() + +def parse_cpp_header(header_path: Path): + """ + Parses a C++ header file using libclang to find classes and their pure virtual methods. + """ + # This function requires python-clang-16 + try: + from clang import cindex + except ImportError: + print("Error: The 'init' command requires 'libclang'. Please install it.", file=sys.stderr) + print("Run: pip install python-clang-16", file=sys.stderr) + # Also ensure the libclang.so/dylib is in your system's library path. + raise typer.Exit(code=1) + + if not cindex.Config.loaded: + try: + # Attempt to find libclang automatically. This may need to be configured by the user. + cindex.Config.set_library_file(cindex.conf.get_filename()) + except cindex.LibclangError as e: + print(f"Error: libclang library not found. 
def parse_cpp_header(header_path: Path):
    """
    Parses a C++ header file using libclang to find classes and their pure virtual methods.

    Returns:
        dict mapping class name -> list of {'signature', 'body'} stubs, one per
        pure-virtual method. Classes without pure-virtual methods are omitted.
    """
    # This function requires python-clang-16
    try:
        from clang import cindex
    except ImportError:
        print("Error: The 'init' command requires 'libclang'. Please install it.", file=sys.stderr)
        print("Run: pip install python-clang-16", file=sys.stderr)
        # Also ensure the libclang.so/dylib is in your system's library path.
        raise typer.Exit(code=1)

    if not cindex.Config.loaded:
        try:
            # Attempt to find libclang automatically. This may need to be configured by the user.
            cindex.Config.set_library_file(cindex.conf.get_filename())
        except cindex.LibclangError as e:
            print(f"Error: libclang library not found. Please ensure it's installed and in your system's path.", file=sys.stderr)
            print(f"Details: {e}", file=sys.stderr)
            raise typer.Exit(code=1)

    # --- Get compiler flags from pkg-config to help clang find includes ---
    try:
        pkg_config_proc = subprocess.run(
            ['pkg-config', '--cflags', 'fourdst_plugin'],
            capture_output=True,
            text=True,
            check=True
        )
        # Split the flags string into a list of arguments for libclang
        compiler_flags = pkg_config_proc.stdout.strip().split()
        print(f"Using compiler flags from pkg-config: {' '.join(compiler_flags)}")
    except (subprocess.CalledProcessError, FileNotFoundError):
        # FIX: this warning previously named 'fourdst-plugin', which is not the
        # package actually queried above ('fourdst_plugin'); the messages now
        # match the real command so users can reproduce it.
        print("Warning: `pkg-config --cflags fourdst_plugin` failed. Parsing may not succeed if the header has dependencies.", file=sys.stderr)
        print("Please ensure 'pkg-config' is installed and 'fourdst_plugin.pc' is in your PKG_CONFIG_PATH.", file=sys.stderr)
        compiler_flags = []

    index = cindex.Index.create()
    # Add the pkg-config flags to the parser arguments
    translation_unit = index.parse(str(header_path), args=['-x', 'c++', '-std=c++23'] + compiler_flags)

    interfaces = {}
    for cursor in translation_unit.cursor.walk_preorder():
        if cursor.kind == cindex.CursorKind.CLASS_DECL and cursor.is_definition():
            class_name = cursor.spelling
            methods = []
            for child in cursor.get_children():
                if child.kind == cindex.CursorKind.CXX_METHOD and child.is_pure_virtual_method():
                    method_name = child.spelling
                    result_type = child.result_type.spelling
                    # Recreate the full method signature.
                    # FIX: removed the unused `params` list — it computed
                    # fallback names that were never used anywhere.
                    param_str = ", ".join(f"{p.type.spelling} {p.spelling}" for p in child.get_arguments())
                    const_qualifier = " const" if child.is_const_method() else ""

                    signature = f"{result_type} {method_name}({param_str}){const_qualifier}"

                    # Generate a placeholder body
                    body = f"    // TODO: Implement the {method_name} method.\n"
                    if result_type != "void":
                        body += f"    return {{}};"  # Default return

                    methods.append({'signature': signature, 'body': body})

            if methods:  # Only consider classes with pure virtual methods as interfaces
                interfaces[class_name] = methods

    return interfaces

# --- fourdst/cli/keys/add.py ---
import typer
import shutil
from pathlib import Path
from fourdst.cli.common.config import LOCAL_TRUST_STORE_PATH

MANUAL_KEYS_DIR = LOCAL_TRUST_STORE_PATH / "manual"

def keys_add(
    key_path: Path = typer.Argument(..., help="Path to the public key file to add.", exists=True, readable=True)
):
    """Adds a single public key to the local trust store."""
    MANUAL_KEYS_DIR.mkdir(parents=True, exist_ok=True)

    destination = MANUAL_KEYS_DIR / key_path.name
    if destination.exists():
        # check content
        if destination.read_bytes() == key_path.read_bytes():
            typer.secho(f"Key '{key_path.name}' with same content already exists.", fg=typer.colors.YELLOW)
            return
        # NOTE(review): a same-named key with *different* content falls
        # through and is overwritten below — confirm this is intended.

    shutil.copy(key_path, destination)
    typer.secho(f"✅ Key '{key_path.name}' added to manual trust store.", fg=typer.colors.GREEN)
keys_app = typer.Typer()

@keys_app.command("generate")
def keys_generate(
    key_name: str = typer.Option("author_key", "--name", "-n", help="The base name for the generated key files.")
):
    """
    Generates a new Ed25519 key pair for signing bundles.
    """
    private_key_path = Path(f"{key_name}")
    public_key_path = Path(f"{key_name}.pub")

    # Refuse to clobber existing key material.
    if private_key_path.exists() or public_key_path.exists():
        print(f"Error: Key files '{private_key_path}' or '{public_key_path}' already exist.", file=sys.stderr)
        raise typer.Exit(code=1)

    print("Generating Ed25519 key pair...")
    keygen_cmd = [
        "ssh-keygen",
        "-t", "ed25519",
        "-f", str(private_key_path),
        "-N", "",  # No passphrase
        "-C", "fourdst bundle signing key",
    ]
    run_command(keygen_cmd)

    print("\n✅ Keys generated successfully!")
    print(f" -> Private Key (KEEP SECRET): {private_key_path.resolve()}")
    print(f" -> Public Key (SHARE): {public_key_path.resolve()}")
    print("\nShare the public key with users who need to trust your bundles.")

# --- fourdst/cli/keys/list.py ---
def keys_list():
    """Lists all trusted public keys."""
    if not LOCAL_TRUST_STORE_PATH.exists():
        typer.echo("Trust store not found.")
        return

    found_any = False
    for source_dir in LOCAL_TRUST_STORE_PATH.iterdir():
        if not source_dir.is_dir():
            continue
        keys = list(source_dir.glob("*.pub"))
        if not keys:
            continue
        found_any = True
        typer.secho(f"\n--- Source: {source_dir.name} ---", bold=True)
        for key_file in keys:
            typer.echo(f" - {key_file.name}")

    if not found_any:
        typer.echo("No trusted keys found.")
# --- fourdst/cli/keys/remote/add.py ---
import typer
import json
from pathlib import Path
from fourdst.cli.common.config import FOURDST_CONFIG_DIR

KEY_REMOTES_CONFIG = FOURDST_CONFIG_DIR / "key_remotes.json"

def remote_add(
    url: str = typer.Argument(..., help="The URL of the Git repository."),
    name: str = typer.Argument(..., help="A local name for the remote.")
):
    """Adds a new remote key source."""
    FOURDST_CONFIG_DIR.mkdir(parents=True, exist_ok=True)

    # Load the existing remote list, or start a fresh one.
    config = {"remotes": []}
    if KEY_REMOTES_CONFIG.exists():
        with open(KEY_REMOTES_CONFIG, 'r') as f:
            config = json.load(f)

    if any(remote['name'] == name for remote in config['remotes']):
        typer.secho(f"Error: Remote with name '{name}' already exists.", fg=typer.colors.RED)
        raise typer.Exit(code=1)

    config['remotes'].append({"name": name, "url": url})
    with open(KEY_REMOTES_CONFIG, 'w') as f:
        json.dump(config, f, indent=2)

    typer.secho(f"✅ Remote '{name}' added.", fg=typer.colors.GREEN)

# --- fourdst/cli/keys/remote/list.py ---
def remote_list():
    """Lists all configured remote key sources."""
    if not KEY_REMOTES_CONFIG.exists():
        typer.echo("No remotes configured.")
        return

    with open(KEY_REMOTES_CONFIG, 'r') as f:
        config = json.load(f)

    remotes = config.get("remotes")
    if not remotes:
        typer.echo("No remotes configured.")
        return

    typer.secho("Configured Key Remotes:", bold=True)
    for remote in remotes:
        typer.echo(f" - {remote['name']}: {remote['url']}")
# --- fourdst/cli/keys/remote/remove.py ---
import typer
import json
from pathlib import Path
from fourdst.cli.common.config import FOURDST_CONFIG_DIR

KEY_REMOTES_CONFIG = FOURDST_CONFIG_DIR / "key_remotes.json"

def remote_remove(
    name: str = typer.Argument(..., help="The name of the remote to remove.")
):
    """Removes a remote key source."""
    if not KEY_REMOTES_CONFIG.exists():
        typer.secho("Error: No remotes configured.", fg=typer.colors.RED)
        raise typer.Exit(code=1)

    with open(KEY_REMOTES_CONFIG, 'r') as f:
        config = json.load(f)

    # Drop the matching entry; an unchanged length means the name was unknown.
    kept = [remote for remote in config['remotes'] if remote['name'] != name]
    if len(kept) == len(config['remotes']):
        typer.secho(f"Error: Remote '{name}' not found.", fg=typer.colors.RED)
        raise typer.Exit(code=1)
    config['remotes'] = kept

    with open(KEY_REMOTES_CONFIG, 'w') as f:
        json.dump(config, f, indent=2)

    typer.secho(f"✅ Remote '{name}' removed.", fg=typer.colors.GREEN)
def get_key_fingerprint(key_path: Path) -> str:
    """Generates a SHA256 fingerprint for a public key."""
    # Assuming OpenSSH format, the fingerprint is based on the raw public key bytes
    # For simplicity, we'll hash the whole file content.
    return "sha256:" + hashlib.sha256(key_path.read_bytes()).hexdigest()

def _remove_keys_matching(key_path: Path):
    """Deletes every stored key whose bytes match the given file."""
    target_content = key_path.read_bytes()
    key_removed = False
    for source_dir in LOCAL_TRUST_STORE_PATH.iterdir():
        if not source_dir.is_dir():
            continue
        for pub_key in source_dir.glob("*.pub"):
            if pub_key.read_bytes() == target_content:
                pub_key.unlink()
                typer.secho(f"✅ Removed key '{pub_key.name}' from source '{source_dir.name}'.", fg=typer.colors.GREEN)
                key_removed = True
    if not key_removed:
        typer.secho("No matching key found to remove.", fg=typer.colors.YELLOW)

def _remove_keys_interactively():
    """Prompts for a selection of stored keys and deletes them."""
    all_keys = [
        pub_key
        for source_dir in LOCAL_TRUST_STORE_PATH.iterdir()
        if source_dir.is_dir()
        for pub_key in source_dir.glob("*.pub")
    ]

    if not all_keys:
        typer.echo("No keys to remove.")
        raise typer.Exit()

    choices = [
        {
            "name": f"{key.relative_to(LOCAL_TRUST_STORE_PATH)} ({get_key_fingerprint(key)})",
            "value": key
        } for key in all_keys
    ]

    selected_to_remove = questionary.checkbox("Select keys to remove:", choices=choices).ask()

    if selected_to_remove:
        for key_to_remove in selected_to_remove:
            key_to_remove.unlink()
            typer.secho(f"✅ Removed key '{key_to_remove.name}'.", fg=typer.colors.GREEN)

def keys_remove(
    key_path: Path = typer.Argument(None, help="Path to the public key file to remove.", exists=True, readable=True)
):
    """Removes a single public key from the local trust store."""
    if not LOCAL_TRUST_STORE_PATH.exists():
        typer.secho("Trust store not found.", fg=typer.colors.RED)
        raise typer.Exit(code=1)

    if key_path:
        # Non-interactive path: remove every stored key with identical content.
        _remove_keys_matching(key_path)
    else:
        # Interactive path: let the user pick keys from a checklist.
        _remove_keys_interactively()
KEY_REMOTES_CONFIG = FOURDST_CONFIG_DIR / "key_remotes.json"
REMOTES_DIR = LOCAL_TRUST_STORE_PATH / "remotes"

# NOTE(review): this module-local Typer instance duplicates the keys_app that
# main.py builds and registers keys_sync on directly — confirm one of the two
# registration paths can be dropped.
keys_app = typer.Typer()

@keys_app.command("sync")
def keys_sync():
    """
    Syncs the local trust store with all configured remote Git repositories.
    """
    # FIX: run_command is called below but keys/sync.py only imported
    # FOURDST_CONFIG_DIR / LOCAL_TRUST_STORE_PATH, so every sync raised
    # NameError. Import the helper explicitly.
    from fourdst.cli.common.utils import run_command

    if not KEY_REMOTES_CONFIG.exists():
        typer.secho("No remotes configured. Use 'fourdst-cli keys remote add' to add one.", fg=typer.colors.YELLOW)
        raise typer.Exit()

    with open(KEY_REMOTES_CONFIG, 'r') as f:
        config = json.load(f)

    remotes = config.get("remotes", [])
    if not remotes:
        typer.secho("No remotes configured.", fg=typer.colors.YELLOW)
        raise typer.Exit()

    REMOTES_DIR.mkdir(parents=True, exist_ok=True)

    remotes_to_remove = []

    for remote in remotes:
        name = remote['name']
        url = remote['url']
        remote_path = REMOTES_DIR / name

        typer.secho(f"--- Syncing remote '{name}' from {url} ---", bold=True)

        try:
            # Update an existing clone in place; otherwise make a shallow clone.
            if remote_path.exists():
                run_command(["git", "pull"], cwd=remote_path)
            else:
                run_command(["git", "clone", "--depth", "1", url, str(remote_path)])

            # Clean up non-public key files
            for item in remote_path.iterdir():
                if item.is_file() and item.suffix != '.pub':
                    item.unlink()

            typer.secho(f"✅ Sync successful for '{name}'.", fg=typer.colors.GREEN)

        except Exception as e:
            typer.secho(f"⚠️ Failed to sync remote '{name}': {e}", fg=typer.colors.YELLOW)
            if questionary.confirm(f"Do you want to remove the remote '{name}'?").ask():
                remotes_to_remove.append(name)

    # Persist the pruned remote list if the user chose to drop any failures.
    if remotes_to_remove:
        config['remotes'] = [r for r in config['remotes'] if r['name'] not in remotes_to_remove]
        with open(KEY_REMOTES_CONFIG, 'w') as f:
            json.dump(config, f, indent=2)
        typer.secho(f"Removed failing remotes: {', '.join(remotes_to_remove)}", fg=typer.colors.YELLOW)
-import os -import sys -import shutil -import subprocess from pathlib import Path -import importlib.resources -import questionary -import yaml -import zipfile -import hashlib -import platform -import datetime -import json -# --- Third-party libraries required for new features --- -# These would need to be added to the project's dependencies (e.g., in pyproject.toml) -try: - from cryptography.hazmat.primitives import serialization, hashes - from cryptography.hazmat.primitives.asymmetric import padding, rsa, ed25519 - from cryptography.exceptions import InvalidSignature -except ImportError: - print("Error: This CLI now requires 'cryptography' and 'PyYAML'. Please install them.", file=sys.stderr) - print("Run: pip install cryptography pyyaml", file=sys.stderr) - sys.exit(1) +from fourdst.cli.common.config import CACHE_PATH + +from fourdst.cli.bundle.create import bundle_create +from fourdst.cli.bundle.fill import bundle_fill +from fourdst.cli.bundle.sign import bundle_sign +from fourdst.cli.bundle.inspect import bundle_inspect +from fourdst.cli.bundle.clear import bundle_clear +from fourdst.cli.bundle.diff import bundle_diff +from fourdst.cli.bundle.validate import bundle_validate + +from fourdst.cli.plugin.init import plugin_init +from fourdst.cli.plugin.pack import plugin_pack +from fourdst.cli.plugin.extract import plugin_extract +from fourdst.cli.plugin.diff import plugin_diff +from fourdst.cli.plugin.validate import plugin_validate + +from fourdst.cli.cache.clear import cache_clear + +from fourdst.cli.keys.generate import keys_generate +from fourdst.cli.keys.sync import keys_sync +from fourdst.cli.keys.add import keys_add +from fourdst.cli.keys.remove import keys_remove +from fourdst.cli.keys.list import keys_list + +from fourdst.cli.keys.remote.add import remote_add +from fourdst.cli.keys.remote.list import remote_list +from fourdst.cli.keys.remote.remove import remote_remove -try: - import docker -except ImportError: - docker = None # Docker is an optional 
dependency for the 'fill' command -# --- Main Typer application --- app = typer.Typer( name="fourdst-cli", help="A command-line tool for managing fourdst projects, plugins, and bundles." ) + plugin_app = typer.Typer(name="plugin", help="Commands for managing individual fourdst plugins.") bundle_app = typer.Typer(name="bundle", help="Commands for creating, signing, and managing plugin bundles.") -keys_app = typer.Typer(name="keys", help="Commands for cryptographic key generation and management.") cache_app = typer.Typer(name="cache", help="Commands for managing the local cache.") +keys_app = typer.Typer(name="keys", help="Commands for cryptographic key generation and management.") +remote_app = typer.Typer(name="remote", help="Manage remote git repositories for public keys.") + +# Add commands to their respective apps +plugin_app.command("init")(plugin_init) +plugin_app.command("pack")(plugin_pack) +plugin_app.command("extract")(plugin_extract) +plugin_app.command("validate")(plugin_validate) +plugin_app.command("diff")(plugin_diff) + +bundle_app.command("create")(bundle_create) +bundle_app.command("fill")(bundle_fill) +bundle_app.command("sign")(bundle_sign) +bundle_app.command("inspect")(bundle_inspect) +bundle_app.command("clear")(bundle_clear) +bundle_app.command("diff")(bundle_diff) +bundle_app.command("validate")(bundle_validate) + +cache_app.command("clear")(cache_clear) + + +keys_app.add_typer(remote_app) + +keys_app.command("generate")(keys_generate) +keys_app.command("sync")(keys_sync) +keys_app.command("add")(keys_add) +keys_app.command("remove")(keys_remove) +keys_app.command("list")(keys_list) + +remote_app.command("add")(remote_add) +remote_app.command("list")(remote_list) +remote_app.command("remove")(remote_remove) + + +# Add the sub-apps to the main app app.add_typer(plugin_app, name="plugin") app.add_typer(bundle_app, name="bundle") app.add_typer(keys_app, name="keys") app.add_typer(cache_app, name="cache") - -# --- Configuration --- 
-FOURDST_CONFIG_DIR = Path.home() / ".config" / "fourdst" -LOCAL_TRUST_STORE_PATH = FOURDST_CONFIG_DIR / "keys" -CROSS_FILES_PATH = FOURDST_CONFIG_DIR / "cross" -CACHE_PATH = FOURDST_CONFIG_DIR / "cache" -ABI_CACHE_FILE = CACHE_PATH / "abi_identifier.json" -DOCKER_BUILD_IMAGES = { - "x86_64 (manylinux_2_28)": "quay.io/pypa/manylinux_2_28_x86_64", - "aarch64 (manylinux_2_28)": "quay.io/pypa/manylinux_2_28_aarch64", - "i686 (manylinux_2_28)" : "quay.io/pypa/manylinux_2_28_i686", - "ppc64le (manylinux_2_28)" : "quay.io/pypa/manylinux_2_28_ppc64le", - "s390x (manylinux_2_28)" : "quay.io/pypa/manylinux_2_28_s390x" -} - -# --- C++ ABI Detector Source --- -ABI_DETECTOR_CPP_SRC = """ -#include -#include -#include - -#ifdef __GNUC__ -#if __has_include() -#include -#endif -#endif - -int main() { - std::string os; - std::string compiler; - std::string compiler_version; - std::string stdlib; - std::string stdlib_version; - std::string abi; - -#if defined(__APPLE__) && defined(__MACH__) - os = "macos"; -#elif defined(__linux__) - os = "linux"; -#elif defined(_WIN32) - os = "windows"; -#else - os = "unknown_os"; -#endif - -#if defined(__clang__) - compiler = "clang"; - compiler_version = __clang_version__; -#elif defined(__GNUC__) - compiler = "gcc"; - compiler_version = std::to_string(__GNUC__) + "." + std::to_string(__GNUC_MINOR__) + "." + std::to_string(__GNUC_PATCHLEVEL__); -#elif defined(_MSC_VER) - compiler = "msvc"; - compiler_version = std::to_string(_MSC_VER); -#else - compiler = "unknown_compiler"; - compiler_version = "0"; -#endif - -#if defined(_LIBCPP_VERSION) - stdlib = "libc++"; - stdlib_version = std::to_string(_LIBCPP_VERSION); - abi = "libc++_abi"; // On libc++, the ABI is tightly coupled with the library itself. -#elif defined(__GLIBCXX__) - stdlib = "libstdc++"; - #if defined(_GLIBCXX_USE_CXX11_ABI) - abi = _GLIBCXX_USE_CXX11_ABI == 1 ? 
"cxx11_abi" : "pre_cxx11_abi"; - #else - abi = "pre_cxx11_abi"; - #endif - #if __has_include() - stdlib_version = gnu_get_libc_version(); - #else - stdlib_version = "unknown"; - #endif -#else - stdlib = "unknown_stdlib"; - abi = "unknown_abi"; -#endif - - std::cout << "os=" << os << std::endl; - std::cout << "compiler=" << compiler << std::endl; - std::cout << "compiler_version=" << compiler_version << std::endl; - std::cout << "stdlib=" << stdlib << std::endl; - if (!stdlib_version.empty()) { - std::cout << "stdlib_version=" << stdlib_version << std::endl; - } - // Always print the ABI key for consistent parsing - std::cout << "abi=" << abi << std::endl; - - return 0; -} -""" - -ABI_DETECTOR_MESON_SRC = """ -project('abi-detector', 'cpp', default_options : ['cpp_std=c++23']) -executable('detector', 'main.cpp') -""" - -# --- .gitignore Template --- -GITIGNORE_CONTENT = """# General -*.swp -*~ -.DS_Store - -# Python -__pycache__/ -*.pyc -*.pyo -*.pyd -.Python -.venv/ -venv/ -env/ -*.egg-info/ -dist/ - -# C++ Build Artifacts -*.o -*.a -*.so -*.dylib -*.dll -*.lib -*.exe - -# Meson Build System -# Ignore any directory containing meson-private, which is a reliable marker -**/meson-private/ -# Also ignore common build directory names -build/ -builddir/ - -# Subprojects - ignore all subdirectories except 'packagefiles' and root .wrap files -/subprojects/* -!/subprojects/packagefiles -!/subprojects/*.wrap - -# Editor specific -.vscode/ -.idea/ -*.sublime-project -*.sublime-workspace -""" - - -# --- Helper Functions --- - -def get_template_content(template_name: str) -> str: - """Safely reads content from a template file packaged with the CLI.""" - try: - return importlib.resources.files('fourdst.cli.templates').joinpath(template_name).read_text() - except FileNotFoundError: - print(f"Error: Template file '{template_name}' not found.", file=sys.stderr) - sys.exit(1) - -def _detect_and_cache_abi(cross_file: Path = None): - """ - Compiles and runs a C++ program to detect the 
compiler ABI, then caches it. - """ - print("Performing one-time native C++ ABI detection...") - temp_dir = CACHE_PATH / "abi_detector" - if temp_dir.exists(): - shutil.rmtree(temp_dir) - temp_dir.mkdir(parents=True) - - try: - (temp_dir / "main.cpp").write_text(ABI_DETECTOR_CPP_SRC) - (temp_dir / "meson.build").write_text(ABI_DETECTOR_MESON_SRC) - - print(" - Configuring detector...") - run_command(["meson", "setup", "build"], cwd=temp_dir) - print(" - Compiling detector...") - run_command(["meson", "compile", "-C", "build"], cwd=temp_dir) - - detector_exe = temp_dir / "build" / "detector" - print(" - Running detector...") - proc = subprocess.run([str(detector_exe)], check=True, capture_output=True, text=True) - - abi_details = {} - for line in proc.stdout.strip().split('\n'): - if '=' in line: - key, value = line.split('=', 1) - abi_details[key.strip()] = value.strip() - - compiler = abi_details.get('compiler', 'unk_compiler') - stdlib = abi_details.get('stdlib', 'unk_stdlib') - stdlib_version = abi_details.get('stdlib_version', 'unk_stdlib_version') - abi = abi_details.get('abi', 'unk_abi') - abi_string = f"{compiler}-{stdlib}-{stdlib_version}-{abi}" - - detected_os = abi_details.get("os", "unknown_os") - arch = platform.machine() - - platform_identifier = { - "triplet": f"{arch}-{detected_os}", - "abi_signature": abi_string, - "details": abi_details, - "is_native": True, - "cross_file": None, - "docker_image": None, - "arch": arch - } - - with open(ABI_CACHE_FILE, 'w') as f: - json.dump(platform_identifier, f, indent=2) - - print(f"✅ Native ABI detected and cached: {abi_string}") - return platform_identifier - - finally: - if temp_dir.exists(): - shutil.rmtree(temp_dir) - -def get_platform_identifier() -> dict: - """ - Gets the native platform identifier, using a cached value if available. 
- """ - if ABI_CACHE_FILE.exists(): - with open(ABI_CACHE_FILE, 'r') as f: - return json.load(f) - else: - return _detect_and_cache_abi() - -def get_available_build_targets() -> list: - """Gets native, cross-compilation, and Docker build targets.""" - targets = [get_platform_identifier()] - - # Add cross-file targets - CROSS_FILES_PATH.mkdir(exist_ok=True) - for cross_file in CROSS_FILES_PATH.glob("*.cross"): - triplet = cross_file.stem - targets.append({ - "triplet": triplet, - "abi_signature": f"cross-{triplet}", - "is_native": False, - "cross_file": str(cross_file.resolve()), - "docker_image": None - }) - - # Add Docker targets if Docker is available - if docker: - try: - client = docker.from_env() - client.ping() - for name, image in DOCKER_BUILD_IMAGES.items(): - arch = name.split(' ')[0] - targets.append({ - "triplet": f"linux-{arch}", - "abi_signature": f"docker-{image}", - "is_native": False, - "cross_file": None, - "docker_image": image, - "arch": arch - }) - except Exception: - typer.secho("Warning: Docker is installed but the daemon is not running. 
Docker targets are unavailable.", fg=typer.colors.YELLOW) - - return targets - -def _build_plugin_for_target(sdist_path: Path, build_dir: Path, target: dict): - """Builds a plugin natively or with a cross file.""" - source_dir = build_dir / "src" - if source_dir.exists(): - shutil.rmtree(source_dir) - - with zipfile.ZipFile(sdist_path, 'r') as sdist_zip: - sdist_zip.extractall(source_dir) - - meson_build_dir = build_dir / "meson_build" - if meson_build_dir.exists(): - shutil.rmtree(meson_build_dir) - - setup_cmd = ["meson", "setup"] - if target["cross_file"]: - setup_cmd.extend(["--cross-file", target["cross_file"]]) - setup_cmd.append(str(meson_build_dir)) - - run_command(setup_cmd, cwd=source_dir) - run_command(["meson", "compile", "-C", str(meson_build_dir)], cwd=source_dir) - - compiled_lib = next(meson_build_dir.glob("lib*.so"), None) or next(meson_build_dir.glob("lib*.dylib"), None) - if not compiled_lib: - raise FileNotFoundError("Could not find compiled library after build.") - - return compiled_lib, target # Return target as ABI is pre-determined - -def _build_plugin_in_docker(sdist_path: Path, build_dir: Path, target: dict, plugin_name: str): - """Builds a plugin inside a Docker container.""" - client = docker.from_env() - image_name = target["docker_image"] - - # Find arch from DOCKER_BUILD_IMAGES to create a clean triplet later - arch = "unknown_arch" - for name, img in DOCKER_BUILD_IMAGES.items(): - if img == image_name: - arch = name.split(' ')[0] - break - - typer.echo(f" - Pulling Docker image '{image_name}' (if necessary)...") - client.images.pull(image_name) - - source_dir = build_dir / "src" - if source_dir.exists(): - shutil.rmtree(source_dir) - - with zipfile.ZipFile(sdist_path, 'r') as sdist_zip: - sdist_zip.extractall(source_dir) - - # This script will be run inside the container - build_script = f""" - set -e - echo "--- Installing build dependencies ---" - export PATH="/opt/python/cp313-cp313/bin:$PATH" - pip install meson ninja cmake - - 
echo " -> ℹ meson version: $(meson --version) [$(which meson)]" - echo " -> ℹ ninja version: $(ninja --version) [$(which ninja)]" - echo " -> ℹ cmake version: $(cmake --version) [$(which cmake)]" - - echo "--- Configuring with Meson ---" - meson setup /build/meson_build - echo "--- Compiling with Meson ---" - meson compile -C /build/meson_build - echo "--- Running ABI detector ---" - # We need to build and run the ABI detector inside the container too - mkdir /tmp/abi && cd /tmp/abi - echo "{ABI_DETECTOR_CPP_SRC.replace('"', '\\"')}" > main.cpp - echo "{ABI_DETECTOR_MESON_SRC.replace('"', '\\"')}" > meson.build - meson setup build && meson compile -C build - ./build/detector > /build/abi_details.txt - """ - - container_build_dir = Path("/build") - - typer.echo(" - Running build container...") - container = client.containers.run( - image=image_name, - command=["/bin/sh", "-c", build_script], - volumes={str(source_dir.resolve()): {'bind': str(container_build_dir), 'mode': 'rw'}}, - working_dir=str(container_build_dir), - detach=True - ) - - # Stream logs - for line in container.logs(stream=True, follow=True): - typer.echo(f" [docker] {line.decode('utf-8').strip()}") - - result = container.wait() - if result["StatusCode"] != 0: - # The container is stopped, but we can still inspect its filesystem by restarting it briefly. - log_output = container.logs() - container.remove() # Clean up before raising - typer.secho(f"Build failed inside Docker. 
Full log:\n{log_output.decode('utf-8')}", fg=typer.colors.RED) - raise subprocess.CalledProcessError(result["StatusCode"], "Build inside Docker failed.") - - # Retrieve artifacts by searching inside the container's filesystem - typer.echo(" - Locating compiled library in container...") - meson_build_dir_str = (container_build_dir / "meson_build").as_posix() - expected_lib_name = f"lib{plugin_name}.so" - - find_cmd = f"find {meson_build_dir_str} -name {expected_lib_name}" - - # We need to run the find command in the now-stopped container. - # We can't use exec_run on a stopped container, but we can create a new - # one that uses the same filesystem (volume) to find the file. - try: - find_output = client.containers.run( - image=image_name, - command=["/bin/sh", "-c", find_cmd], - volumes={str(source_dir.resolve()): {'bind': str(container_build_dir), 'mode': 'ro'}}, - remove=True, # Clean up the find container immediately - detach=False - ) - found_path_str = find_output.decode('utf-8').strip() - if not found_path_str: - raise FileNotFoundError("Find command returned no path.") - compiled_lib = Path(found_path_str) - typer.echo(f" - Found library at: {compiled_lib}") - - except Exception as e: - typer.secho(f" - Error: Could not locate '{expected_lib_name}' inside the container.", fg=typer.colors.RED) - typer.secho(f" Details: {e}", fg=typer.colors.RED) - raise FileNotFoundError("Could not find compiled library in container after a successful build.") - - # Get the ABI details from the container - abi_details_content = "" - bits, _ = container.get_archive(str(container_build_dir / "abi_details.txt")) - for chunk in bits: - abi_details_content += chunk.decode('utf-8') - - # We need to find the actual file content within the tar stream - # This is a simplification; a real implementation would use the `tarfile` module - actual_content = abi_details_content.split('\n', 1)[1] if '\n' in abi_details_content else abi_details_content - actual_content = 
actual_content.split('main.cpp')[1].strip() if 'main.cpp' in actual_content else actual_content - actual_content = actual_content.rsplit('0755', 1)[0].strip() if '0755' in actual_content else actual_content - - - abi_details = {} - for line in actual_content.strip().split('\n'): - if '=' in line: - key, value = line.split('=', 1) - abi_details[key.strip()] = value.strip() - - compiler = abi_details.get('compiler', 'unk_compiler') - stdlib = abi_details.get('stdlib', 'unk_stdlib') - stdlib_version = abi_details.get('stdlib_version', 'unk_stdlib_version') - abi = abi_details.get('abi', 'unk_abi') - abi_string = f"{compiler}-{stdlib}-{stdlib_version}-{abi}" - - final_target = { - "triplet": f"{abi_details.get('os', 'linux')}-{arch}", - "abi_signature": abi_string, - "is_native": False, - "cross_file": None, - "docker_image": image_name, - "arch": arch - } - - # Copy the binary out - local_lib_path = build_dir / compiled_lib.name - bits, _ = container.get_archive(str(compiled_lib)) - with open(local_lib_path, 'wb') as f: - for chunk in bits: - f.write(chunk) - - container.remove() - - return local_lib_path, final_target - - -def calculate_sha256(file_path: Path) -> str: - """Calculates the SHA256 checksum of a file.""" - sha256_hash = hashlib.sha256() - with open(file_path, "rb") as f: - for byte_block in iter(lambda: f.read(4096), b""): - sha256_hash.update(byte_block) - return sha256_hash.hexdigest() - -def run_command(command: list[str], cwd: Path = None, check=True): - """Runs a command and handles errors.""" - try: - # Hide stdout/stderr unless there's an error - result = subprocess.run(command, check=check, capture_output=True, text=True, cwd=cwd) - return result - except subprocess.CalledProcessError as e: - if check: - print(f"Error running command: {' '.join(command)}", file=sys.stderr) - print(f"STDOUT:\n{e.stdout}", file=sys.stderr) - print(f"STDERR:\n{e.stderr}", file=sys.stderr) - raise typer.Exit(code=1) - return e - - -# --- Cache Management Commands --- 
-@cache_app.command("clear") -def cache_clear(): - """ - Clears all cached data, including the ABI signature. - Run this if you have updated your C++ compiler. - """ - if CACHE_PATH.exists(): - shutil.rmtree(CACHE_PATH) - print("✅ Local cache cleared.") - else: - print("No cache found to clear.") - -# --- Key Management Commands --- - -@keys_app.command("generate") -def keys_generate( - key_name: str = typer.Option("author_key", "--name", "-n", help="The base name for the generated key files.") -): - """ - Generates a new Ed25519 key pair for signing bundles. - """ - private_key_path = Path(f"{key_name}") - public_key_path = Path(f"{key_name}.pub") - - if private_key_path.exists() or public_key_path.exists(): - print(f"Error: Key files '{private_key_path}' or '{public_key_path}' already exist.", file=sys.stderr) - raise typer.Exit(code=1) - - print("Generating Ed25519 key pair...") - run_command([ - "ssh-keygen", - "-t", "ed25519", - "-f", str(private_key_path), - "-N", "", # No passphrase - "-C", "fourdst bundle signing key" - ]) - print("\n✅ Keys generated successfully!") - print(f" -> Private Key (KEEP SECRET): {private_key_path.resolve()}") - print(f" -> Public Key (SHARE): {public_key_path.resolve()}") - print("\nShare the public key with users who need to trust your bundles.") - -@keys_app.command("sync") -def keys_sync( - repo_url: str = typer.Argument(..., help="The URL of the Git repository containing trusted public keys.") -): - """ - Syncs the local trust store with a central Git repository of public keys. - This will ADD new keys and REMOVE keys that are no longer in the repository. 
- """ - LOCAL_TRUST_STORE_PATH.mkdir(parents=True, exist_ok=True) - temp_dir = Path("temp_keys_repo") - - print(f"Syncing trust store with {repo_url}...") - if temp_dir.exists(): - shutil.rmtree(temp_dir) - run_command(["git", "clone", "--depth", "1", repo_url, str(temp_dir)]) - - # Get key sets - repo_keys = {p.name for p in temp_dir.glob("*.pub")} - local_keys = {p.name for p in LOCAL_TRUST_STORE_PATH.glob("*.pub")} - - # Sync logic - keys_to_add = repo_keys - local_keys - keys_to_remove = local_keys - repo_keys - - for key_file in keys_to_add: - shutil.copy(temp_dir / key_file, LOCAL_TRUST_STORE_PATH / key_file) - print(f" [+] Added trusted key: {key_file}") - - for key_file in keys_to_remove: - (LOCAL_TRUST_STORE_PATH / key_file).unlink() - print(f" [-] Removed key: {key_file}") - - if not keys_to_add and not keys_to_remove: - print("Trust store is already up to date.") - - shutil.rmtree(temp_dir) - print("\n✅ Trust store sync complete!") - print(f"Location: {LOCAL_TRUST_STORE_PATH}") - - -# --- Bundle Management Commands --- - -@bundle_app.command("create") -def bundle_create( - plugin_dirs: list[Path] = typer.Argument(..., help="A list of plugin project directories to include.", exists=True, file_okay=False), - output_bundle: Path = typer.Option("bundle.fbundle", "--out", "-o", help="The path for the output bundle file."), - bundle_name: str = typer.Option("MyPluginBundle", "--name", help="The name of the bundle."), - bundle_version: str = typer.Option("0.1.0", "--ver", help="The version of the bundle."), - bundle_author: str = typer.Option("Unknown", "--author", help="The author of the bundle.") -): - """ - Builds and packages one or more plugin projects into a single .fbundle file. - """ - staging_dir = Path("temp_bundle_staging") - if staging_dir.exists(): - shutil.rmtree(staging_dir) - staging_dir.mkdir() - - # Get the host platform identifier, triggering detection if needed. 
- host_platform = get_platform_identifier() - abi_tag = host_platform["abi_signature"] - - manifest = { - "bundleName": bundle_name, - "bundleVersion": bundle_version, - "bundleAuthor": bundle_author, - "bundleComment": "Created with fourdst-cli", - "bundledOn": datetime.datetime.now().isoformat(), - "bundlePlugins": {} - } - - print("Creating bundle...") - for plugin_dir in plugin_dirs: - plugin_name = plugin_dir.name - print(f"--> Processing plugin: {plugin_name}") - - # 1. Build the plugin - print(f" - Compiling for host platform...") - build_dir = plugin_dir / "builddir" - if not build_dir.exists(): - run_command(["meson", "setup", "builddir"], cwd=plugin_dir) - run_command(["meson", "compile", "-C", "builddir"], cwd=plugin_dir) - - # 2. Find the compiled artifact - compiled_lib = next(build_dir.glob("lib*.so"), None) or next(build_dir.glob("lib*.dylib"), None) - if not compiled_lib: - print(f"Error: Could not find compiled library for {plugin_name} (expected lib*.so or lib*.dylib)", file=sys.stderr) - raise typer.Exit(code=1) - - # 3. Package source code (sdist), respecting .gitignore - print(" - Packaging source code (respecting .gitignore)...") - sdist_path = staging_dir / f"{plugin_name}_src.zip" - - # Use git to list files, which automatically respects .gitignore - git_check = run_command(["git", "rev-parse", "--is-inside-work-tree"], cwd=plugin_dir, check=False) - - files_to_include = [] - if git_check.returncode == 0: - # This is a git repo, use git to list files - result = run_command(["git", "ls-files", "--cached", "--others", "--exclude-standard"], cwd=plugin_dir) - files_to_include = [plugin_dir / f for f in result.stdout.strip().split('\n') if f] - else: - # Not a git repo, fall back to os.walk and warn the user - typer.secho(f" - Warning: '{plugin_dir.name}' is not a git repository. 
Packaging all files.", fg=typer.colors.YELLOW) - for root, _, files in os.walk(plugin_dir): - if 'builddir' in root: - continue - for file in files: - files_to_include.append(Path(root) / file) - - with zipfile.ZipFile(sdist_path, 'w', zipfile.ZIP_DEFLATED) as sdist_zip: - for file_path in files_to_include: - if file_path.is_file(): - sdist_zip.write(file_path, file_path.relative_to(plugin_dir)) - - # 4. Stage artifacts with ABI-tagged filenames and update manifest - binaries_dir = staging_dir / "bin" - binaries_dir.mkdir(exist_ok=True) - - # Construct new filename with arch, os, and ABI tag - base_name = compiled_lib.stem # e.g., "libplugin_a" - ext = compiled_lib.suffix # e.g., ".so" - triplet = host_platform["triplet"] - abi_signature = host_platform["abi_signature"] - tagged_filename = f"{base_name}.{triplet}.{abi_signature}{ext}" - staged_lib_path = binaries_dir / tagged_filename - - print(f" - Staging binary as: {tagged_filename}") - shutil.copy(compiled_lib, staged_lib_path) - - manifest["bundlePlugins"][plugin_name] = { - "sdist": { - "path": sdist_path.name, - "sdistBundledOn": datetime.datetime.now().isoformat(), - "buildable": True - }, - "binaries": [{ - "platform": { - "triplet": host_platform["triplet"], - "abi_signature": host_platform["abi_signature"] - }, - "path": staged_lib_path.relative_to(staging_dir).as_posix(), - "compiledOn": datetime.datetime.now().isoformat() - }] - } - - # 5. 
Write manifest and package final bundle - manifest_path = staging_dir / "manifest.yaml" - with open(manifest_path, 'w') as f: - yaml.dump(manifest, f, sort_keys=False) - - print(f"\nPackaging final bundle: {output_bundle}") - with zipfile.ZipFile(output_bundle, 'w', zipfile.ZIP_DEFLATED) as bundle_zip: - for root, _, files in os.walk(staging_dir): - for file in files: - file_path = Path(root) / file - bundle_zip.write(file_path, file_path.relative_to(staging_dir)) - - shutil.rmtree(staging_dir) - print("\n✅ Bundle created successfully!") - -@bundle_app.command("fill") -def bundle_fill(bundle_path: Path = typer.Argument(..., help="The .fbundle file to fill with new binaries.", exists=True)): - """ - Builds new binaries for the current host or cross-targets from the bundle's source. - """ - staging_dir = Path(f"temp_fill_{bundle_path.stem}") - if staging_dir.exists(): - shutil.rmtree(staging_dir) - - try: - # 1. Unpack and load manifest - with zipfile.ZipFile(bundle_path, 'r') as bundle_zip: - bundle_zip.extractall(staging_dir) - - manifest_path = staging_dir / "manifest.yaml" - if not manifest_path.exists(): - typer.secho("Error: Bundle is invalid. Missing manifest.yaml.", fg=typer.colors.RED) - raise typer.Exit(code=1) - - with open(manifest_path, 'r') as f: - manifest = yaml.safe_load(f) - - # 2. 
Find available targets and missing binaries - available_targets = get_available_build_targets() - build_options = [] - - for plugin_name, plugin_data in manifest.get('bundlePlugins', {}).items(): - if "sdist" not in plugin_data: - continue # Cannot build without source - - existing_abis = {b['platform']['abi_signature'] for b in plugin_data.get('binaries', [])} - - for target in available_targets: - # Use a more descriptive name for the choice - if target.get('docker_image', None): - display_name = f"Docker: {target['docker_image']}" - elif target.get('cross_file', None): - display_name = f"Cross: {Path(target['cross_file']).name}" - else: - display_name = f"Native: {target['abi_signature']}" - - if target['abi_signature'] not in existing_abis: - build_options.append({ - "name": f"Build '{plugin_name}' for {display_name}", - "plugin_name": plugin_name, - "target": target - }) - - if not build_options: - typer.secho("✅ Bundle is already full for all available build targets.", fg=typer.colors.GREEN) - raise typer.Exit() - - # 3. Prompt user to select which targets to build - choices = [opt['name'] for opt in build_options] - selected_builds = questionary.checkbox("Select which missing binaries to build:", choices=choices).ask() - - if not selected_builds: - typer.echo("No binaries selected to build. Exiting.") - raise typer.Exit() - - # 4. Build selected targets - for build_name in selected_builds: - build_job = next(opt for opt in build_options if opt['name'] == build_name) - plugin_name = build_job['plugin_name'] - target = build_job['target'] - - typer.secho(f"\nBuilding {plugin_name} for target '{build_name}'...", bold=True) - - sdist_zip_path = staging_dir / manifest['bundlePlugins'][plugin_name]['sdist']['path'] - build_temp_dir = staging_dir / f"build_{plugin_name}" - - try: - if target['docker_image']: - if not docker: - typer.secho("Error: Docker is not installed. 
Please install Docker to build this target.", fg=typer.colors.RED) - continue - compiled_lib, final_target = _build_plugin_in_docker(sdist_zip_path, build_temp_dir, target, plugin_name) - else: - compiled_lib, final_target = _build_plugin_for_target(sdist_zip_path, build_temp_dir, target) - - # Add new binary to bundle - abi_tag = final_target["abi_signature"] - base_name = compiled_lib.stem - ext = compiled_lib.suffix - triplet = final_target["triplet"] - tagged_filename = f"{base_name}.{triplet}.{abi_tag}{ext}" - - binaries_dir = staging_dir / "bin" - binaries_dir.mkdir(exist_ok=True) - staged_lib_path = binaries_dir / tagged_filename - shutil.move(compiled_lib, staged_lib_path) - - # Update manifest - new_binary_entry = { - "platform": { - "triplet": final_target["triplet"], - "abi_signature": abi_tag, - "arch": final_target["arch"] - }, - "path": staged_lib_path.relative_to(staging_dir).as_posix(), - "compiledOn": datetime.datetime.now().isoformat() - } - manifest['bundlePlugins'][plugin_name]['binaries'].append(new_binary_entry) - typer.secho(f" -> Successfully built and staged {tagged_filename}", fg=typer.colors.GREEN) - - except (FileNotFoundError, subprocess.CalledProcessError) as e: - typer.secho(f" -> Failed to build {plugin_name} for target '{build_name}': {e}", fg=typer.colors.RED) - finally: - if build_temp_dir.exists(): - shutil.rmtree(build_temp_dir) - - # 5. Repackage the bundle - # Invalidate any old signature - if "bundleAuthorKeyFingerprint" in manifest: - del manifest["bundleAuthorKeyFingerprint"] - if (staging_dir / "manifest.sig").exists(): - (staging_dir / "manifest.sig").unlink() - typer.secho("\n⚠️ Bundle signature has been invalidated by this operation. 
Please re-sign the bundle.", fg=typer.colors.YELLOW) - - with open(manifest_path, 'w') as f: - yaml.dump(manifest, f, sort_keys=False) - - with zipfile.ZipFile(bundle_path, 'w', zipfile.ZIP_DEFLATED) as bundle_zip: - for file_path in staging_dir.rglob('*'): - if file_path.is_file(): - bundle_zip.write(file_path, file_path.relative_to(staging_dir)) - - typer.secho(f"\n✅ Bundle '{bundle_path.name}' has been filled successfully.", fg=typer.colors.GREEN) - - finally: - if staging_dir.exists(): - shutil.rmtree(staging_dir) - - -@bundle_app.command("sign") -def bundle_sign( - bundle_path: Path = typer.Argument(..., help="The .fbundle file to sign.", exists=True), - private_key: Path = typer.Option(..., "--key", "-k", help="Path to the author's private signing key.", exists=True) -): - """ - Signs a bundle with an author's private key, adding checksums and a signature. - """ - print(f"Signing bundle: {bundle_path}") - staging_dir = Path("temp_sign_staging") - if staging_dir.exists(): - shutil.rmtree(staging_dir) - - # 1. Unpack the bundle - with zipfile.ZipFile(bundle_path, 'r') as bundle_zip: - bundle_zip.extractall(staging_dir) - - manifest_path = staging_dir / "manifest.yaml" - if not manifest_path.exists(): - print("Error: manifest.yaml not found in bundle.", file=sys.stderr) - raise typer.Exit(code=1) - - # 2. Load private key and derive public key to get fingerprint - with open(private_key, "rb") as key_file: - priv_key_obj = serialization.load_ssh_private_key(key_file.read(), password=None) - - pub_key_obj = priv_key_obj.public_key() - pub_key_bytes = pub_key_obj.public_bytes( - encoding=serialization.Encoding.OpenSSH, - format=serialization.PublicFormat.OpenSSH - ) - fingerprint = "sha256:" + hashlib.sha256(pub_key_bytes).hexdigest() - print(f" - Signing with key fingerprint: {fingerprint}") - - # 3. 
Update manifest with checksums and fingerprint - with open(manifest_path, 'r') as f: - manifest = yaml.safe_load(f) - - manifest['bundleAuthorKeyFingerprint'] = fingerprint - for plugin in manifest['bundlePlugins'].values(): - for binary in plugin.get('binaries', []): - binary_path = staging_dir / binary['path'] - if binary_path.exists(): - binary['checksum'] = "sha256:" + calculate_sha256(binary_path) - else: - binary['checksum'] = "MISSING_FILE" - - - with open(manifest_path, 'w') as f: - yaml.dump(manifest, f, sort_keys=False) - print(" - Added file checksums and key fingerprint to manifest.") - - # 4. Sign the manifest - manifest_content = manifest_path.read_bytes() - - if isinstance(priv_key_obj, ed25519.Ed25519PrivateKey): - signature = priv_key_obj.sign(manifest_content) - elif isinstance(priv_key_obj, rsa.RSAPrivateKey): - signature = priv_key_obj.sign( - manifest_content, - padding.PKCS1v15(), - hashes.SHA256() - ) - else: - print("Error: Unsupported private key type for signing.", file=sys.stderr) - raise typer.Exit(code=1) - - - sig_path = staging_dir / "manifest.sig" - sig_path.write_bytes(signature) - print(" - Created manifest.sig.") - - # 5. Repackage the bundle - with zipfile.ZipFile(bundle_path, 'w', zipfile.ZIP_DEFLATED) as bundle_zip: - for file_path in staging_dir.rglob('*'): - if file_path.is_file(): - bundle_zip.write(file_path, file_path.relative_to(staging_dir)) - - shutil.rmtree(staging_dir) - print("\n✅ Bundle signed successfully!") - -@bundle_app.command("inspect") -def bundle_inspect(bundle_path: Path = typer.Argument(..., help="The .fbundle file to inspect.", exists=True)): - """ - Inspects a bundle, validating its contents and cryptographic signature. - """ - staging_dir = Path(f"temp_inspect_{bundle_path.stem}") - if staging_dir.exists(): - shutil.rmtree(staging_dir) - - try: - # 1. 
Unpack and load manifest - with zipfile.ZipFile(bundle_path, 'r') as bundle_zip: - archive_files = set(bundle_zip.namelist()) - bundle_zip.extractall(staging_dir) - - manifest_path = staging_dir / "manifest.yaml" - if not manifest_path.exists(): - typer.secho("Error: Bundle is invalid. Missing manifest.yaml.", fg=typer.colors.RED) - raise typer.Exit(code=1) - - with open(manifest_path, 'r') as f: - manifest = yaml.safe_load(f) - - # 2. Print Header - typer.secho(f"--- Bundle Inspection Report for: {bundle_path.name} ---", bold=True) - typer.echo(f"Name: {manifest.get('bundleName', 'N/A')}") - typer.echo(f"Version: {manifest.get('bundleVersion', 'N/A')}") - typer.echo(f"Author: {manifest.get('bundleAuthor', 'N/A')}") - typer.echo(f"Bundled: {manifest.get('bundledOn', 'N/A')}") - typer.echo("-" * 50) - - # 3. Signature and Trust Verification - fingerprint = manifest.get('bundleAuthorKeyFingerprint') - sig_path = staging_dir / "manifest.sig" - - if not fingerprint or not sig_path.exists(): - typer.secho("Trust Status: 🟡 UNSIGNED", fg=typer.colors.YELLOW) - else: - # Find the key in the local trust store - trusted_key_path = None - if LOCAL_TRUST_STORE_PATH.exists(): - for key_file in LOCAL_TRUST_STORE_PATH.glob("*.pub"): - pub_key_bytes = key_file.read_bytes() - pub_key_fingerprint = "sha256:" + hashlib.sha256(pub_key_bytes).hexdigest() - if pub_key_fingerprint == fingerprint: - trusted_key_path = key_file - break - - if not trusted_key_path: - typer.secho(f"Trust Status: ⚠️ SIGNED but UNTRUSTED AUTHOR ({fingerprint})", fg=typer.colors.YELLOW) - else: - try: - pub_key_obj = serialization.load_ssh_public_key(trusted_key_path.read_bytes()) - signature = sig_path.read_bytes() - manifest_content = manifest_path.read_bytes() - - if isinstance(pub_key_obj, ed25519.Ed25519PublicKey): - pub_key_obj.verify(signature, manifest_content) - elif isinstance(pub_key_obj, rsa.RSAPublicKey): - pub_key_obj.verify( - signature, - manifest_content, - padding.PKCS1v15(), - hashes.SHA256() 
- ) - typer.secho(f"Trust Status: ✅ SIGNED and TRUSTED ({fingerprint})", fg=typer.colors.GREEN) - except InvalidSignature: - typer.secho(f"Trust Status: ❌ INVALID SIGNATURE ({fingerprint})", fg=typer.colors.RED) - - typer.echo("-" * 50) - - # 4. Content Validation - typer.echo("Validating bundle contents...") - missing_files = [] - checksum_errors = [] - - for plugin_name, plugin_data in manifest.get('bundlePlugins', {}).items(): - sdist_path = plugin_data.get('sdist', {}).get('path') - if sdist_path and sdist_path not in archive_files: - missing_files.append(sdist_path) - - for binary in plugin_data.get('binaries', []): - binary_path_str = binary.get('path') - if binary_path_str and binary_path_str not in archive_files: - missing_files.append(binary_path_str) - elif binary_path_str: - # Verify checksum if present - expected_checksum = binary.get('checksum') - if expected_checksum: - actual_checksum = "sha256:" + calculate_sha256(staging_dir / binary_path_str) - if actual_checksum != expected_checksum: - checksum_errors.append(binary_path_str) - - if not missing_files and not checksum_errors: - typer.secho("Content Validation: ✅ OK", fg=typer.colors.GREEN) - else: - typer.secho("Content Validation: ❌ FAILED", fg=typer.colors.RED) - for f in missing_files: - typer.echo(f" - Missing file from archive: {f}") - for f in checksum_errors: - typer.echo(f" - Checksum mismatch for: {f}") - - # 5. 
Plugin Details - typer.echo("-" * 50) - typer.secho("Available Plugins:", bold=True) - for plugin_name, plugin_data in manifest.get('bundlePlugins', {}).items(): - typer.echo(f"\n Plugin: {plugin_name}") - typer.echo(f" Source Dist: {plugin_data.get('sdist', {}).get('path', 'N/A')}") - binaries = plugin_data.get('binaries', []) - if not binaries: - typer.echo(" Binaries: None") - else: - typer.echo(" Binaries:") - for b in binaries: - plat = b.get('platform', {}) - typer.echo(f" - Path: {b.get('path', 'N/A')}") - typer.echo(f" ABI: {plat.get('abi_signature', 'N/A')}") - typer.echo(f" Arch: {plat.get('triplet', 'N/A')}") - - finally: - if staging_dir.exists(): - shutil.rmtree(staging_dir) - - -# --- Original Plugin Commands (Unchanged for now) --- - -def parse_cpp_header(header_path: Path): - """ - Parses a C++ header file using libclang to find classes and their pure virtual methods. - """ - # This function requires python-clang-16 - try: - from clang import cindex - except ImportError: - print("Error: The 'init' command requires 'libclang'. Please install it.", file=sys.stderr) - print("Run: pip install python-clang-16", file=sys.stderr) - # Also ensure the libclang.so/dylib is in your system's library path. - raise typer.Exit(code=1) - - if not cindex.Config.loaded: - try: - # Attempt to find libclang automatically. This may need to be configured by the user. - cindex.Config.set_library_file(cindex.conf.get_filename()) - except cindex.LibclangError as e: - print(f"Error: libclang library not found. 
Please ensure it's installed and in your system's path.", file=sys.stderr) - print(f"Details: {e}", file=sys.stderr) - raise typer.Exit(code=1) - - # --- Get compiler flags from pkg-config to help clang find includes --- - try: - pkg_config_proc = subprocess.run( - ['pkg-config', '--cflags', 'fourdst_plugin'], - capture_output=True, - text=True, - check=True - ) - # Split the flags string into a list of arguments for libclang - compiler_flags = pkg_config_proc.stdout.strip().split() - print(f"Using compiler flags from pkg-config: {' '.join(compiler_flags)}") - except (subprocess.CalledProcessError, FileNotFoundError): - print("Warning: `pkg-config --cflags fourdst-plugin` failed. Parsing may not succeed if the header has dependencies.", file=sys.stderr) - print("Please ensure 'pkg-config' is installed and 'fourdst-plugin.pc' is in your PKG_CONFIG_PATH.", file=sys.stderr) - compiler_flags = [] - - index = cindex.Index.create() - # Add the pkg-config flags to the parser arguments - translation_unit = index.parse(str(header_path), args=['-x', 'c++', '-std=c++23'] + compiler_flags) - - interfaces = {} - for cursor in translation_unit.cursor.walk_preorder(): - if cursor.kind == cindex.CursorKind.CLASS_DECL and cursor.is_definition(): - class_name = cursor.spelling - methods = [] - for child in cursor.get_children(): - if child.kind == cindex.CursorKind.CXX_METHOD and child.is_pure_virtual_method(): - method_name = child.spelling - result_type = child.result_type.spelling - # Recreate the full method signature - params = [p.spelling or f"param{i+1}" for i, p in enumerate(child.get_arguments())] - param_str = ", ".join(f"{p.type.spelling} {p.spelling}" for p in child.get_arguments()) - const_qualifier = " const" if child.is_const_method() else "" - - signature = f"{result_type} {method_name}({param_str}){const_qualifier}" - - # Generate a placeholder body - body = f" // TODO: Implement the {method_name} method.\n" - if result_type != "void": - body += f" return {{}};" # 
Default return - - methods.append({'signature': signature, 'body': body}) - - if methods: # Only consider classes with pure virtual methods as interfaces - interfaces[class_name] = methods - - return interfaces - -@plugin_app.command("init") -def plugin_init( - project_name: str = typer.Argument(..., help="The name of the new plugin project."), - header: Path = typer.Option(..., "--header", "-H", help="Path to the C++ header file defining the plugin interface.", exists=True, file_okay=True, dir_okay=False, readable=True), - directory: Path = typer.Option(".", "-d", "--directory", help="The directory to create the project in.", resolve_path=True), - version: str = typer.Option("0.1.0", "--ver", help="The initial SemVer version of the plugin."), - libplugin_rev: str = typer.Option("main", "--libplugin-rev", help="The git revision of libplugin to use.") -): - """ - Initializes a new Meson-based C++ plugin project from an interface header. - """ - print(f"Parsing interface header: {header.name}") - interfaces = parse_cpp_header(header) - - if not interfaces: - print(f"Error: No suitable interfaces (classes with pure virtual methods) found in {header}", file=sys.stderr) - raise typer.Exit(code=1) - - # --- Interactive Selection --- - chosen_interface = questionary.select( - "Which interface would you like to implement?", - choices=list(interfaces.keys()) - ).ask() - - if not chosen_interface: - raise typer.Exit() # User cancelled - - print(f"Initializing plugin '{project_name}' implementing interface '{chosen_interface}'...") - - # --- Code Generation --- - method_stubs = "\n".join( - f" {method['signature']} override {{\n{method['body']}\n }}" - for method in interfaces[chosen_interface] - ) - - class_name = ''.join(filter(str.isalnum, project_name.replace('_', ' ').title().replace(' ', ''))) + "Plugin" - root_path = directory / project_name - src_path = root_path / "src" - include_path = src_path / "include" - subprojects_path = root_path / "subprojects" - - try: - 
src_path.mkdir(parents=True, exist_ok=True) - include_path.mkdir(exist_ok=True) - subprojects_path.mkdir(exist_ok=True) - - # --- Copy interface header to make project self-contained --- - local_header_path = include_path / header.name - shutil.copy(header, local_header_path) - print(f" -> Copied interface header to {local_header_path.relative_to(root_path)}") - - # --- Create libplugin.wrap file --- - libplugin_wrap_content = f"""[wrap-git] -url = https://github.com/4D-STAR/libplugin -revision = {libplugin_rev} -depth = 1 -""" - (subprojects_path / "libplugin.wrap").write_text(libplugin_wrap_content) - print(f" -> Created {subprojects_path / 'libplugin.wrap'}") - - # --- Create meson.build from template --- - meson_template = get_template_content("meson.build.in") - meson_content = meson_template.format( - project_name=project_name, - version=version - ) - (root_path / "meson.build").write_text(meson_content) - print(f" -> Created {root_path / 'meson.build'}") - - # --- Create C++ source file from template --- - cpp_template = get_template_content("plugin.cpp.in") - cpp_content = cpp_template.format( - class_name=class_name, - project_name=project_name, - interface=chosen_interface, - interface_header_path=header.name, # Use just the filename - method_stubs=method_stubs - ) - (src_path / f"{project_name}.cpp").write_text(cpp_content) - print(f" -> Created {src_path / f'{project_name}.cpp'}") - - # --- Create .gitignore --- - (root_path / ".gitignore").write_text(GITIGNORE_CONTENT) - print(f" -> Created .gitignore") - - # --- Initialize Git Repository --- - print(" -> Initializing Git repository...") - run_command(["git", "init"], cwd=root_path) - run_command(["git", "add", "."], cwd=root_path) - commit_message = f"Initial commit: Scaffold fourdst plugin '{project_name}'" - run_command(["git", "commit", "-m", commit_message], cwd=root_path) - - - except OSError as e: - print(f"Error creating project structure: {e}", file=sys.stderr) - raise typer.Exit(code=1) - - 
def _extract_sdist(bundle_path: Path, plugin_name: str, temp_dir: Path) -> Path:
    """Unpack a single plugin's sdist archive out of a .fbundle file.

    The bundle is a zip holding a ``manifest.yaml`` plus per-plugin source
    archives.  The requested plugin's sdist is extracted into
    ``temp_dir/<plugin_name>_src`` and that directory is returned.

    Raises:
        FileNotFoundError: if the manifest, the plugin's manifest entry,
            or the sdist archive the manifest points at is missing.
    """
    target_dir = temp_dir / f"{plugin_name}_src"

    with tempfile.TemporaryDirectory() as scratch:
        scratch_dir = Path(scratch)

        # Unpack the outer bundle into a throwaway directory.
        with zipfile.ZipFile(bundle_path, 'r') as outer_zip:
            outer_zip.extractall(scratch_dir)

        manifest_file = scratch_dir / "manifest.yaml"
        if not manifest_file.exists():
            raise FileNotFoundError("manifest.yaml not found in bundle.")

        with open(manifest_file, 'r') as fh:
            manifest = yaml.safe_load(fh)

        entry = manifest.get('bundlePlugins', {}).get(plugin_name)
        if not entry or 'sdist' not in entry:
            raise FileNotFoundError(f"Plugin '{plugin_name}' or its sdist not found in {bundle_path.name}.")

        sdist_archive = scratch_dir / entry['sdist']['path']
        if not sdist_archive.exists():
            raise FileNotFoundError(f"sdist archive '{entry['sdist']['path']}' not found in bundle.")

        # Unpack the inner sdist into the caller-owned directory.
        with zipfile.ZipFile(sdist_archive, 'r') as inner_zip:
            inner_zip.extractall(target_dir)

    return target_dir
def plugin_diff(
    plugin_name: str = typer.Argument(..., help="The name of the plugin to compare."),
    bundle_a_path: Path = typer.Argument(..., help="The first bundle to compare.", exists=True, readable=True),
    bundle_b_path: Path = typer.Argument(..., help="The second bundle to compare.", exists=True, readable=True),
):
    """
    Compares the source code of a specific plugin between two different bundles.

    Both bundles are unpacked, the named plugin's sdist is extracted from
    each, and the two source trees are compared: added/removed files are
    listed and modified text files are shown as unified diffs.
    """
    console.print(Panel(f"Comparing source for plugin [bold blue]{plugin_name}[/bold blue] between bundles"))

    with tempfile.TemporaryDirectory() as temp_a_str, tempfile.TemporaryDirectory() as temp_b_str:
        try:
            src_a_path = _extract_sdist(bundle_a_path, plugin_name, Path(temp_a_str))
            src_b_path = _extract_sdist(bundle_b_path, plugin_name, Path(temp_b_str))
        except FileNotFoundError as e:
            console.print(f"[red]Error: {e}[/red]")
            raise typer.Exit(code=1)

        files_a = {p.relative_to(src_a_path) for p in src_a_path.rglob('*') if p.is_file()}
        files_b = {p.relative_to(src_b_path) for p in src_b_path.rglob('*') if p.is_file()}

        added_files = files_b - files_a
        removed_files = files_a - files_b
        common_files = files_a & files_b

        has_changes = False

        if added_files:
            has_changes = True
            console.print(Panel("\n".join(f"[green]+ {f}[/green]" for f in sorted(added_files)), title="[bold]Added Files[/bold]"))

        if removed_files:
            has_changes = True
            console.print(Panel("\n".join(f"[red]- {f}[/red]" for f in sorted(removed_files)), title="[bold]Removed Files[/bold]"))

        modified_files_count = 0
        for file_rel_path in sorted(common_files):
            # BUGFIX: compare bytes first -- sdists can legitimately contain
            # binary artifacts, and read_text() raised UnicodeDecodeError on
            # them, aborting the whole diff.
            bytes_a = (src_a_path / file_rel_path).read_bytes()
            bytes_b = (src_b_path / file_rel_path).read_bytes()

            if bytes_a == bytes_b:
                continue

            has_changes = True
            modified_files_count += 1

            try:
                content_a = bytes_a.decode('utf-8')
                content_b = bytes_b.decode('utf-8')
            except UnicodeDecodeError:
                # At least one side is not valid UTF-8 text; flag the change
                # without attempting a line diff.
                console.print(Panel("[yellow]Binary files differ[/yellow]", title=f"[bold yellow]Modified: {file_rel_path}[/bold yellow]", border_style="yellow", expand=False))
                continue

            diff = difflib.unified_diff(
                content_a.splitlines(keepends=True),
                content_b.splitlines(keepends=True),
                fromfile=f"a/{file_rel_path}",
                tofile=f"b/{file_rel_path}",
            )
            diff_text = Text()
            for line in diff:
                if line.startswith('+'):
                    diff_text.append(line, style="green")
                elif line.startswith('-'):
                    diff_text.append(line, style="red")
                else:
                    diff_text.append(line)

            console.print(Panel(diff_text, title=f"[bold yellow]Modified: {file_rel_path}[/bold yellow]", border_style="yellow", expand=False))

        if not has_changes:
            console.print(Panel("[green]No source code changes detected for this plugin.[/green]", title="Result"))
        else:
            console.print(f"\nFound changes in {modified_files_count} file(s).")
def plugin_extract(
    plugin_name: str = typer.Argument(..., help="The name of the plugin to extract."),
    bundle_path: Path = typer.Argument(..., help="The path to the .fbundle file.", exists=True, readable=True),
    output_dir: Path = typer.Option(
        Path("."),
        "--out", "-o",
        help="The directory to extract the plugin source to. Defaults to the current directory.",
        file_okay=False,
        dir_okay=True,
        writable=True,
        resolve_path=True
    )
):
    """
    Extracts a plugin's source code from a bundle.

    The bundle is unpacked to a temporary directory, the manifest is
    consulted for the plugin's sdist archive, and that archive is
    unpacked into <output_dir>/<plugin_name>.
    """
    output_dir.mkdir(parents=True, exist_ok=True)

    try:
        with tempfile.TemporaryDirectory() as temp_dir_str:
            temp_dir = Path(temp_dir_str)

            # 1. Unpack the main bundle
            typer.echo(f"Opening bundle: {bundle_path.name}")
            with zipfile.ZipFile(bundle_path, 'r') as bundle_zip:
                bundle_zip.extractall(temp_dir)

            # 2. Read the manifest
            manifest_path = temp_dir / "manifest.yaml"
            if not manifest_path.exists():
                typer.secho("Error: Bundle is invalid. Missing manifest.yaml.", fg=typer.colors.RED)
                raise typer.Exit(code=1)

            with open(manifest_path, 'r') as f:
                manifest = yaml.safe_load(f)

            # 3. Find the plugin and its sdist
            plugin_data = manifest.get('bundlePlugins', {}).get(plugin_name)
            if not plugin_data:
                typer.secho(f"Error: Plugin '{plugin_name}' not found in the bundle.", fg=typer.colors.RED)
                available_plugins = list(manifest.get('bundlePlugins', {}).keys())
                if available_plugins:
                    typer.echo("Available plugins are: " + ", ".join(available_plugins))
                raise typer.Exit(code=1)

            sdist_info = plugin_data.get('sdist')
            if not sdist_info or 'path' not in sdist_info:
                typer.secho(f"Error: Source distribution (sdist) not found for plugin '{plugin_name}'.", fg=typer.colors.RED)
                raise typer.Exit(code=1)

            sdist_path_in_bundle = temp_dir / sdist_info['path']
            if not sdist_path_in_bundle.is_file():
                typer.secho(f"Error: sdist file '{sdist_info['path']}' is missing from the bundle archive.", fg=typer.colors.RED)
                raise typer.Exit(code=1)

            # 4. Extract the sdist to the final output directory
            final_destination = output_dir / plugin_name
            if final_destination.exists():
                typer.secho(f"Warning: Output directory '{final_destination}' already exists. Files may be overwritten.", fg=typer.colors.YELLOW)
            else:
                final_destination.mkdir(parents=True)

            typer.echo(f"Extracting '{plugin_name}' source to '{final_destination.resolve()}'...")
            with zipfile.ZipFile(sdist_path_in_bundle, 'r') as sdist_zip:
                sdist_zip.extractall(final_destination)

            typer.secho(f"\n✅ Plugin '{plugin_name}' extracted successfully.", fg=typer.colors.GREEN)

    except typer.Exit:
        # BUGFIX: typer.Exit derives from Exception (click.exceptions.Exit
        # subclasses RuntimeError), so without this re-raise the blanket
        # handler below caught our own deliberate exits and misreported
        # them as "unexpected" errors.
        raise
    except zipfile.BadZipFile:
        typer.secho(f"Error: '{bundle_path}' is not a valid bundle (zip) file.", fg=typer.colors.RED)
        raise typer.Exit(code=1)
    except Exception as e:
        typer.secho(f"An unexpected error occurred: {e}", fg=typer.colors.RED)
        raise typer.Exit(code=1)
@plugin_app.command("init")
def plugin_init(
    project_name: str = typer.Argument(..., help="The name of the new plugin project."),
    header: Path = typer.Option(..., "--header", "-H", help="Path to the C++ header file defining the plugin interface.", exists=True, file_okay=True, dir_okay=False, readable=True),
    directory: Path = typer.Option(".", "-d", "--directory", help="The directory to create the project in.", resolve_path=True),
    version: str = typer.Option("0.1.0", "--ver", help="The initial SemVer version of the plugin."),
    libplugin_rev: str = typer.Option("main", "--libplugin-rev", help="The git revision of libplugin to use.")
):
    """
    Initializes a new Meson-based C++ plugin project from an interface header.

    Workflow: parse the header with libclang, let the user pick one
    interface, then scaffold a Meson project (copied header, wrap file,
    templated sources, .gitignore) and make an initial git commit.
    """
    print(f"Parsing interface header: {header.name}")
    found_ifaces = parse_cpp_header(header)

    if not found_ifaces:
        print(f"Error: No suitable interfaces (classes with pure virtual methods) found in {header}", file=sys.stderr)
        raise typer.Exit(code=1)

    # Ask the user which parsed interface the new plugin should implement.
    selection = questionary.select(
        "Which interface would you like to implement?",
        choices=list(found_ifaces.keys())
    ).ask()

    if not selection:
        raise typer.Exit()  # User cancelled

    print(f"Initializing plugin '{project_name}' implementing interface '{selection}'...")

    # Render one override stub per pure virtual method of the interface.
    stub_block = "\n".join(
        f"    {m['signature']} override {{\n{m['body']}\n    }}"
        for m in found_ifaces[selection]
    )

    # Derive a CamelCase class name from the (possibly snake_case) project name.
    camel = project_name.replace('_', ' ').title().replace(' ', '')
    impl_class = ''.join(filter(str.isalnum, camel)) + "Plugin"

    project_root = directory / project_name
    source_dir = project_root / "src"
    header_dir = source_dir / "include"
    wrap_dir = project_root / "subprojects"

    try:
        source_dir.mkdir(parents=True, exist_ok=True)
        header_dir.mkdir(exist_ok=True)
        wrap_dir.mkdir(exist_ok=True)

        # Copy the interface header so the project is self-contained.
        copied_header = header_dir / header.name
        shutil.copy(header, copied_header)
        print(f" -> Copied interface header to {copied_header.relative_to(project_root)}")

        # Meson wrap file pinning libplugin to the requested revision.
        wrap_text = (
            "[wrap-git]\n"
            "url = https://github.com/4D-STAR/libplugin\n"
            f"revision = {libplugin_rev}\n"
            "depth = 1\n"
        )
        (wrap_dir / "libplugin.wrap").write_text(wrap_text)
        print(f" -> Created {wrap_dir / 'libplugin.wrap'}")

        # meson.build rendered from the packaged template.
        build_text = get_template_content("meson.build.in").format(
            project_name=project_name,
            version=version
        )
        (project_root / "meson.build").write_text(build_text)
        print(f" -> Created {project_root / 'meson.build'}")

        # C++ implementation skeleton rendered from the packaged template.
        cpp_text = get_template_content("plugin.cpp.in").format(
            class_name=impl_class,
            project_name=project_name,
            interface=selection,
            interface_header_path=header.name,  # just the filename; it lives in include/
            method_stubs=stub_block
        )
        (source_dir / f"{project_name}.cpp").write_text(cpp_text)
        print(f" -> Created {source_dir / f'{project_name}.cpp'}")

        (project_root / ".gitignore").write_text(GITIGNORE_CONTENT)
        print(f" -> Created .gitignore")

        # Commit the scaffold so the user starts from a clean history.
        print(" -> Initializing Git repository...")
        run_command(["git", "init"], cwd=project_root)
        run_command(["git", "add", "."], cwd=project_root)
        run_command(["git", "commit", "-m", f"Initial commit: Scaffold fourdst plugin '{project_name}'"], cwd=project_root)

    except OSError as e:
        print(f"Error creating project structure: {e}", file=sys.stderr)
        raise typer.Exit(code=1)

    print("\n✅ Project initialized successfully and committed to Git!")
    print("To build your new plugin:")
    print(f"  cd {project_root}")
    print("  meson setup builddir")
    print("  meson compile -C builddir")
def parse_cpp_header(header_path: Path):
    """
    Parses a C++ header file using libclang to find classes and their pure virtual methods.

    Returns a dict mapping interface (class) names to a list of
    {'signature': ..., 'body': ...} entries, one per pure virtual method.
    Only classes declaring at least one pure virtual method are treated
    as interfaces; only top-level classes are inspected.
    """
    # This function requires python-clang-16
    try:
        from clang import cindex
    except ImportError:
        print("Error: The 'init' command requires 'libclang'. Please install it.", file=sys.stderr)
        print("Run: pip install python-clang-16", file=sys.stderr)
        # Also ensure the libclang.so/dylib is in your system's library path.
        raise typer.Exit(code=1)

    if not cindex.Config.loaded:
        try:
            # Attempt to find libclang automatically. This may need to be configured by the user.
            cindex.Config.set_library_file(cindex.conf.get_filename())
        except cindex.LibclangError as e:
            print(f"Error: libclang library not found. Please ensure it's installed and in your system's path.", file=sys.stderr)
            print(f"Details: {e}", file=sys.stderr)
            raise typer.Exit(code=1)

    index = cindex.Index.create()
    translation_unit = index.parse(str(header_path))

    interfaces = {}

    for node in translation_unit.cursor.get_children():
        if node.kind != cindex.CursorKind.CLASS_DECL:
            continue

        methods = []
        for member in node.get_children():
            # BUGFIX: the cindex binding's predicate is
            # is_pure_virtual_method(); the previous is_pure_virtual()
            # does not exist on Cursor and raised AttributeError.
            if member.kind == cindex.CursorKind.CXX_METHOD and member.is_pure_virtual_method():
                # BUGFIX: a method cursor exposes result_type, not return_type.
                arg_list = ', '.join(arg.type.spelling for arg in member.get_arguments())
                method_signature = f"{member.result_type.spelling} {member.spelling}({arg_list})"
                methods.append({"signature": method_signature, "body": "// TODO: Implement this method"})
                print(f"  Found pure virtual method: {method_signature}")

        # Only classes with at least one pure virtual method count as
        # interfaces (a class cursor has no is_pure_virtual() check).
        if methods:
            print(f"Found interface: {node.spelling}")
            interfaces[node.spelling] = methods

    return interfaces
def _validate_bundle_directory(directory: Path) -> list[str]:
    """
    Validates that a directory has the structure of a valid bundle.
    Returns a list of error strings. An empty list means success.

    Checks performed:
      * manifest.yaml exists and parses as a YAML mapping,
      * every sdist/binary path referenced by the manifest exists,
      * binary checksums (when present) match the file contents.
    """
    errors = []
    manifest_path = directory / "manifest.yaml"

    if not manifest_path.is_file():
        return ["Error: Missing 'manifest.yaml' in the root of the directory."]

    try:
        with open(manifest_path, 'r') as f:
            manifest = yaml.safe_load(f)
    except yaml.YAMLError as e:
        return [f"Error: Invalid YAML in manifest.yaml: {e}"]

    # BUGFIX: safe_load returns None for an empty file (and any scalar for
    # degenerate input); .get() on that raised AttributeError before.
    if not isinstance(manifest, dict):
        return ["Error: manifest.yaml must contain a YAML mapping."]

    # 1. Check that all files referenced in the manifest exist
    # ("or {}"/"or []" guards keys explicitly set to null in the YAML).
    for plugin_name, plugin_data in (manifest.get('bundlePlugins') or {}).items():
        plugin_data = plugin_data or {}
        sdist_info = plugin_data.get('sdist') or {}
        if sdist_info:
            sdist_path = sdist_info.get('path')
            if sdist_path and not (directory / sdist_path).is_file():
                errors.append(f"Missing sdist file for '{plugin_name}': {sdist_path}")

        for binary in plugin_data.get('binaries') or []:
            binary_path = binary.get('path')
            if binary_path and not (directory / binary_path).is_file():
                errors.append(f"Missing binary file for '{plugin_name}': {binary_path}")

            # 2. If checksums exist, validate them
            expected_checksum = binary.get('checksum')
            if binary_path and expected_checksum:
                file_to_check = directory / binary_path
                if file_to_check.is_file():
                    actual_checksum = "sha256:" + calculate_sha256(file_to_check)
                    if actual_checksum != expected_checksum:
                        errors.append(f"Checksum mismatch for '{binary_path}'")

    return errors
def plugin_pack(
    folder_path: Path = typer.Argument(..., help="The directory to pack into a bundle.", exists=True, file_okay=False, dir_okay=True, readable=True),
    name: str = typer.Option(None, "--name", "-n", help="The name for the output bundle file (without extension). Defaults to the folder name.")
):
    """
    Validates and packs a directory into a .fbundle archive.

    The directory is first validated (manifest present, referenced files
    exist, checksums match); on success every file is zipped into
    <parent>/<name>.fbundle and the bundle's signing status is reported.
    """
    typer.echo(f"--- Validating Bundle Directory: {folder_path.resolve()} ---")

    problems = _validate_bundle_directory(folder_path)
    if problems:
        typer.secho("Validation Failed. The following issues were found:", fg=typer.colors.RED, bold=True)
        for issue in problems:
            typer.echo(f"  - {issue}")
        raise typer.Exit(code=1)

    typer.secho("✅ Validation Successful.", fg=typer.colors.GREEN)
    typer.echo("\n--- Packing Bundle ---")

    # The archive lands next to the folder, never inside it.
    bundle_stem = name if name else folder_path.name
    bundle_file = folder_path.parent / f"{bundle_stem}.fbundle"

    if bundle_file.exists():
        typer.secho(f"Warning: Output file {bundle_file} already exists and will be overwritten.", fg=typer.colors.YELLOW)

    try:
        with zipfile.ZipFile(bundle_file, 'w', zipfile.ZIP_DEFLATED) as archive:
            for member in folder_path.rglob('*'):
                if member.is_file():
                    rel = member.relative_to(folder_path)
                    archive.write(member, rel)
                    typer.echo(f"  Adding: {rel}")

        typer.secho(f"\n✅ Successfully created bundle: {bundle_file.resolve()}", fg=typer.colors.GREEN, bold=True)

        # Final status report: signed means both a recorded author key
        # fingerprint and a signature file are present.
        with open(folder_path / "manifest.yaml", 'r') as f:
            manifest = yaml.safe_load(f)

        signed = 'bundleAuthorKeyFingerprint' in manifest and (folder_path / "manifest.sig").exists()
        if signed:
            typer.secho("Bundle Status: ✅ SIGNED", fg=typer.colors.GREEN)
        else:
            typer.secho("Bundle Status: 🟡 UNSIGNED", fg=typer.colors.YELLOW)

    except Exception as e:
        typer.secho(f"An unexpected error occurred during packing: {e}", fg=typer.colors.RED)
        raise typer.Exit(code=1)
def plugin_validate(
    plugin_path: Path = typer.Argument(
        ".",
        help="The path to the plugin directory to validate.",
        exists=True,
        file_okay=False,
        dir_okay=True,
        resolve_path=True
    )
):
    """
    Validates a plugin's structure and meson.build file.

    Runs a series of checks (meson.build present, project() and
    shared_library() declared, sources/headers present, tests defined)
    and prints a summary panel; exits with code 1 if any hard error
    was found.
    """
    console.print(Panel(f"Validating Plugin: [bold]{plugin_path.name}[/bold]", border_style="blue"))

    errors = 0
    warnings = 0

    def check(condition, success_msg, error_msg, is_warning=False):
        # Print a pass/fail line and tally the result; returns the condition
        # so callers can gate follow-up checks on it.
        nonlocal errors, warnings
        if condition:
            console.print(Text(f"✅ {success_msg}", style="green"))
            return True
        else:
            if is_warning:
                console.print(Text(f"⚠️ {error_msg}", style="yellow"))
                warnings += 1
            else:
                console.print(Text(f"❌ {error_msg}", style="red"))
                errors += 1
            return False

    # BUGFIX: default meson_content so check #5 below does not raise
    # NameError when meson.build is missing (it was only assigned inside
    # the guarded branch).
    meson_content = ""

    # 1. Check for meson.build
    meson_file = plugin_path / "meson.build"
    if check(meson_file.exists(), "Found meson.build file.", "Missing meson.build file."):
        meson_content = meson_file.read_text()
        # 2. Check for project() definition
        check("project(" in meson_content, "Contains project() definition.", "meson.build is missing a project() definition.", is_warning=True)
        # 3. Check for shared_library()
        check("shared_library(" in meson_content, "Contains shared_library() definition.", "meson.build does not appear to define a shared_library().")

    # 4. Check for source files
    has_cpp = any(plugin_path.rglob("*.cpp"))
    has_h = any(plugin_path.rglob("*.h")) or any(plugin_path.rglob("*.hpp"))
    check(has_cpp, "Found C++ source files (.cpp).", "No .cpp source files found in the directory.", is_warning=True)
    check(has_h, "Found C++ header files (.h/.hpp).", "No .h or .hpp header files found in the directory.", is_warning=True)

    # 5. Check for test definition (optional)
    check("test(" in meson_content, "Contains test() definitions.", "No test() definitions found in meson.build. Consider adding tests.", is_warning=True)

    # Final summary
    console.print("-" * 40)
    if errors == 0:
        console.print(Panel(
            f"[bold green]Validation Passed[/bold green]\nWarnings: {warnings}",
            title="Result",
            border_style="green"
        ))
    else:
        console.print(Panel(
            f"[bold red]Validation Failed[/bold red]\nErrors: {errors}\nWarnings: {warnings}",
            title="Result",
            border_style="red"
        ))
        raise typer.Exit(code=1)