fix(python-bindings): add Darwin duplicate-rpath patch and fix Python rpath pointing
Due to a current bug in meson-python, duplicate rpaths are registered in the shared object files it creates. The new macOS dynamic loader refuses to load shared objects with duplicate rpaths, so a small patch script now removes any duplicates. This is a temporary but effective fix (https://github.com/mesonbuild/meson-python/issues/813). Further, mixing pure Python and C++ code under the same module name caused import conflicts; the compiled extension has been renamed to _phys so that both the Python and the C++ code can be imported just fine now.
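The patch script itself is not shown in this commit view; a minimal sketch of the approach, assuming the stock macOS `otool` and `install_name_tool` utilities (the script name and parsing details are hypothetical):

# dedupe_rpaths.py -- hypothetical sketch, not the actual patch script from this commit.
import subprocess
import sys

def dedupe_rpaths(shared_object: str) -> None:
    # List load commands and collect every LC_RPATH path (paths with spaces not handled).
    out = subprocess.run(["otool", "-l", shared_object],
                         capture_output=True, text=True, check=True).stdout
    lines = out.splitlines()
    rpaths = [lines[i + 2].split()[1]      # "path <rpath> (offset N)" follows "cmd LC_RPATH"
              for i, line in enumerate(lines) if "LC_RPATH" in line]

    seen = set()
    for rpath in rpaths:
        if rpath in seen:
            # Note: some cctools versions delete one matching entry per call,
            # others delete all of them; adjust the loop for your toolchain.
            subprocess.run(["install_name_tool", "-delete_rpath", rpath, shared_object],
                           check=True)
        else:
            seen.add(rpath)

if __name__ == "__main__":
    dedupe_rpaths(sys.argv[1])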
@@ -6,7 +6,7 @@
 #include "composition/bindings.h"
 #include "config/bindings.h"
 
-PYBIND11_MODULE(fourdst, m) {
+PYBIND11_MODULE(_phys, m) {
     m.doc() = "Python bindings for the fourdst utility modules which are a part of the 4D-STAR project.";
 
     auto compMod = m.def_submodule("composition", "Composition-module bindings");
@@ -0,0 +1,11 @@
from ._phys import *
import sys

from ._phys import atomic, composition, constants, config

sys.modules['fourdst.atomic'] = atomic
sys.modules['fourdst.composition'] = composition
sys.modules['fourdst.constants'] = constants
sys.modules['fourdst.config'] = config

__all__ = ['atomic', 'composition', 'constants', 'config', 'core', 'cli']
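The `sys.modules` aliasing above is what lets the compiled submodules and the pure-Python packages coexist under one namespace; a quick usage sketch:

import fourdst.composition          # resolves to the compiled _phys.composition via the alias
from fourdst.cli.main import main   # pure-Python package, no longer shadowed by the extension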
src-pybind/fourdst/cli/__init__.py | 0 (new file)
src-pybind/fourdst/cli/bundle/__init__.py | 0 (new file)
src-pybind/fourdst/cli/bundle/clear.py | 23 (new file)
@@ -0,0 +1,23 @@
# fourdst/cli/bundle/clear.py
import typer
from pathlib import Path

from fourdst.core.bundle import clear_bundle

def bundle_clear(
    bundle_path: Path = typer.Argument(
        ...,
        help="The path to the .fbundle file to clear.",
        exists=True,
        readable=True,
        writable=True
    )
):
    """
    Removes all compiled binaries and signatures from a bundle.
    """
    try:
        clear_bundle(bundle_path, progress_callback=typer.echo)
    except Exception as e:
        typer.secho(f"An error occurred while clearing the bundle: {e}", fg=typer.colors.RED)
        raise typer.Exit(code=1)
src-pybind/fourdst/cli/bundle/create.py | 37 (new file)
@@ -0,0 +1,37 @@
# fourdst/cli/bundle/create.py

import typer
from pathlib import Path
import sys

from fourdst.core.bundle import create_bundle

def bundle_create(
    plugin_dirs: list[Path] = typer.Argument(..., help="A list of plugin project directories to include.", exists=True, file_okay=False),
    output_bundle: Path = typer.Option("bundle.fbundle", "--out", "-o", help="The path for the output bundle file."),
    bundle_name: str = typer.Option("MyPluginBundle", "--name", help="The name of the bundle."),
    bundle_version: str = typer.Option("0.1.0", "--ver", help="The version of the bundle."),
    bundle_author: str = typer.Option("Unknown", "--author", help="The author of the bundle."),
    bundle_comment: str = typer.Option(None, "--comment", help="A comment to embed in the bundle."),
    target_macos_version: str = typer.Option(None, "--target-macos-version", help="The minimum macOS version to target (e.g., '12.0').")
):
    """
    Builds and packages one or more plugin projects into a single .fbundle file.
    """
    def progress_callback(message):
        typer.secho(message, fg=typer.colors.BRIGHT_BLUE)

    try:
        create_bundle(
            plugin_dirs=plugin_dirs,
            output_bundle=output_bundle,
            bundle_name=bundle_name,
            bundle_version=bundle_version,
            bundle_author=bundle_author,
            bundle_comment=bundle_comment,
            target_macos_version=target_macos_version,
            progress_callback=progress_callback
        )
    except Exception as e:
        typer.secho(f"Error creating bundle: {e}", fg=typer.colors.RED, err=True)
        raise typer.Exit(code=1)
src-pybind/fourdst/cli/bundle/diff.py | 77 (new file)
@@ -0,0 +1,77 @@
# fourdst/cli/bundle/diff.py
import typer
from pathlib import Path
from rich.console import Console
from rich.panel import Panel
from rich.text import Text
from rich.table import Table

from fourdst.core.bundle import diff_bundle

console = Console()

def bundle_diff(
    bundle_a_path: Path = typer.Argument(..., help="The first bundle to compare.", exists=True, readable=True),
    bundle_b_path: Path = typer.Argument(..., help="The second bundle to compare.", exists=True, readable=True),
):
    """
    Compares two bundle files, showing differences in their manifests, signatures, and contents.
    """
    console.print(Panel(f"Comparing [bold blue]{bundle_a_path.name}[/bold blue] with [bold blue]{bundle_b_path.name}[/bold blue]"))

    try:
        results = diff_bundle(bundle_a_path, bundle_b_path, progress_callback=typer.echo)
    except Exception as e:
        typer.secho(f"Error comparing bundles: {e}", fg=typer.colors.RED)
        raise typer.Exit(code=1)

    # --- 1. Display Signature Differences ---
    sig_status = results['signature']['status']
    style_map = {
        'UNCHANGED': ('[green]UNCHANGED[/green]', 'green'),
        'REMOVED': ('[yellow]REMOVED[/yellow]', 'yellow'),
        'ADDED': ('[yellow]ADDED[/yellow]', 'yellow'),
        'CHANGED': ('[bold red]CHANGED[/bold red]', 'red'),
        'UNSIGNED': ('[dim]Both Unsigned[/dim]', 'dim'),
    }
    sig_text, sig_style = style_map.get(sig_status, (sig_status, 'white'))
    console.print(Panel(f"Signature Status: {sig_text}", title="[bold]Signature Verification[/bold]", border_style=sig_style, expand=False))

    # --- 2. Display Manifest Differences ---
    manifest_diff = results['manifest']['diff']
    if manifest_diff:
        diff_text = Text()
        for line in manifest_diff:
            if line.startswith('+'):
                diff_text.append(line, style="green")
            elif line.startswith('-'):
                diff_text.append(line, style="red")
            elif line.startswith('^'):
                diff_text.append(line, style="blue")
            else:
                diff_text.append(line)
        console.print(Panel(diff_text, title="[bold]Manifest Differences[/bold]", border_style="yellow"))
    else:
        console.print(Panel("[green]Manifests are identical.[/green]", title="[bold]Manifest[/bold]", border_style="green"))

    # --- 3. Display File Content Differences ---
    file_diffs = results['files']
    if file_diffs:
        table = Table(title="File Content Comparison")
        table.add_column("File Path", style="cyan")
        table.add_column("Status", style="magenta")
        table.add_column("Details", style="yellow")

        status_map = {
            'REMOVED': '[red]REMOVED[/red]',
            'ADDED': '[green]ADDED[/green]',
            'MODIFIED': '[yellow]MODIFIED[/yellow]'
        }

        for diff in file_diffs:
            status_text = status_map.get(diff['status'], diff['status'])
            table.add_row(diff['path'], status_text, diff['details'])

        console.print(table)
    else:
        console.print(Panel("[green]All file contents are identical.[/green]", title="[bold]File Contents[/bold]", border_style="green"))
src-pybind/fourdst/cli/bundle/fill.py | 190 (new file)
@@ -0,0 +1,190 @@
# fourdst/cli/bundle/fill.py

import typer
import shutil
import datetime
import yaml
import zipfile
from pathlib import Path
import questionary
from prompt_toolkit.key_binding import KeyBindings
from questionary.prompts.checkbox import checkbox
import subprocess
import sys
import traceback

try:
    import docker
except ImportError:
    docker = None  # Docker is an optional dependency for the 'fill' command

from rich.console import Console
from rich.panel import Panel

console = Console()

from fourdst.core.bundle import get_fillable_targets, fill_bundle
from fourdst.cli.common.utils import run_command_rich  # Keep for progress display if needed

custom_key_bindings = KeyBindings()

def _is_arch(target_info, arch_keywords):
    """Helper to check if a target's info contains architecture keywords."""
    # Combine all relevant string values from the target dict to check against.
    text_to_check = ""
    if 'triplet' in target_info:
        text_to_check += target_info['triplet'].lower()
    if 'docker_image' in target_info:
        text_to_check += target_info['docker_image'].lower()
    if 'cross_file' in target_info:
        # Convert path to string for searching
        text_to_check += str(target_info['cross_file']).lower()

    if not text_to_check:
        return False

    return any(keyword in text_to_check for keyword in arch_keywords)

@custom_key_bindings.add('c-a')
def _(event):
    """
    Handler for Ctrl+A. Selects all ARM targets.
    """
    control = event.app.layout.current_control
    # Keywords to identify ARM architectures
    arm_keywords = ['aarch64', 'arm64']

    for i, choice in enumerate(control.choices):
        # The choice.value is the dictionary we passed to questionary.Choice
        target_info = choice.value.get('target', {})
        if _is_arch(target_info, arm_keywords):
            # Add the index to the set of selected items
            if i not in control.selected_indexes:
                control.selected_indexes.add(i)

    # Redraw the UI to show the new selections
    event.app.invalidate()

@custom_key_bindings.add('c-x')
def _(event):
    """
    Handler for Ctrl+X. Selects all x86 targets.
    """
    control = event.app.layout.current_control
    # Keywords to identify x86 architectures
    x86_keywords = ['x86_64', 'x86', 'amd64']  # 'amd64' is a common alias in Docker

    for i, choice in enumerate(control.choices):
        target_info = choice.value.get('target', {})
        if _is_arch(target_info, x86_keywords):
            if i not in control.selected_indexes:
                control.selected_indexes.add(i)

    event.app.invalidate()

def bundle_fill(bundle_path: Path = typer.Argument(..., help="The .fbundle file to fill with new binaries.", exists=True)):
    """
    Builds new binaries for the current host or cross-targets from the bundle's source.
    """
    staging_dir = Path(f"temp_fill_{bundle_path.stem}")
    if staging_dir.exists():
        shutil.rmtree(staging_dir)

    console.print(Panel(f"[bold]Filling Bundle:[/bold] {bundle_path.name}", expand=False, border_style="blue"))

    # 1. Find available targets and missing binaries using the core function
    try:
        response = get_fillable_targets(bundle_path)
        if not response.get('success', False):
            console.print(f"[red]Error analyzing bundle: {response.get('error', 'Unknown error')}[/red]")
            raise typer.Exit(code=1)

        fillable_targets = response.get('data', {})
    except Exception as e:
        console.print(f"[red]Error analyzing bundle: {e}[/red]")
        raise typer.Exit(code=1)

    if not fillable_targets:
        console.print("[green]✅ Bundle is already full for all available build targets.[/green]")
        raise typer.Exit()

    # 2. Create interactive choices for the user
    build_options = []
    BOLD = "\033[1m"
    RESET = "\033[0m"
    CYAN = "\033[36m"
    for plugin_name, targets in fillable_targets.items():
        for target in targets:
            if target['type'] == 'docker':
                display_name = f"Docker: {target['docker_image']}"
            elif target['type'] == 'cross':
                display_name = f"Cross-compile: {Path(target['cross_file']).name}"
            else:  # native
                display_name = f"Native: {target['triplet']}"

            build_options.append({
                "name": f"Build {plugin_name} for {display_name}",
                "value": {"plugin_name": plugin_name, "target": target}
            })

    # 3. Prompt user to select which targets to build
    if not build_options:
        console.print("[yellow]No buildable targets found.[/yellow]")
        raise typer.Exit()

    choices = [
        questionary.Choice(title=opt['name'], value=opt['value'])
        for opt in build_options
    ]

    message = (
        "Select which missing binaries to build:\n"
        "  (Press [Ctrl+A] to select all ARM, [Ctrl+X] to select all x86)"
    )

    # --- START OF FIX ---
    # 1. Instantiate the Checkbox class directly instead of using the shortcut.
    prompt = checkbox(
        message,
        choices=choices,
        # key_bindings=custom_key_bindings
    )

    # 2. Use .unsafe_ask() to run the prompt object.
    selected_jobs = prompt.unsafe_ask()
    # --- END OF FIX ---

    if not selected_jobs:
        console.print("No binaries selected to build. Exiting.")
        raise typer.Exit()

    targets_to_build = {}
    for job in selected_jobs:
        plugin_name = job['plugin_name']
        target = job['target']
        if plugin_name not in targets_to_build:
            targets_to_build[plugin_name] = []
        targets_to_build[plugin_name].append(target)

    try:
        console.print("--- Starting build process ---")
        fill_bundle(
            bundle_path,
            targets_to_build,
            progress_callback=lambda msg: console.print(f"[dim]  {msg}[/dim]")
        )
        console.print("--- Build process finished ---")
        console.print(f"[green]✅ Bundle '{bundle_path.name}' has been filled successfully.[/green]")
        console.print("[yellow]⚠️ If the bundle was signed, the signature is now invalid. Please re-sign.[/yellow]")

    except Exception as e:
        console.print(f"[red]An error occurred during the build process: {e}[/red]")
        tb_str = traceback.format_exc()
        console.print(Panel(
            tb_str,
            title="Traceback",
            border_style="red",
            expand=False
        ))
        raise typer.Exit(code=1)
src-pybind/fourdst/cli/bundle/inspect.py | 119 (new file)
@@ -0,0 +1,119 @@
# fourdst/cli/bundle/inspect.py

import typer
from pathlib import Path
from rich.console import Console
from rich.panel import Panel
from rich.table import Table
from rich.text import Text

from fourdst.core.bundle import inspect_bundle

console = Console()

def display_inspection_report(report: dict):
    """
    Displays the inspection report using rich components.
    """
    manifest = report.get('manifest', {})
    host_info = report.get('host_info', {})
    validation = report.get('validation', {})
    signature = report.get('signature', {})
    plugins = report.get('plugins', {})

    # --- Header ---
    console.print(Panel(f"Inspection Report for [bold blue]{manifest.get('bundleName', 'N/A')}[/bold blue]", expand=False))

    meta_table = Table.grid(padding=(0, 2))
    meta_table.add_column()
    meta_table.add_column()
    meta_table.add_row("Name:", manifest.get('bundleName', 'N/A'))
    meta_table.add_row("Version:", manifest.get('bundleVersion', 'N/A'))
    meta_table.add_row("Author:", manifest.get('bundleAuthor', 'N/A'))
    meta_table.add_row("Bundled On:", manifest.get('bundledOn', 'N/A'))
    meta_table.add_row("Host ABI:", Text(host_info.get('abi_signature', 'N/A'), style="dim"))
    meta_table.add_row("Host Arch:", Text(host_info.get('triplet', 'N/A'), style="dim"))
    console.print(meta_table)
    console.print("─" * 50)

    # --- Trust Status ---
    status = signature.get('status', 'UNKNOWN')
    if status == 'TRUSTED':
        console.print(Panel(f"[bold green]✅ Trust Status: SIGNED and TRUSTED[/bold green]\nKey: [dim]{signature.get('key_path')}[/dim]", expand=False, border_style="green"))
    elif status == 'UNSIGNED':
        console.print(Panel("[bold yellow]🟡 Trust Status: UNSIGNED[/bold yellow]", expand=False, border_style="yellow"))
    elif status == 'UNTRUSTED':
        console.print(Panel(f"[bold yellow]⚠️ Trust Status: SIGNED but UNTRUSTED AUTHOR[/bold yellow]\nFingerprint: [dim]{signature.get('fingerprint')}[/dim]", expand=False, border_style="yellow"))
    elif status == 'INVALID':
        console.print(Panel(f"[bold red]❌ Trust Status: INVALID SIGNATURE[/bold red]\n{signature.get('reason')}", expand=False, border_style="red"))
    elif status == 'TAMPERED':
        console.print(Panel(f"[bold red]❌ Trust Status: TAMPERED[/bold red]\n{signature.get('reason')}", expand=False, border_style="red"))
    elif status == 'UNSUPPORTED':
        console.print(Panel(f"[bold red]❌ Trust Status: CRYPTOGRAPHY NOT SUPPORTED[/bold red]\n{signature.get('reason')}", expand=False, border_style="red"))
    else:
        console.print(Panel(f"[bold red]❌ Trust Status: ERROR[/bold red]\n{signature.get('reason')}", expand=False, border_style="red"))

    # --- Validation Issues ---
    errors = validation.get('errors', [])
    warnings = validation.get('warnings', [])
    if errors or warnings:
        console.print("─" * 50)
        console.print("[bold]Validation Issues:[/bold]")
        for error in errors:
            console.print(Text(f"  - [red]Error:[/red] {error}"))
        for warning in warnings:
            console.print(Text(f"  - [yellow]Warning:[/yellow] {warning}"))

    # --- Plugin Details ---
    console.print("─" * 50)
    console.print("[bold]Available Plugins:[/bold]")
    if not plugins:
        console.print("  No plugins found in bundle.")

    for name, data in plugins.items():
        console.print(Panel(f"Plugin: [bold]{name}[/bold]", expand=False, border_style="blue"))
        console.print(f"  Source Dist: [dim]{data.get('sdist_path', 'N/A')}[/dim]")

        binaries = data.get('binaries', [])
        if not binaries:
            console.print("  Binaries: None")
        else:
            bin_table = Table(title="Binaries", show_header=True, header_style="bold magenta")
            bin_table.add_column("Path")
            bin_table.add_column("Architecture")
            bin_table.add_column("ABI")
            bin_table.add_column("Host Compatible?", style="cyan")
            bin_table.add_column("Reason for Incompatibility", style="red")

            for b in binaries:
                plat = b.get('platform', {})
                style = "green" if b.get('is_compatible') else "default"
                compat_text = "✅ Yes" if b.get('is_compatible') else "No"
                reason = b.get('incompatibility_reason', '') or ''
                bin_table.add_row(
                    Text(b.get('path', 'N/A'), style=style),
                    Text(plat.get('triplet', 'N/A'), style=style),
                    Text(plat.get('abi_signature', 'N/A'), style=style),
                    Text(compat_text, style="cyan"),
                    Text(reason, style="red")
                )
            console.print(bin_table)

        if not data.get('compatible_found'):
            console.print(Text("  Note: No compatible binary found for the current system.", style="yellow"))
            console.print(Text("        Run 'fourdst bundle fill' to build one.", style="yellow"))

def bundle_inspect(bundle_path: Path = typer.Argument(..., help="The .fbundle file to inspect.", exists=True, resolve_path=True)):
    """
    Inspects a bundle, validating its contents and cryptographic signature.
    """
    try:
        report = inspect_bundle(bundle_path)
        display_inspection_report(report)
        # Exit with an error code if validation failed, to support scripting
        if report.get('validation', {}).get('status') != 'passed':
            raise typer.Exit(code=1)
    except typer.Exit:
        # Re-raise the intentional exit; it subclasses Exception and would
        # otherwise be swallowed by the handler below.
        raise
    except Exception:
        console.print_exception(show_locals=True)
        raise typer.Exit(code=1)
src-pybind/fourdst/cli/bundle/sign.py | 26 (new file)
@@ -0,0 +1,26 @@
# fourdst/cli/bundle/sign.py

import typer
from pathlib import Path

from fourdst.core.bundle import sign_bundle

def bundle_sign(
    bundle_path: Path = typer.Argument(..., help="The .fbundle file to sign.", exists=True),
    private_key: Path = typer.Option(..., "--key", "-k", help="Path to the author's private signing key.", exists=True)
):
    """
    Signs a bundle with an author's private key.
    """
    def progress_callback(message):
        typer.secho(message, fg=typer.colors.BRIGHT_BLUE)

    try:
        sign_bundle(
            bundle_path=bundle_path,
            private_key=private_key,
            progress_callback=progress_callback
        )
    except Exception as e:
        typer.secho(f"Error signing bundle: {e}", fg=typer.colors.RED, err=True)
        raise typer.Exit(code=1)
src-pybind/fourdst/cli/bundle/validate.py | 80 (new file)
@@ -0,0 +1,80 @@
# fourdst/cli/bundle/validate.py
import typer
from pathlib import Path
from rich.console import Console
from rich.panel import Panel
from rich.table import Table
from rich.text import Text

from fourdst.core.bundle import validate_bundle

console = Console()

def bundle_validate(
    bundle_path: Path = typer.Argument(
        ...,
        help="The .fbundle file to validate.",
        exists=True,
        resolve_path=True,
        file_okay=True,
        dir_okay=False
    )
):
    """
    Validates the integrity and checksums of a .fbundle file.
    """
    def progress_callback(message):
        # For a CLI, we can choose to show progress or just wait for the final report.
        # In this case, the final report is more structured and useful.
        pass

    try:
        results = validate_bundle(
            bundle_path=bundle_path,
            progress_callback=progress_callback
        )

        console.print(Panel(f"Validation Report for: [bold]{bundle_path.name}[/bold]", border_style="blue"))

        if results['errors']:
            console.print(Panel("Errors", border_style="red", expand=False))
            for error in results['errors']:
                console.print(Text(f"❌ {error}", style="red"))

        if results['warnings']:
            console.print(Panel("Warnings", border_style="yellow", expand=False))
            for warning in results['warnings']:
                console.print(Text(f"⚠️ {warning}", style="yellow"))

        # Summary Table
        summary_table = Table(title="Validation Summary")
        summary_table.add_column("Result")
        summary_table.add_column("Errors", justify="right")
        summary_table.add_column("Warnings", justify="right")

        status = results.get('status', 'failed')
        summary = results.get('summary', {'errors': len(results['errors']), 'warnings': len(results['warnings'])})

        if status == 'passed':
            result_text = "Passed"
            style = "green"
        else:
            result_text = "Failed"
            style = "red"

        summary_table.add_row(
            f"[bold {style}]{result_text}[/bold {style}]",
            str(summary['errors']),
            str(summary['warnings'])
        )
        console.print(summary_table)

        if status != 'passed':
            raise typer.Exit(code=1)
        else:
            console.print("\n[bold green]✅ Bundle is valid.[/bold green]")

    except typer.Exit:
        # Re-raise the intentional exit so the generic handler below does not
        # report it as an unexpected error.
        raise
    except Exception as e:
        # Catch exceptions from the core function itself
        console.print(Panel(f"[bold red]An unexpected error occurred:[/bold red]\n{e}", title="Validation Error"))
        raise typer.Exit(code=1)
src-pybind/fourdst/cli/cache/__init__.py | 0 (new file, vendored)
src-pybind/fourdst/cli/cache/clear.py | 20 (new file, vendored)
@@ -0,0 +1,20 @@
# fourdst/cli/cache/clear.py

import typer
import shutil
from fourdst.cli.common.config import CACHE_PATH

cache_app = typer.Typer()

@cache_app.command("clear")
def cache_clear():
    """
    Clears all cached data, including the ABI signature.
    Run this if you have updated your C++ compiler.
    """
    if CACHE_PATH.exists():
        shutil.rmtree(CACHE_PATH)
        print("✅ Local cache cleared.")
    else:
        print("No cache found to clear.")
src-pybind/fourdst/cli/common/__init__.py | 0 (new file)
src-pybind/fourdst/cli/common/config.py | 11 (new file)
@@ -0,0 +1,11 @@
# fourdst/cli/common/config.py

# This file is now a proxy for the core config to maintain compatibility.
from fourdst.core.config import (
    FOURDST_CONFIG_DIR,
    LOCAL_TRUST_STORE_PATH,
    CROSS_FILES_PATH,
    CACHE_PATH,
    ABI_CACHE_FILE,
    DOCKER_BUILD_IMAGES
)
src-pybind/fourdst/cli/common/templates.py | 129 (new file)
@@ -0,0 +1,129 @@
# fourdst/cli/common/templates.py

ABI_DETECTOR_CPP_SRC = """
#include <iostream>
#include <string>
#include <vector>

#ifdef __GNUC__
    #if __has_include(<gnu/libc-version.h>)
        #include <gnu/libc-version.h>
    #endif
#endif

int main() {
    std::string os;
    std::string compiler;
    std::string compiler_version;
    std::string stdlib;
    std::string stdlib_version;
    std::string abi;

#if defined(__APPLE__) && defined(__MACH__)
    os = "macos";
#elif defined(__linux__)
    os = "linux";
#elif defined(_WIN32)
    os = "windows";
#else
    os = "unknown_os";
#endif

#if defined(__clang__)
    compiler = "clang";
    compiler_version = __clang_version__;
#elif defined(__GNUC__)
    compiler = "gcc";
    compiler_version = std::to_string(__GNUC__) + "." + std::to_string(__GNUC_MINOR__) + "." + std::to_string(__GNUC_PATCHLEVEL__);
#elif defined(_MSC_VER)
    compiler = "msvc";
    compiler_version = std::to_string(_MSC_VER);
#else
    compiler = "unknown_compiler";
    compiler_version = "0";
#endif

#if defined(_LIBCPP_VERSION)
    stdlib = "libc++";
    stdlib_version = std::to_string(_LIBCPP_VERSION);
    abi = "libc++_abi"; // On libc++, the ABI is tightly coupled with the library itself.
#elif defined(__GLIBCXX__)
    stdlib = "libstdc++";
    #if defined(_GLIBCXX_USE_CXX11_ABI)
        abi = _GLIBCXX_USE_CXX11_ABI == 1 ? "cxx11_abi" : "pre_cxx11_abi";
    #else
        abi = "pre_cxx11_abi";
    #endif
    #if __has_include(<gnu/libc-version.h>)
        stdlib_version = gnu_get_libc_version();
    #else
        stdlib_version = "unknown";
    #endif
#else
    stdlib = "unknown_stdlib";
    abi = "unknown_abi";
#endif

    std::cout << "os=" << os << std::endl;
    std::cout << "compiler=" << compiler << std::endl;
    std::cout << "compiler_version=" << compiler_version << std::endl;
    std::cout << "stdlib=" << stdlib << std::endl;
    if (!stdlib_version.empty()) {
        std::cout << "stdlib_version=" << stdlib_version << std::endl;
    }
    // Always print the ABI key for consistent parsing
    std::cout << "abi=" << abi << std::endl;

    return 0;
}
"""

ABI_DETECTOR_MESON_SRC = """
project('abi-detector', 'cpp', default_options : ['cpp_std=c++23'])
executable('detector', 'main.cpp')
"""

GITIGNORE_CONTENT = """# General
*.swp
*~
.DS_Store

# Python
__pycache__/
*.pyc
*.pyo
*.pyd
.Python
.venv/
venv/
env/
*.egg-info/
dist/

# C++ Build Artifacts
*.o
*.a
*.so
*.dylib
*.dll
*.lib
*.exe

# Meson Build System
# Ignore any directory containing meson-private, which is a reliable marker
**/meson-private/
# Also ignore common build directory names
build/
builddir/

# Subprojects - ignore all subdirectories except 'packagefiles' and root .wrap files
/subprojects/*
!/subprojects/packagefiles
!/subprojects/*.wrap

# Editor specific
.vscode/
.idea/
*.sublime-project
*.sublime-workspace
"""
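The detector prints simple key=value lines, which the CLI presumably folds into the compiler-stdlib-version-abi signature consumed by is_abi_compatible() in utils.py below. A minimal parsing sketch, assuming that four-field format (the exact assembly step is not shown in this diff):

# Hypothetical sketch: fold the detector's key=value output into an ABI signature.
def parse_detector_output(output: str) -> str:
    fields = dict(line.split("=", 1) for line in output.strip().splitlines())
    # Four dash-separated fields, matching what is_abi_compatible() expects.
    return "-".join([
        fields.get("compiler", "unknown_compiler"),
        fields.get("stdlib", "unknown_stdlib"),
        fields.get("stdlib_version", "0"),
        fields.get("abi", "unknown_abi"),
    ])

# e.g. "gcc-libstdc++-2.41-cxx11_abi"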
src-pybind/fourdst/cli/common/utils.py | 212 (new file)
@@ -0,0 +1,212 @@
# fourdst/cli/common/utils.py

import typer
import os
import sys
import hashlib
import subprocess
from pathlib import Path
import importlib.resources

from rich.console import Console
from rich.panel import Panel

console = Console()

def run_command_rich(command: list[str], cwd: Path = None, check=True, env: dict = None):
    """
    Runs a command and displays its output live using rich.
    """
    command_str = ' '.join(command)
    console.print(Panel(f"Running: [bold cyan]{command_str}[/bold cyan]", title="Command", border_style="blue"))

    process = subprocess.Popen(
        command,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        text=True,
        cwd=cwd,
        env=env,
        bufsize=1,  # line-buffered
        universal_newlines=True
    )

    # Read and print stdout and stderr line by line
    if process.stdout:
        for line in iter(process.stdout.readline, ''):
            console.print(line.strip())

    if process.stderr:
        for line in iter(process.stderr.readline, ''):
            console.print(f"[yellow]{line.strip()}[/yellow]")

    process.wait()

    if check and process.returncode != 0:
        console.print(Panel(f"Command failed with exit code {process.returncode}", title="[bold red]Error[/bold red]", border_style="red"))
        raise subprocess.CalledProcessError(process.returncode, command)

    return process

def get_template_content(template_name: str) -> str:
    """Safely reads content from a template file packaged with the CLI."""
    try:
        return importlib.resources.files('fourdst.cli.templates').joinpath(template_name).read_text()
    except FileNotFoundError:
        print(f"Error: Template file '{template_name}' not found.", file=sys.stderr)
        sys.exit(1)

def run_command(command: list[str], cwd: Path = None, check=True, display_output: bool = False, env: dict = None):
    """Runs a command, optionally displaying its output and using a custom environment."""
    command_str = ' '.join(command)

    try:
        result = subprocess.run(command, check=check, capture_output=True, text=True, cwd=cwd, env=env)

        if display_output and (result.stdout or result.stderr):
            output_text = ""
            if result.stdout:
                output_text += result.stdout.strip()
            if result.stderr:
                output_text += f"\n[yellow]{result.stderr.strip()}[/yellow]"

            console.print(Panel(
                output_text,
                title=f"Output from: `{command_str}`",
                border_style="blue",
                expand=False
            ))

        return result
    except subprocess.CalledProcessError as e:
        if check:
            output_text = ""
            if e.stdout:
                output_text += f"[bold]--- STDOUT ---[/bold]\n{e.stdout.strip()}"
            if e.stderr:
                output_text += f"\n[bold]--- STDERR ---[/bold]\n{e.stderr.strip()}"

            console.print(Panel(
                output_text,
                title=f"Error running: `{command_str}`",
                border_style="red",
                expand=False
            ))
            raise typer.Exit(code=1)
        return e

def is_abi_compatible(host_abi: str, binary_abi: str) -> bool:
    """
    Checks if a binary's ABI is compatible with the host's ABI.

    Compatibility is defined as:
    1. Same compiler, stdlib, and ABI name.
    2. Host's stdlib version is >= binary's stdlib version.
    """
    try:
        host_parts = host_abi.split('-')
        bin_parts = binary_abi.split('-')

        if len(host_parts) != 4 or len(bin_parts) != 4:
            # Fallback to exact match for non-standard ABI strings
            return host_abi == binary_abi

        host_compiler, host_stdlib, host_version, host_abi_name = host_parts
        bin_compiler, bin_stdlib, bin_version, bin_abi_name = bin_parts

        # 1. Check for exact match on compiler, stdlib, and abi name
        if not (host_compiler == bin_compiler and host_stdlib == bin_stdlib and host_abi_name == bin_abi_name):
            return False

        # 2. Compare stdlib versions (e.g., "2.41" vs "2.28")
        # We can treat them as dot-separated integers for comparison.
        host_v_parts = list(map(int, host_version.split('.')))
        bin_v_parts = list(map(int, bin_version.split('.')))

        # Pad shorter version with zeros for safe comparison
        max_len = max(len(host_v_parts), len(bin_v_parts))
        host_v_parts.extend([0] * (max_len - len(host_v_parts)))
        bin_v_parts.extend([0] * (max_len - len(bin_v_parts)))

        return host_v_parts >= bin_v_parts

    except (ValueError, IndexError):
        # If parsing fails, fall back to a simple string comparison
        return host_abi == binary_abi

def calculate_sha256(file_path: Path) -> str:
    """Calculates the SHA256 checksum of a file."""
    sha256_hash = hashlib.sha256()
    with open(file_path, "rb") as f:
        for byte_block in iter(lambda: f.read(4096), b""):
            sha256_hash.update(byte_block)
    return sha256_hash.hexdigest()

def parse_cpp_header(header_path: Path):
    """
    Parses a C++ header file using libclang to find classes and their pure virtual methods.
    """
    # This function requires python-clang-16
    try:
        from clang import cindex
    except ImportError:
        print("Error: The 'init' command requires 'libclang'. Please install it.", file=sys.stderr)
        print("Run: pip install python-clang-16", file=sys.stderr)
        # Also ensure the libclang.so/dylib is in your system's library path.
        raise typer.Exit(code=1)

    if not cindex.Config.loaded:
        try:
            # Attempt to find libclang automatically. This may need to be configured by the user.
            cindex.Config.set_library_file(cindex.conf.get_filename())
        except cindex.LibclangError as e:
            print("Error: libclang library not found. Please ensure it's installed and in your system's path.", file=sys.stderr)
            print(f"Details: {e}", file=sys.stderr)
            raise typer.Exit(code=1)

    # --- Get compiler flags from pkg-config to help clang find includes ---
    try:
        pkg_config_proc = subprocess.run(
            ['pkg-config', '--cflags', 'fourdst_plugin'],
            capture_output=True,
            text=True,
            check=True
        )
        # Split the flags string into a list of arguments for libclang
        compiler_flags = pkg_config_proc.stdout.strip().split()
        print(f"Using compiler flags from pkg-config: {' '.join(compiler_flags)}")
    except (subprocess.CalledProcessError, FileNotFoundError):
        print("Warning: `pkg-config --cflags fourdst_plugin` failed. Parsing may not succeed if the header has dependencies.", file=sys.stderr)
        print("Please ensure 'pkg-config' is installed and 'fourdst_plugin.pc' is in your PKG_CONFIG_PATH.", file=sys.stderr)
        compiler_flags = []

    index = cindex.Index.create()
    # Add the pkg-config flags to the parser arguments
    translation_unit = index.parse(str(header_path), args=['-x', 'c++', '-std=c++23'] + compiler_flags)

    interfaces = {}
    for cursor in translation_unit.cursor.walk_preorder():
        if cursor.kind == cindex.CursorKind.CLASS_DECL and cursor.is_definition():
            class_name = cursor.spelling
            methods = []
            for child in cursor.get_children():
                if child.kind == cindex.CursorKind.CXX_METHOD and child.is_pure_virtual_method():
                    method_name = child.spelling
                    result_type = child.result_type.spelling
                    # Recreate the full method signature
                    param_str = ", ".join(f"{p.type.spelling} {p.spelling}" for p in child.get_arguments())
                    const_qualifier = " const" if child.is_const_method() else ""

                    signature = f"{result_type} {method_name}({param_str}){const_qualifier}"

                    # Generate a placeholder body
                    body = f"    // TODO: Implement the {method_name} method.\n"
                    if result_type != "void":
                        body += f"    return {{}};"  # Default return

                    methods.append({'signature': signature, 'body': body})

            if methods:  # Only consider classes with pure virtual methods as interfaces
                interfaces[class_name] = methods

    return interfaces
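For example, under the four-field scheme above, a host running glibc 2.41 can load a binary built against glibc 2.28, but not the other way around:

# Same compiler/stdlib/ABI name; host stdlib 2.41 >= binary stdlib 2.28:
is_abi_compatible("gcc-libstdc++-2.41-cxx11_abi", "gcc-libstdc++-2.28-cxx11_abi")      # True
# Host stdlib older than the binary's:
is_abi_compatible("gcc-libstdc++-2.28-cxx11_abi", "gcc-libstdc++-2.41-cxx11_abi")      # False
# ABI name mismatch is always incompatible:
is_abi_compatible("gcc-libstdc++-2.41-cxx11_abi", "gcc-libstdc++-2.41-pre_cxx11_abi")  # False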
src-pybind/fourdst/cli/keys/__init__.py | 0 (new file)
src-pybind/fourdst/cli/keys/add.py | 20 (new file)
@@ -0,0 +1,20 @@
# fourdst/cli/keys/add.py
import typer
from pathlib import Path
from fourdst.core.keys import add_key

def keys_add(
    key_path: Path = typer.Argument(..., help="Path to the public key file to add.", exists=True, readable=True)
):
    """Adds a single public key to the local trust store."""
    result = add_key(key_path)

    if result["success"]:
        if result["already_existed"]:
            typer.secho(f"Key '{result['key_name']}' with same content already exists.", fg=typer.colors.YELLOW)
        else:
            typer.secho(f"✅ Key '{result['key_name']}' added to manual trust store.", fg=typer.colors.GREEN)
            typer.echo(f"Fingerprint: {result['fingerprint']}")
    else:
        typer.secho(f"Error: {result['error']}", fg=typer.colors.RED)
        raise typer.Exit(code=1)
src-pybind/fourdst/cli/keys/generate.py | 38 (new file)
@@ -0,0 +1,38 @@
# fourdst/cli/keys/generate.py

import typer
from pathlib import Path
from fourdst.core.keys import generate_key

keys_app = typer.Typer()

@keys_app.command("generate")
def keys_generate(
    key_name: str = typer.Option("author_key", "--name", "-n", help="The base name for the generated key files."),
    key_type: str = typer.Option("ed25519", "--type", "-t", help="Type of key to generate (ed25519|rsa).", case_sensitive=False),
    output_dir: str = typer.Option(".", "--output", "-o", help="Directory to save the generated keys.")
):
    """
    Generates a new Ed25519 or RSA key pair for signing bundles.
    """
    def progress_callback(message):
        typer.echo(message)

    result = generate_key(
        key_name=key_name,
        key_type=key_type,
        output_dir=Path(output_dir),
        progress_callback=progress_callback
    )

    if result["success"]:
        typer.echo("\n✅ PEM and OpenSSH-compatible keys generated successfully!")
        typer.echo(f"   -> Private Key (KEEP SECRET): {result['private_key_path']}")
        typer.echo(f"   -> Public Key (SHARE): {result['public_key_path']}")
        typer.echo(f"   -> OpenSSH Public Key: {result['openssh_public_key_path']}")
        typer.echo(f"   -> Key Type: {result['key_type'].upper()}")
        typer.echo(f"   -> Fingerprint: {result['fingerprint']}")
        typer.echo("\nShare the public key with users who need to trust your bundles.")
    else:
        typer.secho(f"Error: {result['error']}", fg=typer.colors.RED)
        raise typer.Exit(code=1)
src-pybind/fourdst/cli/keys/list.py | 25 (new file)
@@ -0,0 +1,25 @@
# fourdst/cli/keys/list.py
import typer
from fourdst.core.keys import list_keys

def keys_list():
    """Lists all trusted public keys."""
    result = list_keys()

    if not result["success"]:
        typer.secho(f"Error: {result['error']}", fg=typer.colors.RED)
        raise typer.Exit(code=1)

    if result["total_count"] == 0:
        typer.echo("No trusted keys found.")
        return

    typer.echo(f"Found {result['total_count']} trusted keys:\n")

    for source_name, keys in result["keys"].items():
        typer.secho(f"--- Source: {source_name} ---", bold=True)
        for key_info in keys:
            typer.echo(f"  - {key_info['name']}")
            typer.echo(f"    Fingerprint: {key_info['fingerprint']}")
            typer.echo(f"    Size: {key_info['size_bytes']} bytes")
        typer.echo()  # Empty line between sources
src-pybind/fourdst/cli/keys/remote/__init__.py | 1 (new file)
@@ -0,0 +1 @@
# fourdst/cli/keys/remote/__init__.py
src-pybind/fourdst/cli/keys/remote/add.py | 16 (new file)
@@ -0,0 +1,16 @@
# fourdst/cli/keys/remote/add.py
import typer
from fourdst.core.keys import add_remote_source

def remote_add(
    url: str = typer.Argument(..., help="The URL of the Git repository."),
    name: str = typer.Argument(..., help="A local name for the remote.")
):
    """Adds a new remote key source."""
    result = add_remote_source(name, url)

    if result["success"]:
        typer.secho(f"✅ Remote '{result['name']}' added.", fg=typer.colors.GREEN)
    else:
        typer.secho(f"Error: {result['error']}", fg=typer.colors.RED)
        raise typer.Exit(code=1)
src-pybind/fourdst/cli/keys/remote/list.py | 24 (new file)
@@ -0,0 +1,24 @@
# fourdst/cli/keys/remote/list.py
import typer
from fourdst.core.keys import get_remote_sources

def remote_list():
    """Lists all configured remote key sources."""
    result = get_remote_sources()

    if not result["success"]:
        typer.secho(f"Error: {result['error']}", fg=typer.colors.RED)
        raise typer.Exit(code=1)

    if not result["remotes"]:
        typer.echo("No remotes configured.")
        return

    typer.secho("Configured Key Remotes:", bold=True)
    for remote in result["remotes"]:
        status = "✅" if remote["exists"] else "❌"
        typer.echo(f"  {status} {remote['name']}: {remote['url']}")
        if remote["exists"]:
            typer.echo(f"    Keys: {remote['keys_count']}")
        else:
            typer.echo("    Status: Not synced yet")
src-pybind/fourdst/cli/keys/remote/remove.py | 15 (new file)
@@ -0,0 +1,15 @@
# fourdst/cli/keys/remote/remove.py
import typer
from fourdst.core.keys import remove_remote_source

def remote_remove(
    name: str = typer.Argument(..., help="The name of the remote to remove.")
):
    """Removes a remote key source."""
    result = remove_remote_source(name)

    if result["success"]:
        typer.secho(f"✅ Remote '{result['name']}' removed.", fg=typer.colors.GREEN)
    else:
        typer.secho(f"Error: {result['error']}", fg=typer.colors.RED)
        raise typer.Exit(code=1)
src-pybind/fourdst/cli/keys/remove.py | 53 (new file)
@@ -0,0 +1,53 @@
# fourdst/cli/keys/remove.py
import typer
import questionary
from pathlib import Path

from fourdst.core.keys import remove_key, list_keys

def keys_remove(
    key_path: Path = typer.Argument(None, help="Path to the public key file to remove.", exists=True, readable=True)
):
    """Removes a single public key from the local trust store."""
    if key_path:
        # Remove by path
        result = remove_key(str(key_path))

        if result["success"]:
            for removed_key in result["removed_keys"]:
                typer.secho(f"✅ Removed key '{removed_key['name']}' from source '{removed_key['source']}'.", fg=typer.colors.GREEN)
        else:
            typer.secho(f"Error: {result['error']}", fg=typer.colors.YELLOW)
    else:
        # Interactive removal
        keys_result = list_keys()

        if not keys_result["success"]:
            typer.secho(f"Error: {keys_result['error']}", fg=typer.colors.RED)
            raise typer.Exit(code=1)

        if keys_result["total_count"] == 0:
            typer.echo("No keys to remove.")
            raise typer.Exit()

        # Build choices for interactive selection
        choices = []
        for source_name, keys in keys_result["keys"].items():
            for key_info in keys:
                relative_path = f"{source_name}/{key_info['name']}"
                choice_name = f"{relative_path} ({key_info['fingerprint']})"
                choices.append({
                    "name": choice_name,
                    "value": key_info['fingerprint']  # Use fingerprint as identifier
                })

        selected_fingerprints = questionary.checkbox("Select keys to remove:", choices=choices).ask()

        if selected_fingerprints:
            for fingerprint in selected_fingerprints:
                result = remove_key(fingerprint)
                if result["success"]:
                    for removed_key in result["removed_keys"]:
                        typer.secho(f"✅ Removed key '{removed_key['name']}'.", fg=typer.colors.GREEN)
                else:
                    typer.secho(f"Error removing key: {result['error']}", fg=typer.colors.RED)
src-pybind/fourdst/cli/keys/sync.py | 52 (new file)
@@ -0,0 +1,52 @@
# fourdst/cli/keys/sync.py
import typer
import questionary

from fourdst.core.keys import sync_remotes, remove_remote_source

keys_app = typer.Typer()

@keys_app.command("sync")
def keys_sync():
    """
    Syncs the local trust store with all configured remote Git repositories.
    """
    def progress_callback(message):
        typer.echo(message)

    result = sync_remotes(progress_callback=progress_callback)

    if not result["success"]:
        typer.secho(f"Error: {result['error']}", fg=typer.colors.YELLOW)
        raise typer.Exit()

    # Display results
    success_count = len([r for r in result["synced_remotes"] if r["status"] == "success"])
    failed_count = len([r for r in result["synced_remotes"] if r["status"] == "failed"])

    typer.echo("\nSync completed:")
    typer.echo(f"  ✅ Successful: {success_count}")
    typer.echo(f"  ❌ Failed: {failed_count}")
    typer.echo(f"  📦 Total keys synced: {result['total_keys_synced']}")

    # Show details for each remote
    for remote_info in result["synced_remotes"]:
        if remote_info["status"] == "success":
            typer.secho(f"  ✅ {remote_info['name']}: {remote_info.get('keys_count', 0)} keys", fg=typer.colors.GREEN)
        else:
            typer.secho(f"  ❌ {remote_info['name']}: {remote_info['error']}", fg=typer.colors.RED)

    # Handle removed remotes
    if result["removed_remotes"]:
        typer.secho(f"\nRemoved failing remotes: {', '.join(result['removed_remotes'])}", fg=typer.colors.YELLOW)

    # Ask about failed remotes that weren't automatically removed
    failed_remotes = [r for r in result["synced_remotes"] if r["status"] == "failed" and r["name"] not in result["removed_remotes"]]
    for remote_info in failed_remotes:
        if questionary.confirm(f"Do you want to remove the failing remote '{remote_info['name']}'?").ask():
            remove_result = remove_remote_source(remote_info['name'])
            if remove_result["success"]:
                typer.secho(f"✅ Removed remote '{remote_info['name']}'", fg=typer.colors.GREEN)
            else:
                typer.secho(f"❌ Failed to remove remote '{remote_info['name']}': {remove_result['error']}", fg=typer.colors.RED)
src-pybind/fourdst/cli/main.py | 98 (new file)
@@ -0,0 +1,98 @@
# fourdst/cli/main.py

import typer
from pathlib import Path

from fourdst.cli.common.config import CACHE_PATH

from fourdst.cli.bundle.create import bundle_create
from fourdst.cli.bundle.fill import bundle_fill
from fourdst.cli.bundle.sign import bundle_sign
from fourdst.cli.bundle.inspect import bundle_inspect
from fourdst.cli.bundle.clear import bundle_clear
from fourdst.cli.bundle.diff import bundle_diff
from fourdst.cli.bundle.validate import bundle_validate

from fourdst.cli.plugin.init import plugin_init
from fourdst.cli.plugin.pack import plugin_pack
from fourdst.cli.plugin.extract import plugin_extract
from fourdst.cli.plugin.diff import plugin_diff
from fourdst.cli.plugin.validate import plugin_validate

from fourdst.cli.cache.clear import cache_clear

from fourdst.cli.keys.generate import keys_generate
from fourdst.cli.keys.sync import keys_sync
from fourdst.cli.keys.add import keys_add
from fourdst.cli.keys.remove import keys_remove
from fourdst.cli.keys.list import keys_list

from fourdst.cli.keys.remote.add import remote_add
from fourdst.cli.keys.remote.list import remote_list
from fourdst.cli.keys.remote.remove import remote_remove

app = typer.Typer(
    name="fourdst-cli",
    help="A command-line tool for managing fourdst projects, plugins, and bundles."
)

plugin_app = typer.Typer(name="plugin", help="Commands for managing individual fourdst plugins.")
bundle_app = typer.Typer(name="bundle", help="Commands for creating, signing, and managing plugin bundles.")
cache_app = typer.Typer(name="cache", help="Commands for managing the local cache.")
keys_app = typer.Typer(name="keys", help="Commands for cryptographic key generation and management.")
remote_app = typer.Typer(name="remote", help="Manage remote git repositories for public keys.")

# Add commands to their respective apps
plugin_app.command("init")(plugin_init)
plugin_app.command("pack")(plugin_pack)
plugin_app.command("extract")(plugin_extract)
plugin_app.command("validate")(plugin_validate)
plugin_app.command("diff")(plugin_diff)

bundle_app.command("create")(bundle_create)
bundle_app.command("fill")(bundle_fill)
bundle_app.command("sign")(bundle_sign)
bundle_app.command("inspect")(bundle_inspect)
bundle_app.command("clear")(bundle_clear)
bundle_app.command("diff")(bundle_diff)
bundle_app.command("validate")(bundle_validate)

cache_app.command("clear")(cache_clear)

keys_app.add_typer(remote_app)

keys_app.command("generate")(keys_generate)
keys_app.command("sync")(keys_sync)
keys_app.command("add")(keys_add)
keys_app.command("remove")(keys_remove)
keys_app.command("list")(keys_list)

remote_app.command("add")(remote_add)
remote_app.command("list")(remote_list)
remote_app.command("remove")(remote_remove)

# Add the sub-apps to the main app
app.add_typer(plugin_app, name="plugin")
app.add_typer(bundle_app, name="bundle")
app.add_typer(keys_app, name="keys")
app.add_typer(cache_app, name="cache")

def main():
    # Create config directory if it doesn't exist
    CACHE_PATH.mkdir(parents=True, exist_ok=True)
    app()

if __name__ == "__main__":
    main()
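For reference, a few representative invocations of the assembled CLI (assuming the installed entry point is named `fourdst`, as the hint in inspect.py suggests; all file names are placeholders):

fourdst plugin init MyPlugin --header plugin_interface.h
fourdst bundle create my_plugin/ --out bundle.fbundle --name MyPluginBundle
fourdst bundle sign bundle.fbundle --key author_key_private.pem
fourdst bundle inspect bundle.fbundle
fourdst keys add author_key_public.pem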
src-pybind/fourdst/cli/plugin/__init__.py | 0 (new file)
src-pybind/fourdst/cli/plugin/diff.py | 59 (new file)
@@ -0,0 +1,59 @@
# fourdst/cli/plugin/diff.py
import typer
from pathlib import Path
from rich.console import Console
from rich.panel import Panel
from rich.text import Text

from fourdst.core.plugin import compare_plugin_sources

console = Console()

def plugin_diff(
    plugin_name: str = typer.Argument(..., help="The name of the plugin to compare."),
    bundle_a_path: Path = typer.Argument(..., help="The first bundle to compare.", exists=True, readable=True),
    bundle_b_path: Path = typer.Argument(..., help="The second bundle to compare.", exists=True, readable=True),
):
    """
    Compares the source code of a specific plugin between two different bundles.
    """
    console.print(Panel(f"Comparing source for plugin [bold blue]{plugin_name}[/bold blue] between bundles"))

    # Compare using core function
    compare_result = compare_plugin_sources(bundle_a_path, bundle_b_path, plugin_name)
    if not compare_result['success']:
        console.print(f"[red]Error: {compare_result['error']}[/red]")
        raise typer.Exit(code=1)

    # Display results
    compare_data = compare_result['data']
    has_changes = compare_data['has_changes']
    added_files = compare_data['added_files']
    removed_files = compare_data['removed_files']
    modified_files = compare_data['modified_files']

    if added_files:
        console.print(Panel("\n".join(f"[green]+ {f}[/green]" for f in added_files), title="[bold]Added Files[/bold]"))

    if removed_files:
        console.print(Panel("\n".join(f"[red]- {f}[/red]" for f in removed_files), title="[bold]Removed Files[/bold]"))

    for modified_file in modified_files:
        file_path = modified_file['file_path']
        diff_content = modified_file['diff']

        diff_text = Text()
        for line in diff_content.splitlines(keepends=True):
            if line.startswith('+'):
                diff_text.append(line, style="green")
            elif line.startswith('-'):
                diff_text.append(line, style="red")
            else:
                diff_text.append(line)

        console.print(Panel(diff_text, title=f"[bold yellow]Modified: {file_path}[/bold yellow]", border_style="yellow", expand=False))

    if not has_changes:
        console.print(Panel("[green]No source code changes detected for this plugin.[/green]", title="Result"))
    else:
        console.print(f"\nFound changes in {len(modified_files)} file(s).")
src-pybind/fourdst/cli/plugin/extract.py | 39 (new file)
@@ -0,0 +1,39 @@
# fourdst/cli/plugin/extract.py
import typer
from pathlib import Path

from fourdst.core.plugin import extract_plugin_from_bundle

def plugin_extract(
    plugin_name: str = typer.Argument(..., help="The name of the plugin to extract."),
    bundle_path: Path = typer.Argument(..., help="The path to the .fbundle file.", exists=True, readable=True),
    output_dir: Path = typer.Option(
        Path("."),
        "--out", "-o",
        help="The directory to extract the plugin source to. Defaults to the current directory.",
        file_okay=False,
        dir_okay=True,
        writable=True,
        resolve_path=True
    )
):
    """
    Extracts a plugin's source code from a bundle.
    """
    typer.echo(f"Opening bundle: {bundle_path.name}")

    # Extract using core function
    extract_result = extract_plugin_from_bundle(bundle_path, plugin_name, output_dir)
    if not extract_result['success']:
        typer.secho(f"Error: {extract_result['error']}", fg=typer.colors.RED)
        raise typer.Exit(code=1)

    # Display results
    extract_data = extract_result['data']
    final_destination = Path(extract_data['output_path'])

    if final_destination.exists():
        typer.secho(f"Warning: Output directory '{final_destination}' already existed. Files may have been overwritten.", fg=typer.colors.YELLOW)

    typer.echo(f"Extracting '{plugin_name}' source to '{final_destination}'...")
    typer.secho(f"\n✅ Plugin '{plugin_name}' extracted successfully.", fg=typer.colors.GREEN)
78
src-pybind/fourdst/cli/plugin/init.py
Normal file
@@ -0,0 +1,78 @@
# fourdst/cli/plugin/init.py

import typer
import sys
from pathlib import Path
import questionary

from fourdst.core.plugin import parse_cpp_interface, generate_plugin_project

plugin_app = typer.Typer()

@plugin_app.command("init")
def plugin_init(
    project_name: str = typer.Argument(..., help="The name of the new plugin project."),
    header: Path = typer.Option(..., "--header", "-H", help="Path to the C++ header file defining the plugin interface.", exists=True, file_okay=True, dir_okay=False, readable=True),
    directory: Path = typer.Option(".", "-d", "--directory", help="The directory to create the project in.", resolve_path=True),
    version: str = typer.Option("0.1.0", "--ver", help="The initial SemVer version of the plugin."),
    libplugin_rev: str = typer.Option("main", "--libplugin-rev", help="The git revision of libplugin to use.")
):
    """
    Initializes a new Meson-based C++ plugin project from an interface header.
    """
    print(f"Parsing interface header: {header.name}")

    # Parse the C++ header using core function
    parse_result = parse_cpp_interface(header)
    if not parse_result['success']:
        print(f"Error: {parse_result['error']}", file=sys.stderr)
        raise typer.Exit(code=1)

    interfaces = parse_result['data']
    if not interfaces:
        print(f"Error: No suitable interfaces (classes with pure virtual methods) found in {header}", file=sys.stderr)
        raise typer.Exit(code=1)

    # Display found interfaces
    for interface_name, methods in interfaces.items():
        print(f"Found interface: '{interface_name}'")
        for method in methods:
            print(f"  -> Found pure virtual method: {method['signature']}")

    # Interactive Selection
    chosen_interface = questionary.select(
        "Which interface would you like to implement?",
        choices=list(interfaces.keys())
    ).ask()

    if not chosen_interface:
        raise typer.Exit()  # User cancelled

    print(f"Initializing plugin '{project_name}' implementing interface '{chosen_interface}'...")

    # Generate the project using core function
    config = {
        'project_name': project_name,
        'header_path': header,
        'directory': directory,
        'version': version,
        'libplugin_rev': libplugin_rev,
        'chosen_interface': chosen_interface,
        'interfaces': interfaces
    }

    generation_result = generate_plugin_project(config)
    if not generation_result['success']:
        print(f"Error creating project structure: {generation_result['error']}", file=sys.stderr)
        raise typer.Exit(code=1)

    # Display results
    project_data = generation_result['data']
    for file_path in project_data['files_created']:
        print(f"  -> Created {file_path}")

    print("\n✅ Project initialized successfully and committed to Git!")
    print("To build your new plugin:")
    print(f"  cd {project_data['project_path']}")
    print("  meson setup builddir")
    print("  meson compile -C builddir")
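A note on the stubs this command generates: generate_plugin_project (fourdst/core/plugin.py, later in this diff) joins each parsed signature with an override wrapper, so for a hypothetical pure virtual method `double evaluate(double T) const` the emitted C++ stub looks like:

    # sketch of the rendered stub text (the method name is hypothetical):
    #
    #     double evaluate(double) const override {
    #         // TODO: Implement this method
    #     }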
57
src-pybind/fourdst/cli/plugin/pack.py
Normal file
@@ -0,0 +1,57 @@
# fourdst/cli/plugin/pack.py
import typer
from pathlib import Path

from fourdst.core.plugin import validate_bundle_directory, pack_bundle_directory


def plugin_pack(
    folder_path: Path = typer.Argument(..., help="The directory to pack into a bundle.", exists=True, file_okay=False, dir_okay=True, readable=True),
    name: str = typer.Option(None, "--name", "-n", help="The name for the output bundle file (without extension). Defaults to the folder name.")
):
    """
    Validates and packs a directory into a .fbundle archive.
    """
    typer.echo(f"--- Validating Bundle Directory: {folder_path.resolve()} ---")

    # Validate using core function
    validation_result = validate_bundle_directory(folder_path)
    if not validation_result['success']:
        typer.secho(f"Error during validation: {validation_result['error']}", fg=typer.colors.RED)
        raise typer.Exit(code=1)

    validation_errors = validation_result['data']['errors']
    if validation_errors:
        typer.secho("Validation Failed. The following issues were found:", fg=typer.colors.RED, bold=True)
        for error in validation_errors:
            typer.echo(f"  - {error}")
        raise typer.Exit(code=1)

    typer.secho("✅ Validation Successful.", fg=typer.colors.GREEN)
    typer.echo("\n--- Packing Bundle ---")

    output_name = name if name else folder_path.name
    if folder_path.parent.exists():
        typer.secho(f"Warning: Output file {folder_path.parent / f'{output_name}.fbundle'} will be created/overwritten.", fg=typer.colors.YELLOW)

    # Pack using core function
    output_config = {
        'name': output_name,
        'output_dir': folder_path.parent
    }

    pack_result = pack_bundle_directory(folder_path, output_config)
    if not pack_result['success']:
        typer.secho(f"An unexpected error occurred during packing: {pack_result['error']}", fg=typer.colors.RED)
        raise typer.Exit(code=1)

    # Display results
    pack_data = pack_result['data']
    typer.echo(f"  Added {pack_data['files_packed']} files to bundle")
    typer.secho(f"\n✅ Successfully created bundle: {pack_data['output_path']}", fg=typer.colors.GREEN, bold=True)

    # Final status report
    if pack_data['is_signed']:
        typer.secho("Bundle Status: ✅ SIGNED", fg=typer.colors.GREEN)
    else:
        typer.secho("Bundle Status: 🟡 UNSIGNED", fg=typer.colors.YELLOW)
63
src-pybind/fourdst/cli/plugin/validate.py
Normal file
@@ -0,0 +1,63 @@
# fourdst/cli/plugin/validate.py
import typer
from pathlib import Path
from rich.console import Console
from rich.panel import Panel
from rich.text import Text

from fourdst.core.plugin import validate_plugin_project

console = Console()

def plugin_validate(
    plugin_path: Path = typer.Argument(
        ".",
        help="The path to the plugin directory to validate.",
        exists=True,
        file_okay=False,
        dir_okay=True,
        resolve_path=True
    )
):
    """
    Validates a plugin's structure and meson.build file.
    """
    console.print(Panel(f"Validating Plugin: [bold]{plugin_path.name}[/bold]", border_style="blue"))

    # Validate using core function
    validate_result = validate_plugin_project(plugin_path)
    if not validate_result['success']:
        console.print(f"[red]Error during validation: {validate_result['error']}[/red]")
        raise typer.Exit(code=1)

    # Display results
    validate_data = validate_result['data']
    errors = validate_data['errors']
    warnings = validate_data['warnings']
    checks = validate_data['checks']

    # Display each check result
    for check in checks:
        if check['passed']:
            console.print(Text(f"✅ {check['message']}", style="green"))
        else:
            if check['is_warning']:
                console.print(Text(f"⚠️ {check['message']}", style="yellow"))
            else:
                console.print(Text(f"❌ {check['message']}", style="red"))

    # Final summary
    console.print("-" * 40)
    if not errors:
        console.print(Panel(
            f"[bold green]Validation Passed[/bold green]\nWarnings: {len(warnings)}",
            title="Result",
            border_style="green"
        ))
    else:
        console.print(Panel(
            f"[bold red]Validation Failed[/bold red]\nErrors: {len(errors)}\nWarnings: {len(warnings)}",
            title="Result",
            border_style="red"
        ))
        raise typer.Exit(code=1)
19
src-pybind/fourdst/cli/templates/meson.build.in
Normal file
@@ -0,0 +1,19 @@
project('{project_name}', 'cpp',
  version : '{version}',
  default_options : ['warning_level=3', 'cpp_std=c++23'])

# Find the fourdst-plugin dependency, falling back to the subproject if not found.
plugin_dep = dependency('fourdst_plugin',
  required: true,
  fallback: ['libplugin', 'plugin_dep'],
  default_options: ['tests=false']
)

# Define the shared library for the plugin
shared_library('{project_name}',
  'src/{project_name}.cpp',
  dependencies : [plugin_dep],
  # Add the path to the local copy of the interface header
  include_directories: include_directories('src/include'),
  install : true,
)
16
src-pybind/fourdst/cli/templates/plugin.cpp.in
Normal file
@@ -0,0 +1,16 @@
#include "{interface_header_path}"
#include <fourdst/plugin/plugin.h>
#include <iostream>

class {class_name} final : public {interface} {{
public:
    using {interface}::{interface};
    ~{class_name}() override {{
        // Implement any custom destruction logic here
    }}

    // --- Implemented Abstract Methods ---
{method_stubs}
}};

FOURDST_DECLARE_PLUGIN({class_name}, "{project_name}", "0.1.0"); // Version can be static or dynamic
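The doubled braces ({{ and }}) in plugin.cpp.in are str.format escapes, which suggests both templates are rendered with plain Python string formatting (the template-loading helper, get_template_content, is imported in fourdst/core/plugin.py but not shown in this diff). A minimal rendering sketch with hypothetical values:

    rendered = template_text.format(
        project_name="my_plugin",                    # hypothetical
        version="0.1.0",
        interface="IOpacity",                        # hypothetical
        class_name="MyPluginPlugin",
        interface_header_path="include/opacity.h",   # hypothetical
        method_stubs="    double evaluate(double) const override { ... }"
    )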
0
src-pybind/fourdst/core/__init__.py
Normal file
222
src-pybind/fourdst/core/build.py
Normal file
@@ -0,0 +1,222 @@
# fourdst/core/build.py

import os
import shutil
import subprocess
import zipfile
import io
import tarfile
from pathlib import Path

try:
    import docker
except ImportError:
    docker = None

from fourdst.core.utils import run_command
from fourdst.core.platform import get_platform_identifier, get_macos_targeted_platform_identifier
from fourdst.core.config import CROSS_FILES_PATH, DOCKER_BUILD_IMAGES

def get_available_build_targets(progress_callback=None):
    """Gets native, cross-compilation, and Docker build targets."""
    def report_progress(message):
        if progress_callback:
            progress_callback(message)

    targets = [get_platform_identifier()]

    # Add cross-file targets
    CROSS_FILES_PATH.mkdir(exist_ok=True)
    for cross_file in CROSS_FILES_PATH.glob("*.cross"):
        triplet = cross_file.stem
        targets.append({
            "triplet": triplet,
            "abi_signature": f"cross-{triplet}",
            "is_native": False,
            "cross_file": str(cross_file.resolve()),
            "docker_image": None,
            'type': 'cross'
        })

    # Add Docker targets if Docker is available
    if docker:
        try:
            client = docker.from_env()
            client.ping()
            for name, image in DOCKER_BUILD_IMAGES.items():
                arch = name.split(' ')[0]
                targets.append({
                    "triplet": f"{arch}-linux",
                    "abi_signature": f"docker-{image}",
                    "is_native": False,
                    "cross_file": None,
                    "docker_image": image,
                    "arch": arch,
                    'type': 'docker'
                })
        except Exception:
            report_progress("Warning: Docker is installed but the daemon is not running. Docker targets are unavailable.")

    return targets

def build_plugin_for_target(sdist_path: Path, build_dir: Path, target: dict, progress_callback=None):
    """Builds a plugin natively or with a cross file."""
    def report_progress(message):
        if progress_callback:
            progress_callback(message)

    source_dir = build_dir / "src"
    if source_dir.exists():
        shutil.rmtree(source_dir)

    with zipfile.ZipFile(sdist_path, 'r') as sdist_zip:
        sdist_zip.extractall(source_dir)

    setup_cmd = ["meson", "setup"]
    if target.get("cross_file"):
        setup_cmd.extend(["--cross-file", target["cross_file"]])
    setup_cmd.append("build")

    run_command(setup_cmd, cwd=source_dir, progress_callback=progress_callback)
    run_command(["meson", "compile", "-C", "build"], cwd=source_dir, progress_callback=progress_callback)

    meson_build_dir = source_dir / "build"
    compiled_lib = next(meson_build_dir.rglob("lib*.so"), None) or next(meson_build_dir.rglob("lib*.dylib"), None)
    if not compiled_lib:
        raise FileNotFoundError("Could not find compiled library after build.")

    return compiled_lib, target

def build_plugin_in_docker(sdist_path: Path, build_dir: Path, target: dict, plugin_name: str, progress_callback=None):
    """Builds a plugin inside a Docker container."""
    def report_progress(message):
        if progress_callback:
            progress_callback(message)

    client = docker.from_env()
    image_name = target["docker_image"]

    arch = target.get("arch", "unknown_arch")

    report_progress(f"  - Pulling Docker image '{image_name}' (if necessary)...")
    client.images.pull(image_name)

    source_dir = build_dir / "src"
    if source_dir.exists():
        shutil.rmtree(source_dir)

    with zipfile.ZipFile(sdist_path, 'r') as sdist_zip:
        sdist_zip.extractall(source_dir)

    from fourdst.core.platform import ABI_DETECTOR_CPP_SRC, ABI_DETECTOR_MESON_SRC
    build_script = f"""
set -e
echo "--- Installing build dependencies ---"
export PATH="/opt/python/cp313-cp313/bin:$PATH"
dnf install -y openssl-devel
pip install meson ninja cmake

echo "--- Configuring with Meson ---"
meson setup /build/meson_build
echo "--- Compiling with Meson ---"
meson compile -C /build/meson_build

echo "--- Running ABI detector ---"
mkdir -p /tmp/abi && cd /tmp/abi

cat > main.cpp << 'EOF'
{ABI_DETECTOR_CPP_SRC}
EOF

cat > meson.build << 'EOF'
{ABI_DETECTOR_MESON_SRC}
EOF

meson setup build && meson compile -C build
./build/detector > /build/abi_details.txt
"""

    container_build_dir = Path("/build")

    report_progress("  - Running build container...")
    container = client.containers.run(
        image=image_name,
        command=["/bin/sh", "-c", build_script],
        volumes={str(source_dir.resolve()): {'bind': str(container_build_dir), 'mode': 'rw'}},
        working_dir=str(container_build_dir),
        detach=True
    )

    for line in container.logs(stream=True, follow=True):
        report_progress(f"    [docker] {line.decode('utf-8').strip()}")

    result = container.wait()
    if result["StatusCode"] != 0:
        log_output = container.logs()
        container.remove()
        raise subprocess.CalledProcessError(result["StatusCode"], f"Build inside Docker failed. Full log:\n{log_output.decode('utf-8')}")

    report_progress("  - Locating compiled library in container...")
    meson_build_dir_str = (container_build_dir / "meson_build").as_posix()
    expected_lib_name = f"lib{plugin_name}.so"

    find_cmd = f"find {meson_build_dir_str} -name {expected_lib_name}"

    find_output = client.containers.run(
        image=image_name,
        command=["/bin/sh", "-c", find_cmd],
        volumes={str(source_dir.resolve()): {'bind': str(container_build_dir), 'mode': 'ro'}},
        remove=True,
        detach=False
    )
    found_path_str = find_output.decode('utf-8').strip()
    if not found_path_str:
        raise FileNotFoundError(f"Could not locate '{expected_lib_name}' inside the container.")
    compiled_lib_path_in_container = Path(found_path_str)

    # Use the tarfile module for robust extraction
    bits, _ = container.get_archive(str(container_build_dir / "abi_details.txt"))
    with tarfile.open(fileobj=io.BytesIO(b''.join(bits))) as tar:
        extracted_file = None
        for member in tar.getmembers():
            if member.isfile():
                extracted_file = tar.extractfile(member)
                break
        if not extracted_file:
            raise FileNotFoundError("Could not extract abi_details.txt from container archive.")
        abi_details_content = extracted_file.read()

    abi_details = {}
    for line in abi_details_content.decode('utf-8').strip().split('\n'):
        if '=' in line:
            key, value = line.split('=', 1)
            abi_details[key.strip()] = value.strip()

    compiler = abi_details.get('compiler', 'unk_compiler')
    stdlib = abi_details.get('stdlib', 'unk_stdlib')
    stdlib_version = abi_details.get('stdlib_version', 'unk_stdlib_version')
    abi = abi_details.get('abi', 'unk_abi')
    abi_string = f"{compiler}-{stdlib}-{stdlib_version}-{abi}"

    final_target = {
        "triplet": f"{arch}-{abi_details.get('os', 'linux')}",
        "abi_signature": abi_string,
        "is_native": False,
        "cross_file": None,
        "docker_image": image_name,
        "arch": arch
    }

    local_lib_path = build_dir / compiled_lib_path_in_container.name
    bits, _ = container.get_archive(str(compiled_lib_path_in_container))
    with tarfile.open(fileobj=io.BytesIO(b''.join(bits))) as tar:
        member = tar.getmembers()[0]
        extracted_file = tar.extractfile(member)
        if not extracted_file:
            raise FileNotFoundError(f"Could not extract {local_lib_path.name} from container archive.")
        with open(local_lib_path, 'wb') as f:
            f.write(extracted_file.read())

    container.remove()

    return local_lib_path, final_target
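A minimal driver sketch for the two build paths above (the sdist path, work directory, and plugin name are hypothetical; build_plugin_in_docker additionally requires the docker package and a running daemon):

    from pathlib import Path
    from fourdst.core.build import (
        get_available_build_targets,
        build_plugin_for_target,
        build_plugin_in_docker,
    )

    sdist = Path("dist/my_plugin-sdist.zip")   # hypothetical
    work = Path("/tmp/plugin-build")           # hypothetical
    for target in get_available_build_targets(progress_callback=print):
        if target.get("type") == "docker":
            lib, resolved = build_plugin_in_docker(sdist, work, target, "my_plugin", progress_callback=print)
        else:
            lib, resolved = build_plugin_for_target(sdist, work, target, progress_callback=print)
        print(lib, resolved["abi_signature"])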
1099
src-pybind/fourdst/core/bundle.py
Normal file
File diff suppressed because it is too large
21
src-pybind/fourdst/core/config.py
Normal file
@@ -0,0 +1,21 @@
# fourdst/core/config.py

from pathlib import Path

FOURDST_CONFIG_DIR = Path.home() / ".config" / "fourdst"
LOCAL_TRUST_STORE_PATH = FOURDST_CONFIG_DIR / "keys"
CROSS_FILES_PATH = FOURDST_CONFIG_DIR / "cross"
CACHE_PATH = FOURDST_CONFIG_DIR / "cache"
ABI_CACHE_FILE = CACHE_PATH / "abi_identifier.json"
DOCKER_BUILD_IMAGES = {
    "x86_64 (manylinux_2_28)": "quay.io/pypa/manylinux_2_28_x86_64",
    "aarch64 (manylinux_2_28)": "quay.io/pypa/manylinux_2_28_aarch64",
    "i686 (manylinux_2_28)": "quay.io/pypa/manylinux_2_28_i686",
    "ppc64le (manylinux_2_28)": "quay.io/pypa/manylinux_2_28_ppc64le",
    "s390x (manylinux_2_28)": "quay.io/pypa/manylinux_2_28_s390x"
}

# Ensure the necessary directories exist
LOCAL_TRUST_STORE_PATH.mkdir(parents=True, exist_ok=True)
CROSS_FILES_PATH.mkdir(parents=True, exist_ok=True)
CACHE_PATH.mkdir(parents=True, exist_ok=True)
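For reference, these constants resolve to a single per-user tree (shown for a POSIX home directory):

    ~/.config/fourdst/
        keys/    (LOCAL_TRUST_STORE_PATH)
        cross/   (CROSS_FILES_PATH)
        cache/   (CACHE_PATH, containing abi_identifier.json)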
746
src-pybind/fourdst/core/keys.py
Normal file
@@ -0,0 +1,746 @@
# fourdst/core/keys.py
"""
Core key management functions for 4DSTAR.

This module provides the core functionality for managing cryptographic keys
used for bundle signing and verification. All key operations should go through
these functions to maintain consistency between CLI and Electron interfaces.

ARCHITECTURE:
=============
- All functions return JSON-serializable dictionaries
- Progress callbacks are separate from return values
- Consistent error format: {"success": false, "error": "message"}
- Functions handle both interactive and programmatic usage
"""

import os
import sys
import json
import shutil
import hashlib
import logging
import subprocess
from pathlib import Path
from typing import Dict, Any, Optional, Callable, List

from cryptography.hazmat.primitives.asymmetric import ed25519, rsa
from cryptography.hazmat.primitives import serialization

from fourdst.core.config import FOURDST_CONFIG_DIR, LOCAL_TRUST_STORE_PATH
from fourdst.core.utils import run_command

# Configure logging to go to stderr only, never stdout
logging.basicConfig(stream=sys.stderr, level=logging.INFO)

# Key management paths
MANUAL_KEYS_DIR = LOCAL_TRUST_STORE_PATH / "manual"
REMOTES_DIR = LOCAL_TRUST_STORE_PATH / "remotes"
KEY_REMOTES_CONFIG = FOURDST_CONFIG_DIR / "key_remotes.json"


def list_keys(progress_callback: Optional[Callable] = None) -> Dict[str, Any]:
    """
    Lists all trusted public keys organized by source.

    Returns:
        Dict with structure:
        {
            "success": bool,
            "keys": {
                "source_name": [
                    {
                        "name": str,
                        "path": str,
                        "fingerprint": str,
                        "size_bytes": int
                    }
                ]
            },
            "total_count": int
        }

        On error:
        {
            "success": false,
            "error": "error message"
        }
    """
    def report_progress(message):
        if progress_callback:
            progress_callback(message)
        else:
            logging.info(message)

    try:
        report_progress("Scanning trust store for keys...")

        if not LOCAL_TRUST_STORE_PATH.exists():
            return {
                "success": True,
                "keys": {},
                "total_count": 0,
                "message": "Trust store not found - no keys available"
            }

        keys_by_source = {}
        total_count = 0

        for source_dir in LOCAL_TRUST_STORE_PATH.iterdir():
            if source_dir.is_dir():
                source_keys = []
                # Look for both .pub and .pub.pem files
                key_patterns = ["*.pub", "*.pub.pem"]
                for pattern in key_patterns:
                    for key_file in source_dir.glob(pattern):
                        try:
                            fingerprint = _get_key_fingerprint(key_file)
                            key_info = {
                                "name": key_file.name,
                                "path": str(key_file),
                                "fingerprint": fingerprint,
                                "size_bytes": key_file.stat().st_size
                            }
                            source_keys.append(key_info)
                            total_count += 1
                        except Exception as e:
                            report_progress(f"Warning: Could not process key {key_file}: {e}")

                if source_keys:
                    keys_by_source[source_dir.name] = source_keys

        report_progress(f"Found {total_count} keys across {len(keys_by_source)} sources")

        return {
            "success": True,
            "keys": keys_by_source,
            "total_count": total_count
        }

    except Exception as e:
        logging.exception("Unexpected error listing keys")
        return {
            "success": False,
            "error": f"Failed to list keys: {str(e)}"
        }

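# Usage sketch (illustrative only, not part of this module's API surface; it
# just exercises the envelope contract documented above):
#
#     result = list_keys(progress_callback=print)
#     if result["success"]:
#         for source, keys in result["keys"].items():
#             for key in keys:
#                 print(source, key["name"], key["fingerprint"])
#     else:
#         print("error:", result["error"])
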
def generate_key(
    key_name: str = "author_key",
    key_type: str = "ed25519",
    output_dir: Optional[Path] = None,
    progress_callback: Optional[Callable] = None
) -> Dict[str, Any]:
    """
    Generates a new Ed25519 or RSA key pair for signing bundles.

    Args:
        key_name: Base name for the generated key files
        key_type: Type of key to generate ("ed25519" or "rsa")
        output_dir: Directory to save keys (defaults to current directory)
        progress_callback: Optional function for progress updates

    Returns:
        Dict with structure:
        {
            "success": bool,
            "private_key_path": str,
            "public_key_path": str,
            "openssh_public_key_path": str,
            "key_type": str,
            "fingerprint": str
        }

        On error:
        {
            "success": false,
            "error": "error message"
        }
    """
    def report_progress(message):
        if progress_callback:
            progress_callback(message)
        else:
            logging.info(message)

    try:
        if output_dir is None:
            output_dir = Path.cwd()
        else:
            output_dir = Path(output_dir)
            output_dir.mkdir(parents=True, exist_ok=True)

        # Define key file paths
        private_key_path = output_dir / f"{key_name}.pem"
        public_key_path = output_dir / f"{key_name}.pub.pem"
        openssh_public_key_path = output_dir / f"{key_name}.pub"

        # Check if files already exist
        if private_key_path.exists() or public_key_path.exists() or openssh_public_key_path.exists():
            return {
                "success": False,
                "error": f"Key files already exist: {private_key_path.name}, {public_key_path.name}, or {openssh_public_key_path.name}"
            }

        # Generate key based on requested type
        key_type = key_type.lower()
        if key_type == "ed25519":
            report_progress("Generating Ed25519 key pair...")
            private_key_obj = ed25519.Ed25519PrivateKey.generate()
        elif key_type == "rsa":
            report_progress("Generating RSA-2048 key pair...")
            private_key_obj = rsa.generate_private_key(public_exponent=65537, key_size=2048)
        else:
            return {
                "success": False,
                "error": f"Unsupported key type: {key_type}. Supported types: ed25519, rsa"
            }

        # Serialize private key to PEM
        report_progress("Writing private key...")
        priv_pem = private_key_obj.private_bytes(
            encoding=serialization.Encoding.PEM,
            format=serialization.PrivateFormat.PKCS8,
            encryption_algorithm=serialization.NoEncryption()
        )
        private_key_path.write_bytes(priv_pem)

        # Derive and serialize public key to PEM
        report_progress("Writing public key...")
        public_key_obj = private_key_obj.public_key()
        pub_pem = public_key_obj.public_bytes(
            encoding=serialization.Encoding.PEM,
            format=serialization.PublicFormat.SubjectPublicKeyInfo
        )
        public_key_path.write_bytes(pub_pem)

        # Also write OpenSSH-compatible public key
        openssh_pub = public_key_obj.public_bytes(
            encoding=serialization.Encoding.OpenSSH,
            format=serialization.PublicFormat.OpenSSH
        )
        openssh_public_key_path.write_bytes(openssh_pub)

        # Generate fingerprint
        fingerprint = _get_key_fingerprint(public_key_path)

        report_progress("Key generation completed successfully!")

        return {
            "success": True,
            "private_key_path": str(private_key_path.resolve()),
            "public_key_path": str(public_key_path.resolve()),
            "openssh_public_key_path": str(openssh_public_key_path.resolve()),
            "key_type": key_type,
            "fingerprint": fingerprint,
            "message": f"Generated {key_type.upper()} key pair successfully"
        }

    except Exception as e:
        logging.exception("Unexpected error generating key")
        return {
            "success": False,
            "error": f"Failed to generate key: {str(e)}"
        }

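# Round-trip sketch for Ed25519 keys produced by generate_key (illustrative
# only; the actual bundle signing flow lives in fourdst/core/bundle.py, whose
# diff is suppressed above):
#
#     from cryptography.hazmat.primitives import serialization
#
#     info = generate_key("author_key", "ed25519")
#     priv = serialization.load_pem_private_key(
#         Path(info["private_key_path"]).read_bytes(), password=None)
#     pub = serialization.load_pem_public_key(
#         Path(info["public_key_path"]).read_bytes())
#     sig = priv.sign(b"payload")
#     pub.verify(sig, b"payload")   # raises InvalidSignature on mismatch
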
def add_key(
    key_path: Path,
    progress_callback: Optional[Callable] = None
) -> Dict[str, Any]:
    """
    Adds a single public key to the local trust store.

    Args:
        key_path: Path to the public key file to add
        progress_callback: Optional function for progress updates

    Returns:
        Dict with structure:
        {
            "success": bool,
            "key_name": str,
            "fingerprint": str,
            "destination_path": str,
            "already_existed": bool
        }

        On error:
        {
            "success": false,
            "error": "error message"
        }
    """
    def report_progress(message):
        if progress_callback:
            progress_callback(message)
        else:
            logging.info(message)

    try:
        key_path = Path(key_path)

        if not key_path.exists():
            return {
                "success": False,
                "error": f"Key file does not exist: {key_path}"
            }

        if not key_path.is_file():
            return {
                "success": False,
                "error": f"Path is not a file: {key_path}"
            }

        # Ensure manual keys directory exists
        MANUAL_KEYS_DIR.mkdir(parents=True, exist_ok=True)

        destination = MANUAL_KEYS_DIR / key_path.name
        already_existed = False

        if destination.exists():
            # Check if content is identical
            if destination.read_bytes() == key_path.read_bytes():
                already_existed = True
                report_progress(f"Key '{key_path.name}' already exists with identical content")
            else:
                return {
                    "success": False,
                    "error": f"Key '{key_path.name}' already exists with different content"
                }
        else:
            report_progress(f"Adding key '{key_path.name}' to trust store...")
            shutil.copy(key_path, destination)

        # Generate fingerprint
        fingerprint = _get_key_fingerprint(destination)

        return {
            "success": True,
            "key_name": key_path.name,
            "fingerprint": fingerprint,
            "destination_path": str(destination),
            "already_existed": already_existed,
            "message": f"Key '{key_path.name}' {'already exists in' if already_existed else 'added to'} trust store"
        }

    except Exception as e:
        logging.exception("Unexpected error adding key")
        return {
            "success": False,
            "error": f"Failed to add key: {str(e)}"
        }

def remove_key(
    key_identifier: str,
    progress_callback: Optional[Callable] = None
) -> Dict[str, Any]:
    """
    Removes a key from the trust store by fingerprint, name, or path.

    Args:
        key_identifier: Key fingerprint, name, or path to identify the key to remove
        progress_callback: Optional function for progress updates

    Returns:
        Dict with structure:
        {
            "success": bool,
            "removed_keys": [
                {
                    "name": str,
                    "path": str,
                    "source": str
                }
            ],
            "removed_count": int
        }

        On error:
        {
            "success": false,
            "error": "error message"
        }
    """
    def report_progress(message):
        if progress_callback:
            progress_callback(message)
        else:
            logging.info(message)

    try:
        if not LOCAL_TRUST_STORE_PATH.exists():
            return {
                "success": False,
                "error": "Trust store not found"
            }

        removed_keys = []

        # Search for matching keys (same patterns as list_keys)
        for source_dir in LOCAL_TRUST_STORE_PATH.iterdir():
            if source_dir.is_dir():
                key_patterns = ["*.pub", "*.pub.pem"]
                for pattern in key_patterns:
                    for key_file in source_dir.glob(pattern):
                        should_remove = False

                        # Check if identifier matches fingerprint, name, or path
                        try:
                            fingerprint = _get_key_fingerprint(key_file)
                            if (key_identifier == fingerprint or
                                    key_identifier == key_file.name or
                                    key_identifier == str(key_file) or
                                    key_identifier == str(key_file.resolve())):
                                should_remove = True
                        except Exception as e:
                            report_progress(f"Warning: Could not process key {key_file}: {e}")
                            continue

                        if should_remove:
                            report_progress(f"Removing key '{key_file.name}' from source '{source_dir.name}'")
                            removed_keys.append({
                                "name": key_file.name,
                                "path": str(key_file),
                                "source": source_dir.name
                            })
                            key_file.unlink()

        if not removed_keys:
            return {
                "success": False,
                "error": f"No matching key found for identifier: {key_identifier}"
            }

        return {
            "success": True,
            "removed_keys": removed_keys,
            "removed_count": len(removed_keys),
            "message": f"Removed {len(removed_keys)} key(s)"
        }

    except Exception as e:
        logging.exception("Unexpected error removing key")
        return {
            "success": False,
            "error": f"Failed to remove key: {str(e)}"
        }

def sync_remotes(progress_callback: Optional[Callable] = None) -> Dict[str, Any]:
    """
    Syncs the local trust store with all configured remote Git repositories.

    Returns:
        Dict with structure:
        {
            "success": bool,
            "synced_remotes": [
                {
                    "name": str,
                    "url": str,
                    "status": "success" | "failed",
                    "error": str (if failed)
                }
            ],
            "removed_remotes": [str],  # Names of remotes that were removed due to failures
            "total_keys_synced": int
        }

        On error:
        {
            "success": false,
            "error": "error message"
        }
    """
    def report_progress(message):
        if progress_callback:
            progress_callback(message)
        else:
            logging.info(message)

    try:
        if not KEY_REMOTES_CONFIG.exists():
            return {
                "success": False,
                "error": "No remotes configured. Use remote management to add remotes first."
            }

        with open(KEY_REMOTES_CONFIG, 'r') as f:
            config = json.load(f)

        remotes = config.get("remotes", [])
        if not remotes:
            return {
                "success": False,
                "error": "No remotes configured in config file"
            }

        REMOTES_DIR.mkdir(parents=True, exist_ok=True)

        synced_remotes = []
        remotes_to_remove = []
        total_keys_synced = 0

        for remote in remotes:
            name = remote['name']
            url = remote['url']
            remote_path = REMOTES_DIR / name

            report_progress(f"Syncing remote '{name}' from {url}")

            try:
                if remote_path.exists():
                    run_command(["git", "pull"], cwd=remote_path)
                else:
                    run_command(["git", "clone", "--depth", "1", url, str(remote_path)])

                # Clean up non-public key files and count keys
                keys_count = 0
                for item in remote_path.rglob("*"):
                    if item.is_file():
                        if item.suffix == '.pub':
                            keys_count += 1
                        else:
                            item.unlink()

                total_keys_synced += keys_count

                synced_remotes.append({
                    "name": name,
                    "url": url,
                    "status": "success",
                    "keys_count": keys_count
                })

                report_progress(f"Successfully synced '{name}' ({keys_count} keys)")

            except Exception as e:
                error_msg = str(e)
                synced_remotes.append({
                    "name": name,
                    "url": url,
                    "status": "failed",
                    "error": error_msg
                })
                remotes_to_remove.append(name)
                report_progress(f"Failed to sync remote '{name}': {error_msg}")

        # Remove failed remotes from config if any
        if remotes_to_remove:
            config['remotes'] = [r for r in config['remotes'] if r['name'] not in remotes_to_remove]
            with open(KEY_REMOTES_CONFIG, 'w') as f:
                json.dump(config, f, indent=2)

        success_count = len([r for r in synced_remotes if r["status"] == "success"])

        return {
            "success": True,
            "synced_remotes": synced_remotes,
            "removed_remotes": remotes_to_remove,
            "total_keys_synced": total_keys_synced,
            "message": f"Sync completed: {success_count} successful, {len(remotes_to_remove)} failed"
        }

    except Exception as e:
        logging.exception("Unexpected error syncing remotes")
        return {
            "success": False,
            "error": f"Failed to sync remotes: {str(e)}"
        }

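# For reference, key_remotes.json as read above has this shape (the name and
# URL are hypothetical):
#
#     {
#         "remotes": [
#             {"name": "4dstar-official",
#              "url": "https://github.com/example/keys.git"}
#         ]
#     }
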
def get_remote_sources(progress_callback: Optional[Callable] = None) -> Dict[str, Any]:
    """
    Lists all configured remote key sources.

    Returns:
        Dict with structure:
        {
            "success": bool,
            "remotes": [
                {
                    "name": str,
                    "url": str,
                    "local_path": str,
                    "exists": bool,
                    "keys_count": int
                }
            ]
        }
    """
    try:
        if not KEY_REMOTES_CONFIG.exists():
            return {
                "success": True,
                "remotes": [],
                "message": "No remotes configured"
            }

        with open(KEY_REMOTES_CONFIG, 'r') as f:
            config = json.load(f)

        remotes_info = []
        for remote in config.get("remotes", []):
            remote_path = REMOTES_DIR / remote['name']
            keys_count = len(list(remote_path.glob("*.pub"))) if remote_path.exists() else 0

            remotes_info.append({
                "name": remote['name'],
                "url": remote['url'],
                "local_path": str(remote_path),
                "exists": remote_path.exists(),
                "keys_count": keys_count
            })

        return {
            "success": True,
            "remotes": remotes_info
        }

    except Exception as e:
        logging.exception("Unexpected error getting remote sources")
        return {
            "success": False,
            "error": f"Failed to get remote sources: {str(e)}"
        }

def add_remote_source(
    name: str,
    url: str,
    progress_callback: Optional[Callable] = None
) -> Dict[str, Any]:
    """
    Adds a new remote key source.

    Args:
        name: Name for the remote source
        url: Git repository URL
        progress_callback: Optional function for progress updates

    Returns:
        Dict with structure:
        {
            "success": bool,
            "name": str,
            "url": str,
            "message": str
        }
    """
    try:
        FOURDST_CONFIG_DIR.mkdir(parents=True, exist_ok=True)

        # Load existing config or create new one
        config = {"remotes": []}
        if KEY_REMOTES_CONFIG.exists():
            with open(KEY_REMOTES_CONFIG, 'r') as f:
                config = json.load(f)

        # Check if remote already exists
        for remote in config.get("remotes", []):
            if remote['name'] == name:
                return {
                    "success": False,
                    "error": f"Remote '{name}' already exists"
                }

        # Add new remote
        config.setdefault("remotes", []).append({
            "name": name,
            "url": url
        })

        # Save config
        with open(KEY_REMOTES_CONFIG, 'w') as f:
            json.dump(config, f, indent=2)

        return {
            "success": True,
            "name": name,
            "url": url,
            "message": f"Remote '{name}' added successfully"
        }

    except Exception as e:
        logging.exception("Unexpected error adding remote source")
        return {
            "success": False,
            "error": f"Failed to add remote source: {str(e)}"
        }

def remove_remote_source(
    name: str,
    progress_callback: Optional[Callable] = None
) -> Dict[str, Any]:
    """
    Removes a remote key source.

    Args:
        name: Name of the remote source to remove
        progress_callback: Optional function for progress updates

    Returns:
        Dict with structure:
        {
            "success": bool,
            "name": str,
            "message": str
        }
    """
    try:
        if not KEY_REMOTES_CONFIG.exists():
            return {
                "success": False,
                "error": "No remotes configured"
            }

        with open(KEY_REMOTES_CONFIG, 'r') as f:
            config = json.load(f)

        original_len = len(config.get("remotes", []))
        config["remotes"] = [r for r in config.get("remotes", []) if r['name'] != name]

        if len(config["remotes"]) == original_len:
            return {
                "success": False,
                "error": f"Remote '{name}' not found"
            }

        # Save updated config
        with open(KEY_REMOTES_CONFIG, 'w') as f:
            json.dump(config, f, indent=2)

        # Remove local directory if it exists
        remote_path = REMOTES_DIR / name
        if remote_path.exists():
            shutil.rmtree(remote_path)

        return {
            "success": True,
            "name": name,
            "message": f"Remote '{name}' removed successfully"
        }

    except Exception as e:
        logging.exception("Unexpected error removing remote source")
        return {
            "success": False,
            "error": f"Failed to remove remote source: {str(e)}"
        }

def _get_key_fingerprint(key_path: Path) -> str:
    """
    Generates a SHA256 fingerprint for a public key.

    Args:
        key_path: Path to the public key file

    Returns:
        SHA256 fingerprint in format "sha256:hexdigest"
    """
    pub_key_bytes = key_path.read_bytes()
    return "sha256:" + hashlib.sha256(pub_key_bytes).hexdigest()
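Note that _get_key_fingerprint hashes the raw file bytes, so the PEM (.pub.pem) and OpenSSH (.pub) encodings of the same key produce different fingerprints; callers comparing fingerprints must compare like-encoded files. A quick sketch (file names follow generate_key's naming but are otherwise hypothetical):

    from pathlib import Path
    pem_fp = _get_key_fingerprint(Path("author_key.pub.pem"))
    ssh_fp = _get_key_fingerprint(Path("author_key.pub"))
    assert pem_fp != ssh_fp  # same key, different serialization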
363
src-pybind/fourdst/core/platform.py
Normal file
@@ -0,0 +1,363 @@
# fourdst/core/platform.py

import json
import platform
import shutil
import subprocess
from pathlib import Path

from fourdst.core.config import ABI_CACHE_FILE, CACHE_PATH
from fourdst.core.utils import run_command

ABI_DETECTOR_CPP_SRC = """
#include <iostream>
#include <string>
#include <vector>

#ifdef __GNUC__
#if __has_include(<gnu/libc-version.h>)
#include <gnu/libc-version.h>
#endif
#endif

int main() {
    std::string os;
    std::string compiler;
    std::string compiler_version;
    std::string stdlib;
    std::string stdlib_version;
    std::string abi;

#if defined(__APPLE__) && defined(__MACH__)
    os = "macos";
#elif defined(__linux__)
    os = "linux";
#elif defined(_WIN32)
    os = "windows";
#else
    os = "unknown_os";
#endif

#if defined(__clang__)
    compiler = "clang";
    compiler_version = __clang_version__;
#elif defined(__GNUC__)
    compiler = "gcc";
    compiler_version = std::to_string(__GNUC__) + "." + std::to_string(__GNUC_MINOR__) + "." + std::to_string(__GNUC_PATCHLEVEL__);
#elif defined(_MSC_VER)
    compiler = "msvc";
    compiler_version = std::to_string(_MSC_VER);
#else
    compiler = "unknown_compiler";
    compiler_version = "0";
#endif

#if defined(_LIBCPP_VERSION)
    stdlib = "libc++";
    stdlib_version = std::to_string(_LIBCPP_VERSION);
    abi = "libc++_abi"; // On libc++, the ABI is tightly coupled with the library itself.
#elif defined(__GLIBCXX__)
    stdlib = "libstdc++";
#if defined(_GLIBCXX_USE_CXX11_ABI)
    abi = _GLIBCXX_USE_CXX11_ABI == 1 ? "cxx11_abi" : "pre_cxx11_abi";
#else
    abi = "pre_cxx11_abi";
#endif
#if __has_include(<gnu/libc-version.h>)
    stdlib_version = gnu_get_libc_version();
#else
    stdlib_version = "unknown";
#endif
#else
    stdlib = "unknown_stdlib";
    abi = "unknown_abi";
#endif

    std::cout << "os=" << os << std::endl;
    std::cout << "compiler=" << compiler << std::endl;
    std::cout << "compiler_version=" << compiler_version << std::endl;
    std::cout << "stdlib=" << stdlib << std::endl;
    if (!stdlib_version.empty()) {
        std::cout << "stdlib_version=" << stdlib_version << std::endl;
    }
    // Always print the ABI key for consistent parsing
    std::cout << "abi=" << abi << std::endl;

    return 0;
}
"""

ABI_DETECTOR_MESON_SRC = """
project('abi-detector', 'cpp', default_options : ['cpp_std=c++23'])
executable('detector', 'main.cpp')
"""

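# The detector prints newline-delimited key=value pairs that the parsers in
# this module and in core/build.py split on '='. Illustrative output on a
# GCC/Linux machine (the version numbers are hypothetical):
#
#     os=linux
#     compiler=gcc
#     compiler_version=12.2.0
#     stdlib=libstdc++
#     stdlib_version=2.36
#     abi=cxx11_abi
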
def _detect_and_cache_abi() -> dict:
    """
    Compiles and runs a C++ program to detect the compiler ABI, then caches it.
    Falls back to platform-based detection if meson is not available (e.g., in packaged apps).
    """
    import sys
    import logging

    # Use logging instead of print to avoid stdout contamination
    logger = logging.getLogger(__name__)
    logger.info("Performing one-time native C++ ABI detection...")

    # Check if meson is available
    meson_available = shutil.which("meson") is not None

    if not meson_available:
        logger.warning("Meson not available, using fallback platform detection")
        return _fallback_platform_detection()

    temp_dir = CACHE_PATH / "abi_detector"
    if temp_dir.exists():
        shutil.rmtree(temp_dir)
    temp_dir.mkdir(parents=True)

    try:
        (temp_dir / "main.cpp").write_text(ABI_DETECTOR_CPP_SRC)
        (temp_dir / "meson.build").write_text(ABI_DETECTOR_MESON_SRC)

        logger.info(" - Configuring detector...")
        run_command(["meson", "setup", "build"], cwd=temp_dir)
        logger.info(" - Compiling detector...")
        run_command(["meson", "compile", "-C", "build"], cwd=temp_dir)

        detector_exe = temp_dir / "build" / "detector"
        logger.info(" - Running detector...")
        proc = subprocess.run([str(detector_exe)], check=True, capture_output=True, text=True)

        abi_details = {}
        for line in proc.stdout.strip().split('\n'):
            if '=' in line:
                key, value = line.split('=', 1)
                abi_details[key] = value.strip()

        arch = platform.machine()
        stdlib_version = abi_details.get('stdlib_version', 'unknown')
        abi_string = f"{abi_details['compiler']}-{abi_details['stdlib']}-{stdlib_version}-{abi_details['abi']}"

        platform_data = {
            "os": abi_details['os'],
            "arch": arch,
            "triplet": f"{arch}-{abi_details['os']}",
            "abi_signature": abi_string,
            "details": abi_details,
            "is_native": True,
            "cross_file": None,
            "docker_image": None
        }

        with open(ABI_CACHE_FILE, 'w') as f:
            json.dump(platform_data, f, indent=4)

        logger.info(f" - ABI details cached to {ABI_CACHE_FILE}")
        return platform_data

    except Exception as e:
        logger.warning(f"ABI detection failed: {e}, falling back to platform detection")
        return _fallback_platform_detection()
    finally:
        if temp_dir.exists():
            shutil.rmtree(temp_dir)

def _fallback_platform_detection() -> dict:
    """
    Fallback platform detection that doesn't require external tools.
    Used when meson is not available (e.g., in packaged applications).
    """
    import sys
    import logging

    logger = logging.getLogger(__name__)
    logger.info("Using fallback platform detection (no external tools required)")

    # Get basic platform information
    arch = platform.machine()
    system = platform.system().lower()

    # Map common architectures
    arch_mapping = {
        'x86_64': 'x86_64',
        'AMD64': 'x86_64',
        'arm64': 'aarch64',
        'aarch64': 'aarch64',
        'i386': 'i686',
        'i686': 'i686'
    }
    normalized_arch = arch_mapping.get(arch, arch)

    # Detect compiler and stdlib based on platform
    if system == 'darwin':
        # macOS
        os_name = 'darwin'
        compiler = 'clang'
        stdlib = 'libc++'
        # Get macOS version for stdlib version
        mac_version = platform.mac_ver()[0]
        stdlib_version = mac_version.split('.')[0] if mac_version else 'unknown'
        abi = 'cxx11'
    elif system == 'linux':
        # Linux
        os_name = 'linux'
        # Try to detect if we're using GCC or Clang
        compiler = 'gcc'  # Default assumption
        stdlib = 'libstdc++'
        stdlib_version = '11'  # Common default
        abi = 'cxx11'
    elif system == 'windows':
        # Windows
        os_name = 'windows'
        compiler = 'msvc'
        stdlib = 'msvcrt'
        stdlib_version = 'unknown'
        abi = 'cxx11'
    else:
        # Unknown system
        os_name = system
        compiler = 'unknown'
        stdlib = 'unknown'
        stdlib_version = 'unknown'
        abi = 'unknown'

    abi_string = f"{compiler}-{stdlib}-{stdlib_version}-{abi}"

    platform_data = {
        "os": os_name,
        "arch": normalized_arch,
        "triplet": f"{normalized_arch}-{os_name}",
        "abi_signature": abi_string,
        "details": {
            "compiler": compiler,
            "stdlib": stdlib,
            "stdlib_version": stdlib_version,
            "abi": abi,
            "os": os_name,
            "detection_method": "fallback"
        },
        "is_native": True,
        "cross_file": None,
        "docker_image": None
    }

    # Cache the result
    try:
        CACHE_PATH.mkdir(parents=True, exist_ok=True)
        with open(ABI_CACHE_FILE, 'w') as f:
            json.dump(platform_data, f, indent=4)
        logger.info(f"Fallback platform data cached to {ABI_CACHE_FILE}")
    except Exception as e:
        logger.warning(f"Failed to cache platform data: {e}")

    return platform_data

def get_platform_identifier() -> dict:
    """
    Gets the native platform identifier, using a cached value if available.
    """
    if ABI_CACHE_FILE.exists():
        with open(ABI_CACHE_FILE, 'r') as f:
            plat = json.load(f)
    else:
        plat = _detect_and_cache_abi()
    plat['type'] = 'native'
    return plat

def _parse_version(version_str: str) -> tuple:
    """Parses a version string like '12.3.1' into a tuple of integers."""
    return tuple(map(int, (version_str.split('.') + ['0', '0'])[:3]))

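# e.g. _parse_version("12.3") == (12, 3, 0), and _parse_version("2.28.1.5")
# truncates to (2, 28, 1); non-numeric components would raise ValueError,
# which is caught by the compatibility check below.
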
def is_abi_compatible(host_platform: dict, binary_platform: dict) -> tuple[bool, str]:
    """
    Checks if a binary's platform is compatible with the host's platform.
    This is more nuanced than a simple string comparison, allowing for forward compatibility:
    - macOS: A binary for an older OS version can run on a newer one, if the toolchain matches.
    - Linux: A binary for an older GLIBC version can run on a newer one.
    """
    required_keys = ['os', 'arch', 'abi_signature']
    if not all(key in host_platform for key in required_keys):
        return False, f"Host platform data is malformed. Missing keys: {[k for k in required_keys if k not in host_platform]}"
    if not all(key in binary_platform for key in required_keys):
        return False, f"Binary platform data is malformed. Missing keys: {[k for k in required_keys if k not in binary_platform]}"

    host_os = host_platform.get('os') or host_platform.get('details', {}).get('os')
    binary_os = binary_platform.get('os') or binary_platform.get('details', {}).get('os')
    host_arch = host_platform.get('arch') or host_platform.get('details', {}).get('arch')
    binary_arch = binary_platform.get('arch') or binary_platform.get('details', {}).get('arch')

    if host_os != binary_os:
        return False, f"OS mismatch: host is {host_os}, binary is {binary_os}"
    if host_arch != binary_arch:
        return False, f"Architecture mismatch: host is {host_arch}, binary is {binary_arch}"

    host_sig = host_platform['abi_signature']
    binary_sig = binary_platform['abi_signature']

    try:
        host_parts = host_sig.split('-')
        binary_parts = binary_sig.split('-')

        # Find version numbers in any position
        host_ver_str = next((p for p in host_parts if p[0].isdigit()), None)
        binary_ver_str = next((p for p in binary_parts if p[0].isdigit()), None)

        if not host_ver_str or not binary_ver_str:
            return False, "Could not extract version from ABI signature"

        host_ver = _parse_version(host_ver_str)
        binary_ver = _parse_version(binary_ver_str)

        if host_platform['os'] == 'macos':
            # For macOS, also check for clang and libc++
            if 'clang' not in binary_sig:
                return False, "Toolchain mismatch: 'clang' not in binary signature"
            if 'libc++' not in binary_sig:
                return False, "Toolchain mismatch: 'libc++' not in binary signature"
            if host_ver < binary_ver:
                return False, f"macOS version too old: host is {host_ver_str}, binary needs {binary_ver_str}"
            return True, "Compatible"

        elif host_platform['os'] == 'linux':
            if host_ver < binary_ver:
                return False, f"GLIBC version too old: host is {host_ver_str}, binary needs {binary_ver_str}"
            return True, "Compatible"

    except (IndexError, ValueError, StopIteration):
        return False, "Malformed ABI signature string"

    return False, "Unknown compatibility check failure"

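# Worked example (values illustrative): a binary built against GLIBC 2.28 on
# an x86_64 GLIBC 2.35 host passes, because after the OS/arch checks only the
# embedded version numbers in the signatures are compared:
#
#     host = {"os": "linux", "arch": "x86_64",
#             "abi_signature": "gcc-libstdc++-2.35-cxx11_abi"}
#     binary = {"os": "linux", "arch": "x86_64",
#               "abi_signature": "gcc-libstdc++-2.28-cxx11_abi"}
#     assert is_abi_compatible(host, binary) == (True, "Compatible")
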
def get_macos_targeted_platform_identifier(target_version: str) -> dict:
    """
    Generates a platform identifier for a specific target macOS version.
    """
    host_platform = get_platform_identifier()
    host_details = host_platform['details']

    compiler = host_details.get('compiler', 'clang')
    stdlib = host_details.get('stdlib', 'libc++')
    abi = host_details.get('abi', 'libc++_abi')
    arch = platform.machine()

    abi_string = f"{compiler}-{stdlib}-{target_version}-{abi}"

    return {
        "triplet": f"{arch}-macos",
        "abi_signature": abi_string,
        "details": {
            "os": "macos",
            "compiler": compiler,
            "compiler_version": host_details.get('compiler_version'),
            "stdlib": stdlib,
            "stdlib_version": target_version,
            "abi": abi,
        },
        "is_native": True,
        "cross_file": None,
        "docker_image": None,
        "arch": arch
    }
649
src-pybind/fourdst/core/plugin.py
Normal file
@@ -0,0 +1,649 @@
# fourdst/core/plugin.py

import yaml
import zipfile
import shutil
import tempfile
import difflib
from pathlib import Path
from typing import Dict, List, Any, Optional, Tuple
import logging

from fourdst.cli.common.utils import calculate_sha256, run_command, get_template_content
from fourdst.cli.common.templates import GITIGNORE_CONTENT

def parse_cpp_interface(header_path: Path) -> Dict[str, Any]:
    """
    Parses a C++ header file using libclang to find classes and their pure virtual methods.

    Returns:
        Dict with structure:
        {
            "success": bool,
            "data": {
                "interface_name": [
                    {"signature": str, "body": str},
                    ...
                ]
            },
            "error": str (if success=False)
        }
    """
    try:
        # Import libclang
        try:
            from clang import cindex
        except ImportError:
            return {
                'success': False,
                'error': "The 'init' command requires 'libclang'. Please install it with: pip install python-clang-16"
            }

        if not cindex.Config.loaded:
            try:
                cindex.Config.set_library_file(cindex.conf.get_filename())
            except cindex.LibclangError as e:
                return {
                    'success': False,
                    'error': f"libclang library not found. Please ensure it's installed and in your system's path. Details: {e}"
                }

        index = cindex.Index.create()
        args = ['-x', 'c++', '-std=c++17']
        translation_unit = index.parse(str(header_path), args=args)

        if not translation_unit:
            return {
                'success': False,
                'error': f"Unable to parse the translation unit {header_path}"
            }

        interfaces = {}

        def walk_ast(node):
            if node.kind == cindex.CursorKind.CLASS_DECL and node.is_definition():
                pv_methods = [m for m in node.get_children()
                              if m.kind == cindex.CursorKind.CXX_METHOD and m.is_pure_virtual_method()]

                if pv_methods:
                    interface_name = node.spelling
                    methods = []
                    for method in pv_methods:
                        args_str = ', '.join([arg.type.spelling for arg in method.get_arguments()])
                        sig = f"{method.result_type.spelling} {method.spelling}({args_str})"

                        if method.is_const_method():
                            sig += " const"

                        methods.append({
                            "signature": sig,
                            "body": " // TODO: Implement this method"
                        })

                    interfaces[interface_name] = methods

            for child in node.get_children():
                walk_ast(child)

        walk_ast(translation_unit.cursor)

        return {
            'success': True,
            'data': interfaces
        }

    except Exception as e:
        logging.exception(f"Unexpected error parsing C++ header {header_path}")
        return {
            'success': False,
            'error': f"Unexpected error: {str(e)}"
        }
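A short usage sketch for parse_cpp_interface; the header path is illustrative, and libclang must be installed for the call to succeed:

from pathlib import Path
from fourdst.core.plugin import parse_cpp_interface

result = parse_cpp_interface(Path("include/my_interface.h"))  # illustrative path
if result["success"]:
    for name, methods in result["data"].items():
        print(name, [m["signature"] for m in methods])
else:
    print("Parse failed:", result["error"])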
def generate_plugin_project(config: Dict[str, Any]) -> Dict[str, Any]:
    """
    Generates a new plugin project from configuration.

    Args:
        config: {
            "project_name": str,
            "header_path": Path,
            "directory": Path,
            "version": str,
            "libplugin_rev": str,
            "chosen_interface": str,
            "interfaces": dict  # from parse_cpp_interface
        }

    Returns:
        Dict with structure:
        {
            "success": bool,
            "data": {
                "project_path": str,
                "files_created": [str, ...]
            },
            "error": str (if success=False)
        }
    """
    try:
        project_name = config['project_name']
        header_path = Path(config['header_path'])  # Convert string to Path object
        directory = Path(config['directory'])  # Convert string to Path object
        version = config['version']
        libplugin_rev = config['libplugin_rev']
        chosen_interface = config['chosen_interface']
        interfaces = config['interfaces']

        # Generate method stubs
        method_stubs = "\n".join(
            f" {method['signature']} override {{\n{method['body']}\n }}"
            for method in interfaces[chosen_interface]
        )

        class_name = ''.join(filter(str.isalnum, project_name.replace('_', ' ').title().replace(' ', ''))) + "Plugin"
        root_path = directory / project_name
        src_path = root_path / "src"
        include_path = src_path / "include"
        subprojects_path = root_path / "subprojects"

        files_created = []

        # Create directory structure
        src_path.mkdir(parents=True, exist_ok=True)
        include_path.mkdir(exist_ok=True)
        subprojects_path.mkdir(exist_ok=True)

        # Copy interface header
        local_header_path = include_path / header_path.name
        shutil.copy(header_path, local_header_path)
        files_created.append(str(local_header_path.relative_to(root_path)))

        # Create libplugin.wrap file
        libplugin_wrap_content = f"""[wrap-git]
url = https://github.com/4D-STAR/libplugin
revision = {libplugin_rev}
depth = 1
"""
        wrap_file = subprojects_path / "libplugin.wrap"
        wrap_file.write_text(libplugin_wrap_content)
        files_created.append(str(wrap_file.relative_to(root_path)))

        # Create meson.build from template
        meson_template = get_template_content("meson.build.in")
        meson_content = meson_template.format(
            project_name=project_name,
            version=version
        )
        meson_file = root_path / "meson.build"
        meson_file.write_text(meson_content)
        files_created.append(str(meson_file.relative_to(root_path)))

        # Create C++ source file from template
        cpp_template = get_template_content("plugin.cpp.in")
        cpp_content = cpp_template.format(
            class_name=class_name,
            project_name=project_name,
            interface=chosen_interface,
            interface_header_path=header_path.name,
            method_stubs=method_stubs
        )
        cpp_file = src_path / f"{project_name}.cpp"
        cpp_file.write_text(cpp_content)
        files_created.append(str(cpp_file.relative_to(root_path)))

        # Create .gitignore
        gitignore_file = root_path / ".gitignore"
        gitignore_file.write_text(GITIGNORE_CONTENT)
        files_created.append(str(gitignore_file.relative_to(root_path)))

        # Initialize Git Repository
        run_command(["git", "init"], cwd=root_path)
        run_command(["git", "add", "."], cwd=root_path)
        commit_message = f"Initial commit: Scaffold fourdst plugin '{project_name}'"
        run_command(["git", "commit", "-m", commit_message], cwd=root_path)

        return {
            'success': True,
            'data': {
                'project_path': str(root_path),
                'files_created': files_created
            }
        }

    except Exception as e:
        logging.exception("Unexpected error generating plugin project")
        return {
            'success': False,
            'error': f"Unexpected error: {str(e)}"
        }
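A usage sketch tying the two functions together; all config values here are illustrative:

from pathlib import Path
from fourdst.core.plugin import parse_cpp_interface, generate_plugin_project

parsed = parse_cpp_interface(Path("include/my_interface.h"))
config = {
    "project_name": "my_plugin",            # illustrative
    "header_path": "include/my_interface.h",
    "directory": ".",
    "version": "0.1.0",
    "libplugin_rev": "main",
    "chosen_interface": "MyInterface",      # a key from parsed["data"]
    "interfaces": parsed["data"],
}
result = generate_plugin_project(config)
print(result["data"]["project_path"] if result["success"] else result["error"])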
def validate_bundle_directory(directory: Path) -> Dict[str, Any]:
    """
    Validates that a directory has the structure of a valid bundle.

    Returns:
        Dict with structure:
        {
            "success": bool,
            "data": {
                "errors": [str, ...],
                "is_signed": bool
            },
            "error": str (if success=False)
        }
    """
    try:
        errors = []
        manifest_path = directory / "manifest.yaml"

        if not manifest_path.is_file():
            errors.append("Missing 'manifest.yaml' in the root of the directory.")
            return {
                'success': True,
                'data': {
                    'errors': errors,
                    'is_signed': False
                }
            }

        try:
            with open(manifest_path, 'r') as f:
                manifest = yaml.safe_load(f)
        except yaml.YAMLError as e:
            errors.append(f"Invalid YAML in manifest.yaml: {e}")
            return {
                'success': True,
                'data': {
                    'errors': errors,
                    'is_signed': False
                }
            }

        # Check that all files referenced in the manifest exist
        for plugin_name, plugin_data in manifest.get('bundlePlugins', {}).items():
            sdist_info = plugin_data.get('sdist', {})
            if sdist_info:
                sdist_path = sdist_info.get('path')
                if sdist_path and not (directory / sdist_path).is_file():
                    errors.append(f"Missing sdist file for '{plugin_name}': {sdist_path}")

            for binary in plugin_data.get('binaries', []):
                binary_path = binary.get('path')
                if binary_path and not (directory / binary_path).is_file():
                    errors.append(f"Missing binary file for '{plugin_name}': {binary_path}")

                # If checksums exist, validate them
                expected_checksum = binary.get('checksum')
                if binary_path and expected_checksum:
                    file_to_check = directory / binary_path
                    if file_to_check.is_file():
                        actual_checksum = "sha256:" + calculate_sha256(file_to_check)
                        if actual_checksum != expected_checksum:
                            errors.append(f"Checksum mismatch for '{binary_path}'")

        # Check if bundle is signed
        is_signed = ('bundleAuthorKeyFingerprint' in manifest and
                     (directory / "manifest.sig").exists())

        return {
            'success': True,
            'data': {
                'errors': errors,
                'is_signed': is_signed
            }
        }

    except Exception as e:
        logging.exception(f"Unexpected error validating bundle directory {directory}")
        return {
            'success': False,
            'error': f"Unexpected error: {str(e)}"
        }
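A usage sketch (directory name illustrative); note the validator reports structural problems in data["errors"] and reserves success=False for unexpected failures:

from pathlib import Path
from fourdst.core.plugin import validate_bundle_directory

report = validate_bundle_directory(Path("my_bundle_dir"))
if report["success"]:
    for problem in report["data"]["errors"]:
        print("-", problem)
    print("signed:", report["data"]["is_signed"])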
def pack_bundle_directory(directory: Path, output_config: Dict[str, Any]) -> Dict[str, Any]:
    """
    Packs a directory into a .fbundle archive.

    Args:
        directory: Path to directory to pack
        output_config: {
            "name": str (optional, defaults to directory name),
            "output_dir": Path (optional, defaults to directory.parent)
        }

    Returns:
        Dict with structure:
        {
            "success": bool,
            "data": {
                "output_path": str,
                "is_signed": bool,
                "files_packed": int
            },
            "error": str (if success=False)
        }
    """
    try:
        # First validate the directory
        validation_result = validate_bundle_directory(directory)
        if not validation_result['success']:
            return validation_result

        if validation_result['data']['errors']:
            return {
                'success': False,
                'error': f"Validation failed: {'; '.join(validation_result['data']['errors'])}"
            }

        output_name = output_config.get('name', directory.name)
        output_dir = output_config.get('output_dir', directory.parent)
        output_path = output_dir / f"{output_name}.fbundle"

        files_packed = 0
        with zipfile.ZipFile(output_path, 'w', zipfile.ZIP_DEFLATED) as bundle_zip:
            for file_to_add in directory.rglob('*'):
                if file_to_add.is_file():
                    arcname = file_to_add.relative_to(directory)
                    bundle_zip.write(file_to_add, arcname)
                    files_packed += 1

        return {
            'success': True,
            'data': {
                'output_path': str(output_path.resolve()),
                'is_signed': validation_result['data']['is_signed'],
                'files_packed': files_packed
            }
        }

    except Exception as e:
        logging.exception(f"Unexpected error packing bundle directory {directory}")
        return {
            'success': False,
            'error': f"Unexpected error: {str(e)}"
        }
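A usage sketch (paths illustrative); packing refuses to proceed if validation reports errors:

from pathlib import Path
from fourdst.core.plugin import pack_bundle_directory

result = pack_bundle_directory(Path("my_bundle_dir"), {"name": "demo", "output_dir": Path(".")})
if result["success"]:
    print(f"Packed {result['data']['files_packed']} files -> {result['data']['output_path']}")
else:
    print(result["error"])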
def extract_plugin_from_bundle(bundle_path: Path, plugin_name: str, output_path: Path) -> Dict[str, Any]:
    """
    Extracts a plugin's source code from a bundle.

    Returns:
        Dict with structure:
        {
            "success": bool,
            "data": {
                "output_path": str,
                "plugin_info": dict
            },
            "error": str (if success=False)
        }
    """
    try:
        output_path.mkdir(parents=True, exist_ok=True)

        with tempfile.TemporaryDirectory() as temp_dir_str:
            temp_dir = Path(temp_dir_str)

            # Unpack the main bundle
            with zipfile.ZipFile(bundle_path, 'r') as bundle_zip:
                bundle_zip.extractall(temp_dir)

            # Read the manifest
            manifest_path = temp_dir / "manifest.yaml"
            if not manifest_path.exists():
                return {
                    'success': False,
                    'error': "Bundle is invalid. Missing manifest.yaml."
                }

            with open(manifest_path, 'r') as f:
                manifest = yaml.safe_load(f)

            # Find the plugin and its sdist
            plugin_data = manifest.get('bundlePlugins', {}).get(plugin_name)
            if not plugin_data:
                available_plugins = list(manifest.get('bundlePlugins', {}).keys())
                return {
                    'success': False,
                    'error': f"Plugin '{plugin_name}' not found in the bundle. Available plugins: {', '.join(available_plugins) if available_plugins else 'none'}"
                }

            sdist_info = plugin_data.get('sdist')
            if not sdist_info or 'path' not in sdist_info:
                return {
                    'success': False,
                    'error': f"Source distribution (sdist) not found for plugin '{plugin_name}'."
                }

            sdist_path_in_bundle = temp_dir / sdist_info['path']
            if not sdist_path_in_bundle.is_file():
                return {
                    'success': False,
                    'error': f"sdist file '{sdist_info['path']}' is missing from the bundle archive."
                }

            # Extract the sdist to the final output directory
            final_destination = output_path / plugin_name
            final_destination.mkdir(parents=True, exist_ok=True)

            with zipfile.ZipFile(sdist_path_in_bundle, 'r') as sdist_zip:
                sdist_zip.extractall(final_destination)

            return {
                'success': True,
                'data': {
                    'output_path': str(final_destination.resolve()),
                    'plugin_info': plugin_data
                }
            }

    except zipfile.BadZipFile:
        return {
            'success': False,
            'error': f"'{bundle_path}' is not a valid bundle (zip) file."
        }
    except Exception as e:
        logging.exception(f"Unexpected error extracting plugin {plugin_name} from {bundle_path}")
        return {
            'success': False,
            'error': f"Unexpected error: {str(e)}"
        }
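A usage sketch; bundle and plugin names are illustrative:

from pathlib import Path
from fourdst.core.plugin import extract_plugin_from_bundle

result = extract_plugin_from_bundle(Path("demo.fbundle"), "my_plugin", Path("extracted"))
print(result["data"]["output_path"] if result["success"] else result["error"])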
def compare_plugin_sources(bundle_a_path: Path, bundle_b_path: Path, plugin_name: str) -> Dict[str, Any]:
    """
    Compares the source code of a specific plugin between two different bundles.

    Returns:
        Dict with structure:
        {
            "success": bool,
            "data": {
                "has_changes": bool,
                "added_files": [str, ...],
                "removed_files": [str, ...],
                "modified_files": [
                    {
                        "file_path": str,
                        "diff": str
                    },
                    ...
                ]
            },
            "error": str (if success=False)
        }
    """
    try:
        def extract_sdist(bundle_path: Path, plugin_name: str, temp_dir: Path):
            """Helper function to extract sdist from bundle."""
            sdist_extract_path = temp_dir / f"{plugin_name}_src"

            with tempfile.TemporaryDirectory() as bundle_unpack_dir_str:
                bundle_unpack_dir = Path(bundle_unpack_dir_str)

                with zipfile.ZipFile(bundle_path, 'r') as bundle_zip:
                    bundle_zip.extractall(bundle_unpack_dir)

                manifest_path = bundle_unpack_dir / "manifest.yaml"
                if not manifest_path.exists():
                    raise FileNotFoundError("manifest.yaml not found in bundle.")

                with open(manifest_path, 'r') as f:
                    manifest = yaml.safe_load(f)

                plugin_data = manifest.get('bundlePlugins', {}).get(plugin_name)
                if not plugin_data or 'sdist' not in plugin_data:
                    raise FileNotFoundError(f"Plugin '{plugin_name}' or its sdist not found in {bundle_path.name}.")

                sdist_path_in_bundle = bundle_unpack_dir / plugin_data['sdist']['path']
                if not sdist_path_in_bundle.exists():
                    raise FileNotFoundError(f"sdist archive '{plugin_data['sdist']['path']}' not found in bundle.")

                with zipfile.ZipFile(sdist_path_in_bundle, 'r') as sdist_zip:
                    sdist_zip.extractall(sdist_extract_path)

            return sdist_extract_path

        with tempfile.TemporaryDirectory() as temp_a_str, tempfile.TemporaryDirectory() as temp_b_str:
            try:
                src_a_path = extract_sdist(bundle_a_path, plugin_name, Path(temp_a_str))
                src_b_path = extract_sdist(bundle_b_path, plugin_name, Path(temp_b_str))
            except FileNotFoundError as e:
                return {
                    'success': False,
                    'error': str(e)
                }

            files_a = {p.relative_to(src_a_path) for p in src_a_path.rglob('*') if p.is_file()}
            files_b = {p.relative_to(src_b_path) for p in src_b_path.rglob('*') if p.is_file()}

            added_files = list(sorted(files_b - files_a))
            removed_files = list(sorted(files_a - files_b))
            common_files = files_a & files_b

            modified_files = []
            for file_rel_path in sorted(list(common_files)):
                content_a = (src_a_path / file_rel_path).read_text()
                content_b = (src_b_path / file_rel_path).read_text()

                if content_a != content_b:
                    diff = ''.join(difflib.unified_diff(
                        content_a.splitlines(keepends=True),
                        content_b.splitlines(keepends=True),
                        fromfile=f"a/{file_rel_path}",
                        tofile=f"b/{file_rel_path}",
                    ))
                    modified_files.append({
                        'file_path': str(file_rel_path),
                        'diff': diff
                    })

            has_changes = bool(added_files or removed_files or modified_files)

            return {
                'success': True,
                'data': {
                    'has_changes': has_changes,
                    'added_files': [str(f) for f in added_files],
                    'removed_files': [str(f) for f in removed_files],
                    'modified_files': modified_files
                }
            }

    except Exception as e:
        logging.exception(f"Unexpected error comparing plugin {plugin_name} between bundles")
        return {
            'success': False,
            'error': f"Unexpected error: {str(e)}"
        }
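A usage sketch that prints the unified diffs for any modified files (bundle and plugin names illustrative):

from pathlib import Path
from fourdst.core.plugin import compare_plugin_sources

result = compare_plugin_sources(Path("a.fbundle"), Path("b.fbundle"), "my_plugin")
if result["success"] and result["data"]["has_changes"]:
    for mod in result["data"]["modified_files"]:
        print(mod["diff"])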
def validate_plugin_project(project_path: Path) -> Dict[str, Any]:
    """
    Validates a plugin's structure and meson.build file.

    Returns:
        Dict with structure:
        {
            "success": bool,
            "data": {
                "errors": [str, ...],
                "warnings": [str, ...],
                "checks": [
                    {
                        "name": str,
                        "passed": bool,
                        "is_warning": bool,
                        "message": str
                    },
                    ...
                ]
            },
            "error": str (if success=False)
        }
    """
    try:
        # Convert string path to Path object if needed
        if isinstance(project_path, str):
            project_path = Path(project_path)

        errors = []
        warnings = []
        checks = []

        def check(condition, name, success_msg, error_msg, is_warning=False):
            passed = bool(condition)
            checks.append({
                'name': name,
                'passed': passed,
                'is_warning': is_warning,
                'message': success_msg if passed else error_msg
            })

            if not passed:
                if is_warning:
                    warnings.append(error_msg)
                else:
                    errors.append(error_msg)

            return passed

        # Check for meson.build
        meson_file = project_path / "meson.build"
        meson_content = ""
        if check(meson_file.exists(), "meson_build_exists", "Found meson.build file.", "Missing meson.build file."):
            meson_content = meson_file.read_text()
            # Check for project() definition
            check("project(" in meson_content, "has_project_definition", "Contains project() definition.", "meson.build is missing a project() definition.", is_warning=True)
            # Check for shared_library()
            check("shared_library(" in meson_content, "has_shared_library", "Contains shared_library() definition.", "meson.build does not appear to define a shared_library().")

        # Check for source files
        has_cpp = any(project_path.rglob("*.cpp"))
        has_h = any(project_path.rglob("*.h")) or any(project_path.rglob("*.hpp"))
        check(has_cpp, "has_cpp_files", "Found C++ source files (.cpp).", "No .cpp source files found in the directory.", is_warning=True)
        check(has_h, "has_header_files", "Found C++ header files (.h/.hpp).", "No .h or .hpp header files found in the directory.", is_warning=True)

        # Check for test definition (optional)
        check("test(" in meson_content, "has_tests", "Contains test() definitions.", "No test() definitions found in meson.build. Consider adding tests.", is_warning=True)

        return {
            'success': True,
            'data': {
                'errors': errors,
                'warnings': warnings,
                'checks': checks
            }
        }

    except Exception as e:
        logging.exception(f"Unexpected error validating plugin project {project_path}")
        return {
            'success': False,
            'error': f"Unexpected error: {str(e)}"
        }
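A usage sketch that renders the per-check report (project path illustrative):

from pathlib import Path
from fourdst.core.plugin import validate_plugin_project

report = validate_plugin_project(Path("my_plugin"))
if report["success"]:
    for chk in report["data"]["checks"]:
        status = "ok" if chk["passed"] else ("warn" if chk["is_warning"] else "FAIL")
        print(f"[{status}] {chk['name']}: {chk['message']}")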
47
src-pybind/fourdst/core/utils.py
Normal file
@@ -0,0 +1,47 @@
# fourdst/core/utils.py

import subprocess
from pathlib import Path
import hashlib

def run_command(command: list[str], cwd: Path = None, check=True, progress_callback=None, input: bytes = None, env: dict = None, binary_output: bool = False):
    """Runs a command, optionally reporting progress and using a custom environment."""
    command_str = ' '.join(command)
    if progress_callback:
        progress_callback(f"Running command: {command_str}")

    try:
        result = subprocess.run(
            command,
            check=check,
            capture_output=True,
            text=not binary_output,
            input=input,
            cwd=cwd,
            env=env
        )

        if progress_callback and result.stdout:
            if binary_output:
                progress_callback("  - STDOUT: <binary data>")
            else:
                progress_callback(f"  - STDOUT: {result.stdout.strip()}")
        if progress_callback and result.stderr:
            progress_callback(f"  - STDERR: {result.stderr.strip()}")

        return result
    except subprocess.CalledProcessError as e:
        error_message = f"""Command '{command_str}' failed with exit code {e.returncode}.\n--- STDOUT ---\n{e.stdout.strip()}\n--- STDERR ---\n{e.stderr.strip()}\n"""
        if progress_callback:
            progress_callback(error_message)
        if check:
            raise Exception(error_message) from e
        return e

def calculate_sha256(file_path: Path) -> str:
    """Calculates the SHA256 checksum of a file."""
    sha256_hash = hashlib.sha256()
    with open(file_path, "rb") as f:
        for byte_block in iter(lambda: f.read(4096), b""):
            sha256_hash.update(byte_block)
    return sha256_hash.hexdigest()
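A usage sketch for the two helpers above (arguments illustrative):

from pathlib import Path
from fourdst.core.utils import run_command, calculate_sha256

run_command(["git", "status"], cwd=Path("."), progress_callback=print)
print("sha256:" + calculate_sha256(Path("manifest.yaml")))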