feat(toolbox): ui update

.gitignore (vendored, +2)
@@ -97,3 +97,5 @@ output/
 .idea/
 
 scratch/
+
+node_modules/
@@ -97,3 +97,22 @@ py_installation.install_sources(
     ),
     subdir: 'fourdst/cli/plugin'
 )
+
+py_installation.install_sources(
+    files(
+        meson.project_source_root() + '/fourdst/core/__init__.py',
+        meson.project_source_root() + '/fourdst/core/build.py',
+        meson.project_source_root() + '/fourdst/core/bundle.py',
+        meson.project_source_root() + '/fourdst/core/config.py',
+        meson.project_source_root() + '/fourdst/core/platform.py',
+        meson.project_source_root() + '/fourdst/core/utils.py'
+    ),
+    subdir: 'fourdst/core'
+)
+
+py_installation.install_sources(
+    files(
+        meson.project_source_root() + '/electron/bridge.py',
+    ),
+    subdir: 'fourdst/electron'
+)
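Note: the two new install_sources() calls copy the fourdst.core modules and the Electron bridge into the Python installation's site-packages under the given subdir values (fourdst/core and fourdst/electron), so the bridge ships as part of the installed fourdst package rather than living only in the source tree.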

electron/bridge.py (new file, +127)
@@ -0,0 +1,127 @@
#!/usr/bin/env python3
"""
Electron Bridge Script for 4DSTAR Bundle Management

UPDATED ARCHITECTURE (2025-08-09):
=====================================

This bridge script has been simplified to work with the refactored core functions
that now return JSON directly. No more complex stdout mixing or data wrapping.

Key Changes:
- Core functions return JSON-serializable dictionaries directly
- Progress messages go to stderr only (never mixed with JSON output)
- Clean JSON output to stdout for Electron to parse
- Simplified error handling with consistent JSON error format
"""

import sys
import json
import inspect
import traceback
from pathlib import Path
import datetime

# Custom JSON encoder to handle Path and datetime objects
class FourdstEncoder(json.JSONEncoder):
    def default(self, o):
        if isinstance(o, Path):
            return str(o)
        if isinstance(o, (datetime.datetime, datetime.date)):
            return o.isoformat()
        return super().default(o)

# Add the project root to the Python path to allow importing 'fourdst'
project_root = Path(__file__).resolve().parent.parent
sys.path.insert(0, str(project_root))

from fourdst.core import bundle

def main():
    # Use stderr for all logging to avoid interfering with JSON output on stdout
    log_file = sys.stderr
    print("--- Python backend bridge started ---", file=log_file, flush=True)

    if len(sys.argv) < 3:
        print(f"FATAL: Not enough arguments provided. Got {len(sys.argv)}. Exiting.", file=log_file, flush=True)
        # Return JSON error even for argument errors
        error_response = {
            'success': False,
            'error': f'Invalid arguments. Expected: <command> <json_args>. Got {len(sys.argv)} args.'
        }
        print(json.dumps(error_response), flush=True)
        sys.exit(1)

    command = sys.argv[1]
    args_json = sys.argv[2]
    print(f"[BRIDGE_INFO] Received command: {command}", file=log_file, flush=True)
    print(f"[BRIDGE_INFO] Received raw args: {args_json}", file=log_file, flush=True)

    try:
        kwargs = json.loads(args_json)
        print(f"[BRIDGE_INFO] Parsed kwargs: {kwargs}", file=log_file, flush=True)

        # Convert path strings to Path objects where needed
        for key, value in kwargs.items():
            if isinstance(value, str) and ('path' in key.lower() or 'key' in key.lower()):
                kwargs[key] = Path(value)
            elif isinstance(value, list) and 'dirs' in key.lower():
                kwargs[key] = [Path(p) for p in value]

        func = getattr(bundle, command)

        # Create progress callback that sends structured progress to stderr
        # This keeps progress separate from the final JSON result on stdout
        def progress_callback(message):
            # Progress goes to stderr to avoid mixing with JSON output
            if isinstance(message, dict):
                # Structured progress message (e.g., from fill_bundle)
                progress_msg = f"[PROGRESS] {json.dumps(message)}"
            else:
                # Simple string message
                progress_msg = f"[PROGRESS] {message}"
            print(progress_msg, file=log_file, flush=True)

        # Inspect the function signature to see if it accepts 'progress_callback'.
        sig = inspect.signature(func)
        if 'progress_callback' in sig.parameters:
            kwargs['progress_callback'] = progress_callback

        print(f"[BRIDGE_INFO] Calling function `bundle.{command}`...", file=log_file, flush=True)
        result = func(**kwargs)
        print("[BRIDGE_INFO] Function returned successfully.", file=log_file, flush=True)

        # Core functions now return JSON-serializable dictionaries directly
        # No need for wrapping or complex data transformation
        if result is None:
            # Fallback for functions that might still return None
            result = {
                'success': True,
                'message': f'{command} completed successfully.'
            }

        # Send the result directly as JSON to stdout
        print("[BRIDGE_INFO] Sending JSON response to stdout.", file=log_file, flush=True)
        json_response = json.dumps(result, cls=FourdstEncoder)
        print(json_response, flush=True)
        print("--- Python backend bridge finished successfully ---", file=log_file, flush=True)

    except Exception as e:
        # Get the full traceback for detailed debugging
        tb_str = traceback.format_exc()
        # Print the traceback to stderr so it appears in the terminal
        print(f"[BRIDGE_ERROR] Exception occurred: {tb_str}", file=sys.stderr, flush=True)

        # Send consistent JSON error response to stdout
        error_response = {
            'success': False,
            'error': f'Bridge error in {command}: {str(e)}',
            'traceback': tb_str  # Include traceback for debugging
        }
        json_response = json.dumps(error_response, cls=FourdstEncoder)
        print(json_response, flush=True)
        print("--- Python backend bridge finished with error ---", file=sys.stderr, flush=True)
        sys.exit(1)

if __name__ == "__main__":
    main()
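The bridge's contract (command and JSON arguments in argv, human-readable logs on stderr, exactly one JSON document on stdout) can be exercised without Electron. A minimal sketch; the bundle path is a placeholder, and `inspect_bundle` is one of the commands main.js dispatches below:

    import json
    import subprocess

    # Invoke the bridge the way Electron's main process does: <command> <json_args>.
    proc = subprocess.run(
        ["python", "electron/bridge.py", "inspect_bundle",
         json.dumps({"bundle_path": "demo.fbundle"})],  # hypothetical bundle path
        capture_output=True, text=True,
    )
    print(proc.stderr)                 # [BRIDGE_INFO] / [PROGRESS] logging
    result = json.loads(proc.stdout)   # the single JSON response
    print(result.get("success"), result.get("error"))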

electron/fourdst-backend.spec (new file, +52)
@@ -0,0 +1,52 @@
# -*- mode: python ; coding: utf-8 -*-

import sys
from pathlib import Path

# This is a PyInstaller spec file. It is used to bundle the Python backend
# into a single executable that can be shipped with the Electron app.

# The project_root is the 'fourdst/' directory that contains 'electron/', 'fourdst/', etc.
# SPECPATH is a variable provided by PyInstaller that contains the absolute path
# to the directory containing the spec file.
project_root = Path(SPECPATH).parent

# We need to add the project root to the path so that PyInstaller can find the 'fourdst' module.
sys.path.insert(0, str(project_root))

# The main script to be bundled.
analysis = Analysis(['bridge.py'],
                    pathex=[str(project_root)],
                    binaries=[],
                    # Add any modules that PyInstaller might not find automatically.
                    hiddenimports=['docker'],
                    hookspath=[],
                    runtime_hooks=[],
                    excludes=[],
                    win_no_prefer_redirects=False,
                    win_private_assemblies=False,
                    cipher=None,
                    noarchive=False)

pyz = PYZ(analysis.pure, analysis.zipped_data,
          cipher=None)

exe = EXE(pyz,
          analysis.scripts,
          [],
          exclude_binaries=True,
          name='fourdst-backend',
          debug=False,
          bootloader_ignore_signals=False,
          strip=False,
          upx=True,
          console=True)

coll = COLLECT(exe,
               analysis.binaries,
               analysis.zipfiles,
               analysis.datas,
               strip=False,
               upx=True,
               upx_exclude=[],
               name='fourdst-backend')
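The spec is consumed by PyInstaller rather than run directly. A sketch of building it programmatically, equivalent to running `pyinstaller fourdst-backend.spec`; the working directory is an assumption inferred from the development path main.js resolves (build/electron/dist/fourdst-backend/):

    import PyInstaller.__main__

    # COLLECT(..., name='fourdst-backend') produces a one-folder build in
    # dist/fourdst-backend/ containing the fourdst-backend executable.
    PyInstaller.__main__.run(["fourdst-backend.spec"])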

electron/index.html (new file, +104)
@@ -0,0 +1,104 @@
<!DOCTYPE html>
<html>
<head>
    <meta charset="UTF-8">
    <title>4DSTAR Bundle Manager</title>
    <link rel="stylesheet" href="styles.css">
</head>

<body>
    <div class="main-container">
        <aside class="sidebar">
            <div class="sidebar-header">
                <h3>4DSTAR</h3>
            </div>
            <nav class="sidebar-nav">
                <button id="open-bundle-btn" class="nav-button active">Open Bundle</button>
                <button id="create-bundle-btn" class="nav-button">Create Bundle</button>
            </nav>
            <div class="sidebar-footer">
                <p>v1.0.0</p>
            </div>
        </aside>

        <main class="content-area">
            <div id="welcome-screen">
                <h1>Welcome to 4DSTAR Bundle Manager</h1>
                <p>Open or create a bundle to get started.</p>
            </div>

            <div id="bundle-view" class="hidden">
                <header class="content-header">
                    <h2 id="bundle-title"></h2>
                    <div class="action-buttons">
                        <button id="edit-bundle-btn">Edit</button>
                        <button id="sign-bundle-btn">Sign</button>
                        <button id="validate-bundle-btn">Validate</button>
                        <button id="fill-bundle-btn">Fill</button>
                        <button id="clear-bundle-btn">Clear</button>
                    </div>
                </header>

                <div class="tab-nav">
                    <button class="tab-link active" data-tab="overview-tab">Overview</button>
                    <button class="tab-link" data-tab="plugins-tab">Plugins</button>
                    <button class="tab-link hidden" data-tab="validation-tab">Validation</button>
                </div>

                <div id="tab-content">
                    <div id="overview-tab" class="tab-pane active">
                        <div class="action-buttons">
                            <button id="sign-bundle-button" class="action-button">Sign Bundle</button>
                            <button id="validate-bundle-button" class="action-button">Validate Bundle</button>
                            <button id="fill-bundle-button" class="action-button">Fill Bundle...</button>
                            <button id="clear-bundle-button" class="action-button">Clear Binaries</button>
                        </div>
                        <div id="manifest-details"></div>
                    </div>
                    <div id="plugins-tab" class="tab-pane">
                        <div id="plugins-list"></div>
                    </div>
                    <div id="validation-tab" class="tab-pane">
                        <pre id="validation-results"></pre>
                    </div>
                </div>
            </div>

            <div id="create-bundle-form" class="hidden">
                <!-- The create form will be moved into a modal later -->
            </div>
        </main>
    </div>

    <!-- Modal for status/error messages -->
    <div id="modal" class="modal-container hidden">
        <div class="modal-content">
            <span id="modal-close-btn" class="modal-close">×</span>
            <h3 id="modal-title"></h3>
            <div id="modal-message"></div>
        </div>
    </div>

    <!-- Fill Modal -->
    <div id="fill-modal" class="modal">
        <div class="modal-content">
            <span class="close-fill-modal-button">×</span>
            <h2 id="fill-modal-title">Fill Bundle</h2>
            <div id="fill-modal-body">
                <p>Select targets to build and add to the bundle:</p>
                <div id="fill-targets-list"></div>
                <button id="start-fill-button" class="action-button">Start Fill</button>
            </div>
            <div id="fill-progress-view" style="display: none;">
                <h3>Fill Progress:</h3>
                <div id="fill-progress-list"></div>
            </div>
        </div>
    </div>

    <div id="spinner" class="spinner hidden"></div>

    <script src="renderer.js"></script>
</body>
</html>

electron/main.js (new file, +299)
@@ -0,0 +1,299 @@
const { app, BrowserWindow, ipcMain, dialog, nativeTheme } = require('electron');
const path = require('path');
const fs = require('fs-extra');
const yaml = require('js-yaml');
const AdmZip = require('adm-zip');
const { spawn } = require('child_process');

// Handle creating/removing shortcuts on Windows when installing/uninstalling.
if (require('electron-squirrel-startup')) {
    app.quit();
}

let mainWindow;

const createWindow = () => {
    // Create the browser window.
    mainWindow = new BrowserWindow({
        width: 1200,
        height: 800,
        webPreferences: {
            nodeIntegration: true,
            contextIsolation: false,
            enableRemoteModule: true,
        },
    });

    // and load the index.html of the app.
    mainWindow.loadFile(path.join(__dirname, 'index.html'));

    // Open the DevTools for debugging
    // mainWindow.webContents.openDevTools();

    nativeTheme.on('updated', () => {
        if (mainWindow) {
            mainWindow.webContents.send('theme-updated', { shouldUseDarkColors: nativeTheme.shouldUseDarkColors });
        }
    });
};

// This method will be called when Electron has finished
// initialization and is ready to create browser windows.
// Some APIs can only be used after this event occurs.
app.on('ready', createWindow);

ipcMain.handle('get-dark-mode', () => {
    return nativeTheme.shouldUseDarkColors;
});

ipcMain.on('show-error-dialog', (event, { title, content }) => {
    dialog.showErrorBox(title, content);
});

// Quit when all windows are closed, except on macOS. There, it's common
// for applications and their menu bar to stay active until the user quits
// explicitly with Cmd + Q.
app.on('window-all-closed', () => {
    if (process.platform !== 'darwin') {
        app.quit();
    }
});

app.on('activate', () => {
    // On OS X it's common to re-create a window in the app when the
    // dock icon is clicked and there are no other windows open.
    if (BrowserWindow.getAllWindows().length === 0) {
        createWindow();
    }
});

// IPC handlers
ipcMain.handle('select-file', async () => {
    const result = await dialog.showOpenDialog({
        properties: ['openFile'],
        filters: [
            { name: 'Fbundle Archives', extensions: ['fbundle'] },
            { name: 'All Files', extensions: ['*'] }
        ]
    });

    if (!result.canceled && result.filePaths.length > 0) {
        return result.filePaths[0];
    }
    return null;
});

ipcMain.handle('select-directory', async () => {
    const result = await dialog.showOpenDialog({
        properties: ['openDirectory']
    });

    if (!result.canceled && result.filePaths.length > 0) {
        return result.filePaths[0];
    }
    return null;
});

ipcMain.handle('select-save-file', async () => {
    const result = await dialog.showSaveDialog({
        filters: [
            { name: 'Fbundle Archives', extensions: ['fbundle'] }
        ]
    });

    if (!result.canceled) {
        return result.filePath;
    }
    return null;
});

// Helper function to run python commands via the bundled backend
function runPythonCommand(command, kwargs, event) {
    const buildDir = path.resolve(__dirname, '..', 'build');
    let backendPath;
    if (app.isPackaged) {
        backendPath = path.join(process.resourcesPath, 'fourdst-backend');
    } else {
        backendPath = path.join(buildDir, 'electron', 'dist', 'fourdst-backend', 'fourdst-backend');
    }

    console.log(`[MAIN_PROCESS] Spawning backend: ${backendPath}`);
    const args = [command, JSON.stringify(kwargs)];
    console.log(`[MAIN_PROCESS] With args: [${args.join(', ')}]`);

    return new Promise((resolve) => {
        const process = spawn(backendPath, args);
        let stdoutBuffer = '';
        let errorOutput = '';

        process.stderr.on('data', (data) => {
            errorOutput += data.toString();
            console.error('Backend STDERR:', data.toString().trim());
        });

        const isStreaming = command === 'fill_bundle';

        process.stdout.on('data', (data) => {
            const chunk = data.toString();
            stdoutBuffer += chunk;

            if (isStreaming && event) {
                // Process buffer line by line for streaming commands
                let newlineIndex;
                while ((newlineIndex = stdoutBuffer.indexOf('\n')) >= 0) {
                    const line = stdoutBuffer.substring(0, newlineIndex).trim();
                    stdoutBuffer = stdoutBuffer.substring(newlineIndex + 1);

                    if (line) {
                        try {
                            const parsed = JSON.parse(line);
                            if (parsed.type === 'progress') {
                                event.sender.send('fill-bundle-progress', parsed.data);
                            } else {
                                // Not a progress update, put it back in the buffer for final processing
                                stdoutBuffer = line + '\n' + stdoutBuffer;
                                break; // Stop processing lines
                            }
                        } catch (e) {
                            // Ignore parsing errors for intermediate lines in a stream
                        }
                    }
                }
            }
        });

        process.on('close', (code) => {
            console.log(`[MAIN_PROCESS] Backend process exited with code ${code}`);
            let resultData = null;

            try {
                // Core functions now return clean JSON directly
                const finalJson = JSON.parse(stdoutBuffer.trim());
                resultData = finalJson; // Use the JSON response directly
            } catch (e) {
                console.error(`[MAIN_PROCESS] Could not parse backend output as JSON: ${e}`);
                console.error(`[MAIN_PROCESS] Raw output: "${stdoutBuffer}"`);
                // If parsing fails, return a structured error response
                resultData = {
                    success: false,
                    error: `JSON parsing failed: ${e.message}`,
                    raw_output: stdoutBuffer
                };
            }

            const finalError = errorOutput.trim();
            if (finalError && !resultData) {
                resolve({ success: false, error: finalError });
            } else if (resultData) {
                resolve(resultData);
            } else {
                const errorMessage = finalError || `The script finished without returning a result (exit code: ${code})`;
                resolve({ success: false, error: errorMessage });
            }
        });

        process.on('error', (err) => {
            resolve({ success: false, error: `Failed to start backend process: ${err.message}` });
        });
    });
}

ipcMain.handle('create-bundle', async (event, bundleData) => {
    const kwargs = {
        plugin_dirs: bundleData.pluginDirs,
        output_path: bundleData.outputPath,
        bundle_name: bundleData.bundleName,
        bundle_version: bundleData.bundleVersion,
        bundle_author: bundleData.bundleAuthor,
        bundle_comment: bundleData.bundleComment,
    };

    const result = await runPythonCommand('create_bundle', kwargs, event);

    // The renderer expects a 'path' property on success
    if (result.success) {
        result.path = bundleData.outputPath;
    }

    return result;
});

ipcMain.handle('sign-bundle', async (event, bundlePath) => {
    // Prompt for private key
    const result = await dialog.showOpenDialog({
        properties: ['openFile'],
        title: 'Select Private Key',
        filters: [{ name: 'PEM Private Key', extensions: ['pem'] }],
    });

    if (result.canceled || !result.filePaths || result.filePaths.length === 0) {
        return { success: false, error: 'Private key selection was canceled.' };
    }

    const privateKeyPath = result.filePaths[0];

    const kwargs = {
        bundle_path: bundlePath,
        private_key: privateKeyPath,
    };

    return runPythonCommand('sign_bundle', kwargs, event);
});

ipcMain.handle('validate-bundle', async (event, bundlePath) => {
    const kwargs = {
        bundle_path: bundlePath
    };
    return runPythonCommand('validate_bundle', kwargs, event);
});

ipcMain.handle('clear-bundle', async (event, bundlePath) => {
    const kwargs = { bundle_path: bundlePath };
    return runPythonCommand('clear_bundle', kwargs, event);
});

ipcMain.handle('get-fillable-targets', async (event, bundlePath) => {
    const kwargs = { bundle_path: bundlePath };
    return runPythonCommand('get_fillable_targets', kwargs, event);
});

ipcMain.handle('fill-bundle', async (event, { bundlePath, targetsToBuild }) => {
    const kwargs = {
        bundle_path: bundlePath,
        targets_to_build: targetsToBuild
    };

    // Pass event to stream progress
    return runPythonCommand('fill_bundle', kwargs, event);
});

ipcMain.handle('edit-bundle', async (event, { bundlePath, updatedManifest }) => {
    const kwargs = {
        bundle_path: bundlePath,
        metadata: updatedManifest
    };
    return runPythonCommand('edit_bundle_metadata', kwargs, event);
});

ipcMain.handle('open-bundle', async (event, bundlePath) => {
    console.log(`[IPC_HANDLER] Opening bundle: ${bundlePath}`);
    const kwargs = { bundle_path: bundlePath };
    const result = await runPythonCommand('inspect_bundle', kwargs, event);

    console.log(`[IPC_HANDLER] inspect_bundle result:`, result);

    // Core functions now return consistent JSON structure directly
    if (result && result.success) {
        // The core inspect_bundle function returns the data directly
        // We just need to add the bundlePath for the renderer
        return {
            success: true,
            manifest: result.manifest,
            report: result, // The entire result is the report
            bundlePath: bundlePath
        };
    }

    // Return error as-is since it's already in the correct format
    return result || { success: false, error: 'An unknown error occurred while opening the bundle.' };
});
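For fill_bundle, runPythonCommand's streaming branch expects newline-delimited JSON on stdout: records shaped {"type": "progress", "data": ...} are forwarded to the renderer as fill-bundle-progress events, and the first non-progress line is held back as the final result. A sketch of a conforming emitter; the field values are illustrative, and the real records come from fourdst.core.bundle.fill_bundle:

    import json

    def emit(record: dict) -> None:
        # One JSON document per line, flushed immediately, as the line parser assumes.
        print(json.dumps(record), flush=True)

    emit({"type": "progress",
          "data": {"status": "building", "plugin": "demo", "target": "x86_64-linux-gnu"}})
    emit({"type": "progress",
          "data": {"status": "success", "plugin": "demo", "target": "x86_64-linux-gnu"}})
    emit({"success": True, "message": "fill_bundle completed."})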

electron/package-lock.json (generated, +3873)
Diff suppressed because it is too large.

electron/package.json (new file, +58)
@@ -0,0 +1,58 @@
{
  "name": "fourdst-bundle-manager",
  "version": "1.0.0",
  "description": "Electron app for managing fbundle archives",
  "main": "main.js",
  "scripts": {
    "start": "electron .",
    "dev": "electron .",
    "build": "electron-builder",
    "pack": "electron-builder --dir"
  },
  "repository": {
    "type": "git",
    "url": "https://github.com/tboudreaux/fourdst"
  },
  "keywords": [
    "Electron",
    "fbundle",
    "4DSTAR"
  ],
  "author": "4DSTAR Team",
  "license": "MIT",
  "devDependencies": {
    "electron": "^31.0.2",
    "electron-builder": "^24.0.0",
    "electron-squirrel-startup": "^1.0.1"
  },
  "dependencies": {
    "fs-extra": "^11.0.0",
    "js-yaml": "^4.1.0",
    "adm-zip": "^0.5.14",
    "@electron/remote": "^2.0.0",
    "python-shell": "^5.0.0"
  },
  "build": {
    "appId": "com.fourdst.bundlemanager",
    "productName": "4DSTAR Bundle Manager",
    "directories": {
      "output": "dist"
    },
    "mac": {
      "category": "public.app-category.developer-tools",
      "target": [
        "dmg",
        "zip"
      ]
    },
    "linux": {
      "target": [
        "AppImage",
        "deb",
        "rpm"
      ],
      "category": "Development"
    }
  }
}

electron/renderer.js (new file, +488)
@@ -0,0 +1,488 @@
const { ipcRenderer } = require('electron');
const path = require('path');

// --- STATE ---
let currentBundle = null;

// --- DOM ELEMENTS ---
// Views
const welcomeScreen = document.getElementById('welcome-screen');
const bundleView = document.getElementById('bundle-view');
const createBundleForm = document.getElementById('create-bundle-form'); // This will be a modal later

// Sidebar buttons
const openBundleBtn = document.getElementById('open-bundle-btn');
const createBundleBtn = document.getElementById('create-bundle-btn');

// Bundle action buttons
const editBundleBtn = document.getElementById('edit-bundle-btn');
const signBundleBtn = document.getElementById('sign-bundle-btn');
const validateBundleBtn = document.getElementById('validate-bundle-btn');
const fillBundleBtn = document.getElementById('fill-bundle-btn');
const clearBundleBtn = document.getElementById('clear-bundle-btn');

// Bundle display
const bundleTitle = document.getElementById('bundle-title');
const manifestDetails = document.getElementById('manifest-details');
const pluginsList = document.getElementById('plugins-list');
const validationResults = document.getElementById('validation-results');

// Tabs
const tabLinks = document.querySelectorAll('.tab-link');
const tabPanes = document.querySelectorAll('.tab-pane');
const validationTabLink = document.querySelector('button[data-tab="validation-tab"]');

// Modal
const modal = document.getElementById('modal');
const modalTitle = document.getElementById('modal-title');
const modalMessage = document.getElementById('modal-message');
const modalCloseBtn = document.getElementById('modal-close-btn');

// Spinner
const spinner = document.getElementById('spinner');

// Fill Modal elements
const fillModal = document.getElementById('fill-modal');
const closeFillModalButton = document.querySelector('.close-fill-modal-button');
const fillModalTitle = document.getElementById('fill-modal-title');
const fillModalBody = document.getElementById('fill-modal-body');
const fillTargetsList = document.getElementById('fill-targets-list');
const startFillButton = document.getElementById('start-fill-button');
const fillProgressView = document.getElementById('fill-progress-view');
const fillProgressList = document.getElementById('fill-progress-list');

let currentBundlePath = null;

// --- INITIALIZATION ---
document.addEventListener('DOMContentLoaded', async () => {
    // Set initial view
    showView('welcome-screen');

    // Set initial theme
    const isDarkMode = await ipcRenderer.invoke('get-dark-mode');
    document.body.classList.toggle('dark-mode', isDarkMode);

    // Setup event listeners
    setupEventListeners();
});

// --- EVENT LISTENERS ---
function setupEventListeners() {
    // Theme updates
    ipcRenderer.on('theme-updated', (event, { shouldUseDarkColors }) => {
        document.body.classList.toggle('dark-mode', shouldUseDarkColors);
    });

    // Sidebar navigation
    openBundleBtn.addEventListener('click', handleOpenBundle);
    createBundleBtn.addEventListener('click', () => {
        // TODO: Replace with modal
        showView('create-bundle-form');
        showModal('Not Implemented', 'The create bundle form will be moved to a modal dialog.');
    });

    // Tab navigation
    tabLinks.forEach(link => {
        link.addEventListener('click', () => switchTab(link.dataset.tab));
    });

    // Modal close button
    modalCloseBtn.addEventListener('click', hideModal);

    // Bundle actions
    signBundleBtn.addEventListener('click', handleSignBundle);
    validateBundleBtn.addEventListener('click', handleValidateBundle);
    clearBundleBtn.addEventListener('click', handleClearBundle);
    fillBundleBtn.addEventListener('click', async () => {
        if (!currentBundlePath) {
            showModal('Error', 'No bundle is currently open.');
            return;
        }
        showSpinner();
        const result = await ipcRenderer.invoke('get-fillable-targets', currentBundlePath);
        hideSpinner();

        if (!result.success) {
            showModal('Error', `Failed to get fillable targets: ${result.error}`);
            return;
        }

        const targets = result.data;
        if (Object.keys(targets).length === 0) {
            showModal('Info', 'The bundle is already full. No new targets to build.');
            return;
        }

        populateFillTargetsList(targets);
        fillModal.style.display = 'block';
    });

    closeFillModalButton.addEventListener('click', () => {
        fillModal.style.display = 'none';
    });

    function populateFillTargetsList(plugins) {
        fillTargetsList.innerHTML = '';
        for (const [pluginName, targets] of Object.entries(plugins)) {
            if (targets.length > 0) {
                const pluginHeader = document.createElement('h4');
                pluginHeader.textContent = `Plugin: ${pluginName}`;
                fillTargetsList.appendChild(pluginHeader);

                targets.forEach(target => {
                    const item = document.createElement('div');
                    item.className = 'fill-target-item';
                    const checkbox = document.createElement('input');
                    checkbox.type = 'checkbox';
                    checkbox.checked = true;
                    checkbox.id = `target-${pluginName}-${target.triplet}`;
                    checkbox.dataset.pluginName = pluginName;
                    checkbox.dataset.targetTriplet = target.triplet;
                    checkbox.dataset.targetInfo = JSON.stringify(target);

                    const label = document.createElement('label');
                    label.htmlFor = checkbox.id;
                    label.textContent = `${target.triplet} (${target.type})`;

                    item.appendChild(checkbox);
                    item.appendChild(label);
                    fillTargetsList.appendChild(item);
                });
            }
        }
        // Reset view
        fillModalBody.style.display = 'block';
        fillProgressView.style.display = 'none';
    }

    startFillButton.addEventListener('click', async () => {
        const selectedTargets = {};
        const checkboxes = fillTargetsList.querySelectorAll('input[type="checkbox"]:checked');

        if (checkboxes.length === 0) {
            showModal('Info', 'No targets selected to fill.');
            return;
        }

        checkboxes.forEach(cb => {
            const pluginName = cb.dataset.pluginName;
            if (!selectedTargets[pluginName]) {
                selectedTargets[pluginName] = [];
            }
            selectedTargets[pluginName].push(JSON.parse(cb.dataset.targetInfo));
        });

        fillModalBody.style.display = 'none';
        fillProgressView.style.display = 'block';
        fillModalTitle.textContent = 'Filling Bundle...';
        populateFillProgressList(selectedTargets);

        const result = await ipcRenderer.invoke('fill-bundle', {
            bundlePath: currentBundlePath,
            targetsToBuild: selectedTargets
        });

        fillModalTitle.textContent = 'Fill Complete';
        if (!result.success) {
            // A final error message if the whole process fails.
            const p = document.createElement('p');
            p.style.color = 'var(--danger-color)';
            p.textContent = `Error: ${result.error}`;
            fillProgressList.appendChild(p);
        }
    });

    function populateFillProgressList(plugins) {
        fillProgressList.innerHTML = '';
        for (const [pluginName, targets] of Object.entries(plugins)) {
            targets.forEach(target => {
                const item = document.createElement('div');
                item.className = 'fill-target-item';
                item.id = `progress-${pluginName}-${target.triplet}`;

                const indicator = document.createElement('div');
                indicator.className = 'progress-indicator';

                const label = document.createElement('span');
                label.textContent = `${pluginName} - ${target.triplet}`;

                item.appendChild(indicator);
                item.appendChild(label);
                fillProgressList.appendChild(item);
            });
        }
    }

    ipcRenderer.on('fill-bundle-progress', (event, progress) => {
        console.log('Progress update:', progress);
        if (typeof progress === 'object' && progress.status) {
            const { status, plugin, target, message } = progress;
            const progressItem = document.getElementById(`progress-${plugin}-${target}`);
            if (progressItem) {
                const indicator = progressItem.querySelector('.progress-indicator');
                indicator.className = 'progress-indicator'; // Reset classes
                switch (status) {
                    case 'building':
                        indicator.classList.add('spinner-icon');
                        break;
                    case 'success':
                        indicator.classList.add('success-icon');
                        break;
                    case 'failure':
                        indicator.classList.add('failure-icon');
                        break;
                }
                const label = progressItem.querySelector('span');
                if (message) {
                    label.textContent = `${plugin} - ${target}: ${message}`;
                }
            }
        } else if (typeof progress === 'object' && progress.message) {
            // Handle final completion message
            if (progress.message.includes('✅')) {
                fillModalTitle.textContent = 'Fill Complete!';
            }
        } else {
            // Handle simple string progress messages
            const p = document.createElement('p');
            p.textContent = progress;
            fillProgressList.appendChild(p);
        }
    });
}

// --- VIEW AND UI LOGIC ---
function showView(viewId) {
    [welcomeScreen, bundleView, createBundleForm].forEach(view => {
        view.classList.toggle('hidden', view.id !== viewId);
    });
}

function switchTab(tabId) {
    tabPanes.forEach(pane => {
        pane.classList.toggle('active', pane.id === tabId);
    });
    tabLinks.forEach(link => {
        link.classList.toggle('active', link.dataset.tab === tabId);
    });
}

function showSpinner() {
    spinner.classList.remove('hidden');
}

function hideSpinner() {
    spinner.classList.add('hidden');
}

function showModal(title, message, type = 'info') {
    modalTitle.textContent = title;
    modalMessage.innerHTML = message; // Use innerHTML to allow for formatted messages
    modal.classList.remove('hidden');
}

function hideModal() {
    modal.classList.add('hidden');
}

// --- BUNDLE ACTIONS HANDLERS ---
async function handleOpenBundle() {
    const bundlePath = await ipcRenderer.invoke('select-file');
    if (!bundlePath) return;

    showSpinner();
    showModal('Opening...', `Opening bundle: ${path.basename(bundlePath)}`);
    const result = await ipcRenderer.invoke('open-bundle', bundlePath);
    hideSpinner();

    if (result.success) {
        currentBundle = result;
        currentBundlePath = bundlePath;
        displayBundleInfo(result.report);
        showView('bundle-view');
        hideModal();
    } else {
        showModal('Error Opening Bundle', `Failed to open bundle: ${result ? result.error : 'Unknown error'}`);
    }
}

async function handleSignBundle() {
    if (!currentBundlePath) return;

    const result = await ipcRenderer.invoke('select-private-key');
    if (result.canceled || !result.filePaths.length) {
        return; // User canceled the dialog
    }
    const privateKeyPath = result.filePaths[0];

    showSpinner();
    const signResult = await ipcRenderer.invoke('sign-bundle', { bundlePath: currentBundlePath, privateKey: privateKeyPath });
    hideSpinner();

    if (signResult.success) {
        showModal('Success', 'Bundle signed successfully. Reloading...');
        await reloadCurrentBundle();
        hideModal();
    } else {
        showModal('Sign Error', `Failed to sign bundle: ${signResult.error}`);
    }
}

async function handleValidateBundle() {
    if (!currentBundlePath) return;

    showSpinner();
    const result = await ipcRenderer.invoke('validate-bundle', currentBundlePath);
    hideSpinner();

    if (result.success) {
        const validation = result.data;
        const validationIssues = validation.errors.concat(validation.warnings);

        if (validationIssues.length > 0) {
            validationResults.textContent = validationIssues.join('\n');
            validationTabLink.classList.remove('hidden');
        } else {
            validationResults.textContent = 'Bundle is valid.';
            validationTabLink.classList.add('hidden');
        }
        // Switch to the validation tab to show the results.
        switchTab('validation-tab');
        showModal('Validation Complete', 'Validation check has finished.');

    } else {
        showModal('Validation Error', `Failed to validate bundle: ${result.error}`);
    }
}

async function handleClearBundle() {
    if (!currentBundlePath) return;

    showSpinner();
    const result = await ipcRenderer.invoke('clear-bundle', currentBundlePath);
    hideSpinner();

    if (result.success) {
        showModal('Success', 'All binaries have been cleared. Reloading...');
        await reloadCurrentBundle();
        hideModal();
    } else {
        showModal('Clear Error', `Failed to clear binaries: ${result.error}`);
    }
}

async function handleFillBundle() {
    if (!currentBundle) return showModal('Action Canceled', 'Please open a bundle first.');

    showSpinner();
    showModal('Filling Bundle...', 'Adding local binaries to bundle.');
    const result = await ipcRenderer.invoke('fill-bundle', currentBundle.bundlePath);
    hideSpinner();

    if (result.success) {
        showModal('Success', 'Binaries filled successfully. Reloading...');
        await reloadCurrentBundle();
        hideModal();
    } else {
        showModal('Fill Error', `Failed to fill bundle: ${result.error}`);
    }
}

// --- DATA DISPLAY ---
async function reloadCurrentBundle() {
    if (!currentBundle) return;
    const reloadResult = await ipcRenderer.invoke('open-bundle', currentBundle.bundlePath);
    if (reloadResult.success) {
        currentBundle = reloadResult;
        displayBundleInfo(reloadResult.report);
    } else {
        showModal('Reload Error', `Failed to reload bundle details: ${reloadResult.error}`);
    }
}

function displayBundleInfo(report) {
    if (!report) {
        showModal('Display Error', 'Could not load bundle information.');
        return;
    }

    const { manifest, signature, validation, plugins } = report;

    // Set bundle title
    bundleTitle.textContent = manifest.bundleName || 'Untitled Bundle';

    // --- Overview Tab ---
    const trustStatus = signature.status || 'UNSIGNED';
    const trustColorClass = {
        'TRUSTED': 'trusted',
        'UNTRUSTED': 'untrusted',
        'INVALID': 'untrusted',
        'TAMPERED': 'untrusted',
        'UNSIGNED': 'unsigned',
        'ERROR': 'untrusted',
        'UNSUPPORTED': 'warning'
    }[trustStatus] || 'unsigned';

    manifestDetails.innerHTML = `
        <div class="card">
            <div class="card-header">
                <h3>Trust Status</h3>
                <div class="trust-indicator-container">
                    <div class="trust-indicator ${trustColorClass}"></div>
                    <span>${trustStatus}</span>
                </div>
            </div>
        </div>
        <div class="card">
            <div class="card-header"><h3>Manifest Details</h3></div>
            <div class="card-content">
                <p><strong>Version:</strong> ${manifest.bundleVersion || 'N/A'}</p>
                <p><strong>Author:</strong> ${manifest.bundleAuthor || 'N/A'}</p>
                <p><strong>Bundled On:</strong> ${manifest.bundledOn || 'N/A'}</p>
                <p><strong>Comment:</strong> ${manifest.bundleComment || 'N/A'}</p>
                ${manifest.bundleAuthorKeyFingerprint ? `<p><strong>Author Key:</strong> ${manifest.bundleAuthorKeyFingerprint}</p>` : ''}
                ${manifest.bundleSignature ? `<p><strong>Signature:</strong> <span class="signature">${manifest.bundleSignature}</span></p>` : ''}
            </div>
        </div>
    `;

    // --- Plugins Tab ---
    pluginsList.innerHTML = '';
    if (plugins && Object.keys(plugins).length > 0) {
        Object.entries(plugins).forEach(([pluginName, pluginData]) => {
            const binariesInfo = pluginData.binaries.map(b => {
                const compatClass = b.is_compatible ? 'compatible' : 'incompatible';
                const compatText = b.is_compatible ? 'Compatible' : 'Incompatible';
                const platformTriplet = b.platform && b.platform.triplet ? `(${b.platform.triplet})` : '';
                return `<li class="binary-info ${compatClass}"><strong>${b.path}</strong> ${platformTriplet} - ${compatText}</li>`;
            }).join('');

            const pluginCard = document.createElement('div');
            pluginCard.className = 'card';
            pluginCard.innerHTML = `
                <div class="card-header"><h4>${pluginName}</h4></div>
                <div class="card-content">
                    <p><strong>Source:</strong> ${pluginData.sdist_path}</p>
                    <p><strong>Binaries:</strong></p>
                    <ul>${binariesInfo.length > 0 ? binariesInfo : '<li>No binaries found.</li>'}</ul>
                </div>
            `;
            pluginsList.appendChild(pluginCard);
        });
    } else {
        pluginsList.innerHTML = '<div class="card"><div class="card-content"><p>No plugins found in this bundle.</p></div></div>';
    }

    // --- Validation Tab ---
    const validationIssues = validation.errors.concat(validation.warnings);
    if (validationIssues.length > 0) {
        validationResults.textContent = validationIssues.join('\n');
        validationTabLink.classList.remove('hidden');
    } else {
        validationResults.textContent = 'Bundle is valid.';
        validationTabLink.classList.add('hidden');
    }

    // Reset to overview tab by default
    switchTab('overview-tab');
}
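displayBundleInfo destructures manifest, signature, validation, and plugins from the inspect_bundle report. A minimal payload that satisfies it, written as a Python literal since the report originates in the Python backend (all field values hypothetical):

    report = {
        "manifest": {"bundleName": "demo", "bundleVersion": "0.1.0",
                     "bundleAuthor": "4DSTAR Team", "bundledOn": "2025-08-09",
                     "bundleComment": "example bundle"},
        "signature": {"status": "UNSIGNED"},
        "validation": {"errors": [], "warnings": []},
        "plugins": {"demo": {"sdist_path": "plugins/demo.tar.gz",
                             "binaries": [{"path": "demo.so",
                                           "is_compatible": True,
                                           "platform": {"triplet": "x86_64-linux-gnu"}}]}},
    }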

electron/styles.css (new file, +426)
@@ -0,0 +1,426 @@
/* Modern CSS for 4DSTAR Bundle Manager - v2 */

/* Global Resets and Variables */
* {
    margin: 0;
    padding: 0;
    box-sizing: border-box;
}

:root {
    --font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, Helvetica, Arial, sans-serif;
    --bg-color: #f4f7fa;
    --sidebar-bg: #ffffff;
    --content-bg: #ffffff;
    --text-color: #2c3e50;
    --text-light: #7f8c8d;
    --border-color: #e1e5e8;
    --primary-color: #3498db;
    --primary-hover: #2980b9;
    --danger-color: #e74c3c;
    --success-color: #27ae60;
    --warning-color: #f39c12;
    --sidebar-width: 220px;
    --header-height: 60px;
}

body.dark-mode {
    --bg-color: #2c3e50;
    --sidebar-bg: #34495e;
    --content-bg: #34495e;
    --text-color: #ecf0f1;
    --text-light: #95a5a6;
    --border-color: #4a6278;
    --primary-color: #3498db;
    --primary-hover: #4aa3df;
}

body {
    font-family: var(--font-family);
    background-color: var(--bg-color);
    color: var(--text-color);
    transition: background-color 0.2s, color 0.2s;
    overflow: hidden;
}

/* Main Layout */
.main-container {
    display: flex;
    height: 100vh;
}

.sidebar {
    width: var(--sidebar-width);
    background-color: var(--sidebar-bg);
    border-right: 1px solid var(--border-color);
    display: flex;
    flex-direction: column;
    flex-shrink: 0;
    transition: background-color 0.2s;
}

.sidebar-header {
    padding: 20px;
    border-bottom: 1px solid var(--border-color);
    text-align: center;
}

.sidebar-header h3 {
    font-size: 1.2rem;
    font-weight: 600;
}

.sidebar-nav {
    padding: 15px 10px;
    flex-grow: 1;
}

.nav-button {
    display: block;
    width: 100%;
    padding: 12px 15px;
    margin-bottom: 8px;
    border: none;
    border-radius: 6px;
    background-color: transparent;
    color: var(--text-color);
    font-size: 0.95rem;
    text-align: left;
    cursor: pointer;
    transition: background-color 0.2s, color 0.2s;
}

.nav-button:hover {
    background-color: var(--primary-color);
    color: white;
}

.nav-button.active {
    background-color: var(--primary-color);
    color: white;
    font-weight: 600;
}

.sidebar-footer {
    padding: 20px;
    text-align: center;
    font-size: 0.8rem;
    color: var(--text-light);
}

/* Content Area */
.content-area {
    flex-grow: 1;
    display: flex;
    flex-direction: column;
    overflow: hidden;
}

#welcome-screen {
    display: flex;
    flex-direction: column;
    justify-content: center;
    align-items: center;
    height: 100%;
    text-align: center;
    color: var(--text-light);
}

#welcome-screen h1 {
    font-size: 2rem;
    margin-bottom: 10px;
}

.content-header {
    display: flex;
    justify-content: space-between;
    align-items: center;
    padding: 0 25px;
    height: var(--header-height);
    border-bottom: 1px solid var(--border-color);
    background-color: var(--content-bg);
    flex-shrink: 0;
}

.content-header h2 {
    font-size: 1.4rem;
}

.action-buttons button {
    margin-left: 10px;
    padding: 8px 16px;
    border-radius: 5px;
    border: 1px solid var(--primary-color);
    background-color: transparent;
    color: var(--primary-color);
    cursor: pointer;
    transition: all 0.2s;
}

.action-buttons button:hover {
    background-color: var(--primary-color);
    color: white;
}

/* Tabs */
.tab-nav {
    display: flex;
    padding: 0 25px;
    border-bottom: 1px solid var(--border-color);
    background-color: var(--content-bg);
    flex-shrink: 0;
}

.tab-link {
    padding: 15px 20px;
    border: none;
    background: none;
    cursor: pointer;
    color: var(--text-light);
    font-size: 1rem;
    border-bottom: 3px solid transparent;
    transition: color 0.2s, border-color 0.2s;
}

.tab-link:hover {
    color: var(--primary-color);
}

.tab-link.active {
    color: var(--text-color);
    border-bottom-color: var(--primary-color);
    font-weight: 600;
}

#tab-content {
    padding: 25px;
    overflow-y: auto;
    flex-grow: 1;
    background-color: var(--bg-color);
}

.tab-pane {
    display: none;
}

.tab-pane.active {
    display: block;
}

/* Card-based info display */
.card {
    background-color: var(--content-bg);
    border: 1px solid var(--border-color);
    border-radius: 8px;
    padding: 20px;
    margin-bottom: 20px;
    box-shadow: 0 2px 5px rgba(0,0,0,0.05);
}

.card-title {
    font-size: 1.2rem;
    font-weight: 600;
    margin-bottom: 15px;
    border-bottom: 1px solid var(--border-color);
    padding-bottom: 10px;
}

.info-grid {
    display: grid;
    grid-template-columns: 150px 1fr;
    gap: 12px;
}

.info-grid .label {
    font-weight: 600;
    color: var(--text-light);
}

.info-grid .value.signature {
    word-break: break-all;
    font-family: monospace;
    font-size: 0.9rem;
}

/* Trust Indicator */
.trust-indicator-container {
    display: flex;
    align-items: center;
    gap: 10px;
}

.trust-indicator {
    width: 12px;
    height: 12px;
    border-radius: 50%;
    flex-shrink: 0;
}

.trust-indicator.trusted { background-color: var(--success-color); }
.trust-indicator.untrusted { background-color: var(--danger-color); }
.trust-indicator.unsigned { background-color: var(--warning-color); }
.trust-indicator.warning { background-color: var(--warning-color); }

/* Plugins List */
#plugins-list .plugin-item {
    background-color: var(--content-bg);
    border: 1px solid var(--border-color);
    border-radius: 8px;
    padding: 20px;
    margin-bottom: 15px;
}

#plugins-list h4 {
    font-size: 1.1rem;
    margin-bottom: 10px;
}

/* Validation Results */
#validation-results {
    background-color: var(--content-bg);
    border: 1px solid var(--border-color);
    border-radius: 8px;
    padding: 20px;
    white-space: pre-wrap;
    word-wrap: break-word;
    font-family: monospace;
}

/* Modal */
.modal-container {
    position: fixed;
    top: 0;
    left: 0;
    width: 100%;
    height: 100%;
    background-color: rgba(0, 0, 0, 0.6);
    display: flex;
    justify-content: center;
    align-items: center;
    z-index: 1000;
}

.modal-content {
    background-color: var(--content-bg);
    padding: 30px;
    border-radius: 8px;
    min-width: 400px;
    max-width: 600px;
    box-shadow: 0 5px 15px rgba(0,0,0,0.3);
    position: relative;
}

.modal-close {
    position: absolute;
    top: 15px;
    right: 15px;
    font-size: 1.5rem;
    font-weight: bold;
    cursor: pointer;
    color: var(--text-light);
}

.modal-close:hover {
    color: var(--text-color);
}

#modal-title {
    font-size: 1.4rem;
    margin-bottom: 20px;
}

/* Utility */
.hidden {
    display: none;
}

/* Fill Modal Specifics */
#fill-targets-list,
#fill-progress-list {
    max-height: 300px;
    overflow-y: auto;
    border: 1px solid var(--border-color);
    border-radius: 4px;
    padding: 10px;
    margin-top: 10px;
    margin-bottom: 15px;
}

.fill-target-item {
    display: flex;
    align-items: center;
    padding: 8px;
    border-bottom: 1px solid var(--border-color);
}

.fill-target-item:last-child {
    border-bottom: none;
}

.fill-target-item label {
    flex-grow: 1;
    margin-left: 10px;
}

.progress-indicator {
    width: 20px;
    height: 20px;
    margin-right: 10px;
    display: inline-block;
    vertical-align: middle;
}

.progress-indicator.spinner-icon {
    border: 2px solid var(--text-light);
    border-top: 2px solid var(--primary-color);
    border-radius: 50%;
    width: 16px;
    height: 16px;
    animation: spin 1s linear infinite;
}

.progress-indicator.success-icon::before {
    content: '✔';
    color: var(--success-color);
    font-size: 20px;
}

.progress-indicator.failure-icon::before {
    content: '✖';
    color: var(--danger-color);
    font-size: 20px;
}

#start-fill-button {
    background-color: var(--primary-color);
    color: white;
    padding: 10px 15px;
    border: none;
    border-radius: 4px;
    cursor: pointer;
    font-size: 1em;
    transition: background-color 0.3s;
}

#start-fill-button:hover {
    background-color: var(--primary-hover);
}

.spinner {
    position: absolute;
    top: 50%;
    left: 50%;
    transform: translate(-50%, -50%);
    border: 4px solid rgba(0, 0, 0, 0.1);
    width: 36px;
    height: 36px;
    border-radius: 50%;
    border-left-color: var(--primary-color);
    animation: spin 1s ease infinite;
    z-index: 2000;
}

@keyframes spin {
    0% { transform: rotate(0deg); }
    100% { transform: rotate(360deg); }
}
@@ -1,69 +1,23 @@
 # fourdst/cli/bundle/clear.py
 import typer
-import yaml
-import zipfile
 from pathlib import Path
-import tempfile
-import shutil
+from fourdst.core.bundle import clear_bundle

 def bundle_clear(
-    bundle_path: Path = typer.Argument(..., help="The path to the .fbundle file to clear.", exists=True, readable=True, writable=True)
+    bundle_path: Path = typer.Argument(
+        ...,
+        help="The path to the .fbundle file to clear.",
+        exists=True,
+        readable=True,
+        writable=True
+    )
 ):
     """
-    Removes all compiled binaries from a bundle, leaving only the source distributions.
+    Removes all compiled binaries and signatures from a bundle.
     """
-    typer.echo(f"--- Clearing binaries from bundle: {bundle_path.name} ---")
-
     try:
-        with tempfile.TemporaryDirectory() as temp_dir_str:
-            temp_dir = Path(temp_dir_str)
-
-            # 1. Unpack the bundle
-            with zipfile.ZipFile(bundle_path, 'r') as bundle_zip:
-                bundle_zip.extractall(temp_dir)
-
-            # 2. Read the manifest
-            manifest_path = temp_dir / "manifest.yaml"
-            if not manifest_path.is_file():
-                typer.secho("Error: Bundle is invalid. Missing manifest.yaml.", fg=typer.colors.RED)
-                raise typer.Exit(code=1)
-
-            with open(manifest_path, 'r') as f:
-                manifest = yaml.safe_load(f)
-
-            # 3. Clear binaries and signatures
-            typer.echo("Clearing binaries and signature information...")
-            manifest.pop('bundleAuthorKeyFingerprint', None)
-            manifest.pop('checksums', None)
-
-            for plugin_name, plugin_data in manifest.get('bundlePlugins', {}).items():
-                if 'binaries' in plugin_data:
-                    plugin_data['binaries'] = []
-
-            # 4. Delete the binaries directory and signature file
-            bin_dir = temp_dir / "bin"
-            if bin_dir.is_dir():
-                shutil.rmtree(bin_dir)
-                typer.echo(" - Removed 'bin/' directory.")
-
-            sig_file = temp_dir / "manifest.sig"
-            if sig_file.is_file():
-                sig_file.unlink()
-                typer.echo(" - Removed 'manifest.sig'.")
-
-            # 5. Write the updated manifest
-            with open(manifest_path, 'w') as f:
-                yaml.dump(manifest, f, sort_keys=False)
-
-            # 6. Repack the bundle
-            typer.echo("Repacking the bundle...")
-            with zipfile.ZipFile(bundle_path, 'w', zipfile.ZIP_DEFLATED) as bundle_zip:
-                for file_path in temp_dir.rglob('*'):
-                    if file_path.is_file():
-                        bundle_zip.write(file_path, file_path.relative_to(temp_dir))
-
-        typer.secho(f"\n✅ Bundle '{bundle_path.name}' has been cleared of all binaries.", fg=typer.colors.GREEN)
-
+        clear_bundle(bundle_path, progress_callback=typer.echo)
     except Exception as e:
-        typer.secho(f"An unexpected error occurred: {e}", fg=typer.colors.RED)
+        typer.secho(f"An error occurred while clearing the bundle: {e}", fg=typer.colors.RED)
        raise typer.Exit(code=1)
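
A minimal usage sketch of the refactored core function, assuming the clear_bundle signature shown in the new CLI code above; "my_plugins.fbundle" is a hypothetical path:

from pathlib import Path

from fourdst.core.bundle import clear_bundle

# Strip binaries and signature data from a bundle, echoing progress to stdout.
clear_bundle(Path("my_plugins.fbundle"), progress_callback=print)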
@@ -1,148 +1,37 @@
 # fourdst/cli/bundle/create.py

 import typer
-import os
-import sys
-import shutil
-import datetime
-import yaml
-import zipfile
 from pathlib import Path
-from fourdst.cli.common.utils import get_platform_identifier, get_macos_targeted_platform_identifier, run_command
+import sys

-bundle_app = typer.Typer()
+from fourdst.core.bundle import create_bundle

-@bundle_app.command("create")
 def bundle_create(
     plugin_dirs: list[Path] = typer.Argument(..., help="A list of plugin project directories to include.", exists=True, file_okay=False),
     output_bundle: Path = typer.Option("bundle.fbundle", "--out", "-o", help="The path for the output bundle file."),
     bundle_name: str = typer.Option("MyPluginBundle", "--name", help="The name of the bundle."),
     bundle_version: str = typer.Option("0.1.0", "--ver", help="The version of the bundle."),
     bundle_author: str = typer.Option("Unknown", "--author", help="The author of the bundle."),
-    # --- NEW OPTION ---
+    bundle_comment: str = typer.Option(None, "--comment", help="A comment to embed in the bundle."),
     target_macos_version: str = typer.Option(None, "--target-macos-version", help="The minimum macOS version to target (e.g., '12.0').")
 ):
     """
     Builds and packages one or more plugin projects into a single .fbundle file.
     """
-    staging_dir = Path("temp_bundle_staging")
-    if staging_dir.exists():
-        shutil.rmtree(staging_dir)
-    staging_dir.mkdir()
-
-    # --- MODIFIED LOGIC ---
-    # Prepare environment for the build
-    build_env = os.environ.copy()
-
-    # Determine the host platform identifier based on the target
-    if sys.platform == "darwin" and target_macos_version:
-        typer.secho(f"Targeting macOS version: {target_macos_version}", fg=typer.colors.CYAN)
-        host_platform = get_macos_targeted_platform_identifier(target_macos_version)
-
-        # Set environment variables for Meson to pick up
-        flags = f"-mmacosx-version-min={target_macos_version}"
-        build_env["CXXFLAGS"] = f"{build_env.get('CXXFLAGS', '')} {flags}".strip()
-        build_env["LDFLAGS"] = f"{build_env.get('LDFLAGS', '')} {flags}".strip()
-    else:
-        # Default behavior for Linux or non-targeted macOS builds
-        host_platform = get_platform_identifier()
-
-    manifest = {
-        "bundleName": bundle_name,
-        "bundleVersion": bundle_version,
-        "bundleAuthor": bundle_author,
-        "bundleComment": "Created with fourdst-cli",
-        "bundledOn": datetime.datetime.now().isoformat(),
-        "bundlePlugins": {}
-    }
-
-    print("Creating bundle...")
-    for plugin_dir in plugin_dirs:
-        plugin_name = plugin_dir.name
-        print(f"--> Processing plugin: {plugin_name}")
-
-        # 1. Build the plugin using the prepared environment
-        print(f" - Compiling for target platform...")
-        build_dir = plugin_dir / "builddir"
-        if build_dir.exists():
-            shutil.rmtree(build_dir) # Reconfigure every time to apply env vars
-
-        # Pass the modified environment to the Meson commands
-        run_command(["meson", "setup", "builddir"], cwd=plugin_dir, env=build_env)
-        run_command(["meson", "compile", "-C", "builddir"], cwd=plugin_dir, env=build_env)
-
-        # 2. Find the compiled artifact
-        compiled_lib = next(build_dir.glob("lib*.so"), None) or next(build_dir.glob("lib*.dylib"), None)
-        if not compiled_lib:
-            print(f"Error: Could not find compiled library for {plugin_name} (expected lib*.so or lib*.dylib)", file=sys.stderr)
-            raise typer.Exit(code=1)
-
-        # 3. Package source code (sdist), respecting .gitignore
-        print(" - Packaging source code (respecting .gitignore)...")
-        sdist_path = staging_dir / f"{plugin_name}_src.zip"
-
-        git_check = run_command(["git", "rev-parse", "--is-inside-work-tree"], cwd=plugin_dir, check=False)
-
-        files_to_include = []
-        if git_check.returncode == 0:
-            result = run_command(["git", "ls-files", "--cached", "--others", "--exclude-standard"], cwd=plugin_dir)
-            files_to_include = [plugin_dir / f for f in result.stdout.strip().split('\n') if f]
-        else:
-            typer.secho(f" - Warning: '{plugin_dir.name}' is not a git repository. Packaging all files.", fg=typer.colors.YELLOW)
-            for root, _, files in os.walk(plugin_dir):
-                if 'builddir' in root:
-                    continue
-                for file in files:
-                    files_to_include.append(Path(root) / file)
-
-        with zipfile.ZipFile(sdist_path, 'w', zipfile.ZIP_DEFLATED) as sdist_zip:
-            for file_path in files_to_include:
-                if file_path.is_file():
-                    sdist_zip.write(file_path, file_path.relative_to(plugin_dir))
-
-        # 4. Stage artifacts with ABI-tagged filenames and update manifest
-        binaries_dir = staging_dir / "bin"
-        binaries_dir.mkdir(exist_ok=True)
-
-        base_name = compiled_lib.stem
-        ext = compiled_lib.suffix
-        triplet = host_platform["triplet"]
-        abi_signature = host_platform["abi_signature"]
-        tagged_filename = f"{base_name}.{triplet}.{abi_signature}{ext}"
-        staged_lib_path = binaries_dir / tagged_filename
-
-        print(f" - Staging binary as: {tagged_filename}")
-        shutil.copy(compiled_lib, staged_lib_path)
-
-        manifest["bundlePlugins"][plugin_name] = {
-            "sdist": {
-                "path": sdist_path.name,
-                "sdistBundledOn": datetime.datetime.now().isoformat(),
-                "buildable": True
-            },
-            "binaries": [{
-                "platform": {
-                    "triplet": host_platform["triplet"],
-                    "abi_signature": host_platform["abi_signature"],
-                    # Adding arch separately for clarity, matching 'fill' command
-                    "arch": host_platform["arch"]
-                },
-                "path": staged_lib_path.relative_to(staging_dir).as_posix(),
-                "compiledOn": datetime.datetime.now().isoformat()
-            }]
-        }
-
-    # 5. Write manifest and package final bundle
-    manifest_path = staging_dir / "manifest.yaml"
-    with open(manifest_path, 'w') as f:
-        yaml.dump(manifest, f, sort_keys=False)
-
-    print(f"\nPackaging final bundle: {output_bundle}")
-    with zipfile.ZipFile(output_bundle, 'w', zipfile.ZIP_DEFLATED) as bundle_zip:
-        for root, _, files in os.walk(staging_dir):
-            for file in files:
-                file_path = Path(root) / file
-                bundle_zip.write(file_path, file_path.relative_to(staging_dir))
-
-    shutil.rmtree(staging_dir)
-    print("\n✅ Bundle created successfully!")
+    def progress_callback(message):
+        typer.secho(message, fg=typer.colors.BRIGHT_BLUE)
+
+    try:
+        create_bundle(
+            plugin_dirs=plugin_dirs,
+            output_bundle=output_bundle,
+            bundle_name=bundle_name,
+            bundle_version=bundle_version,
+            bundle_author=bundle_author,
+            bundle_comment=bundle_comment,
+            target_macos_version=target_macos_version,
+            progress_callback=progress_callback
+        )
+    except Exception as e:
+        typer.secho(f"Error creating bundle: {e}", fg=typer.colors.RED, err=True)
+        raise typer.Exit(code=1)
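
A minimal sketch of calling create_bundle directly with the keyword arguments the new CLI wrapper above passes through; the plugin directory and output path are hypothetical, and the metadata values simply mirror the CLI defaults:

from pathlib import Path

from fourdst.core.bundle import create_bundle

# Build one plugin project and package it into a single .fbundle file.
create_bundle(
    plugin_dirs=[Path("plugins/my_plugin")],
    output_bundle=Path("bundle.fbundle"),
    bundle_name="MyPluginBundle",
    bundle_version="0.1.0",
    bundle_author="Unknown",
    bundle_comment=None,
    target_macos_version=None,
    progress_callback=print,
)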
@@ -1,23 +1,14 @@
 # fourdst/cli/bundle/diff.py
 import typer
-import yaml
-import zipfile
 from pathlib import Path
-import tempfile
-import shutil
-import difflib
 from rich.console import Console
 from rich.panel import Panel
 from rich.text import Text
 from rich.table import Table

-console = Console()
+from fourdst.core.bundle import diff_bundle

-def _get_file_content(directory: Path, filename: str):
-    file_path = directory / filename
-    if not file_path.exists():
-        return None
-    return file_path.read_bytes()
+console = Console()

 def bundle_diff(
     bundle_a_path: Path = typer.Argument(..., help="The first bundle to compare.", exists=True, readable=True),
@@ -28,94 +19,59 @@ def bundle_diff(
     """
     console.print(Panel(f"Comparing [bold blue]{bundle_a_path.name}[/bold blue] with [bold blue]{bundle_b_path.name}[/bold blue]"))

-    with tempfile.TemporaryDirectory() as temp_a_str, tempfile.TemporaryDirectory() as temp_b_str:
-        temp_a = Path(temp_a_str)
-        temp_b = Path(temp_b_str)
-
-        # Unpack both bundles
-        with zipfile.ZipFile(bundle_a_path, 'r') as z: z.extractall(temp_a)
-        with zipfile.ZipFile(bundle_b_path, 'r') as z: z.extractall(temp_b)
-
-        # --- 1. Compare Signatures ---
-        sig_a = _get_file_content(temp_a, "manifest.sig")
-        sig_b = _get_file_content(temp_b, "manifest.sig")
-
-        sig_panel_style = "green"
-        sig_status = ""
-        if sig_a == sig_b and sig_a is not None:
-            sig_status = "[green]UNCHANGED[/green]"
-        elif sig_a and not sig_b:
-            sig_status = "[yellow]REMOVED[/yellow]"
-            sig_panel_style = "yellow"
-        elif not sig_a and sig_b:
-            sig_status = "[yellow]ADDED[/yellow]"
-            sig_panel_style = "yellow"
-        elif sig_a and sig_b and sig_a != sig_b:
-            sig_status = "[bold red]CHANGED[/bold red]"
-            sig_panel_style = "red"
-        else:
-            sig_status = "[dim]Both Unsigned[/dim]"
-            sig_panel_style = "dim"
-
-        console.print(Panel(f"Signature Status: {sig_status}", title="[bold]Signature Verification[/bold]", border_style=sig_panel_style, expand=False))
-
-        # --- 2. Compare Manifests ---
-        manifest_a_content = (temp_a / "manifest.yaml").read_text()
-        manifest_b_content = (temp_b / "manifest.yaml").read_text()
-
-        if manifest_a_content != manifest_b_content:
-            diff = difflib.unified_diff(
-                manifest_a_content.splitlines(keepends=True),
-                manifest_b_content.splitlines(keepends=True),
-                fromfile=f"{bundle_a_path.name}/manifest.yaml",
-                tofile=f"{bundle_b_path.name}/manifest.yaml",
-            )
-
-            diff_text = Text()
-            for line in diff:
-                if line.startswith('+'):
-                    diff_text.append(line, style="green")
-                elif line.startswith('-'):
-                    diff_text.append(line, style="red")
-                elif line.startswith('^'):
-                    diff_text.append(line, style="blue")
-                else:
-                    diff_text.append(line)
-
-            console.print(Panel(diff_text, title="[bold]Manifest Differences[/bold]", border_style="yellow"))
-        else:
-            console.print(Panel("[green]Manifests are identical.[/green]", title="[bold]Manifest[/bold]", border_style="green"))
-
-        # --- 3. Compare File Contents (via checksums) ---
-        manifest_a = yaml.safe_load(manifest_a_content)
-        manifest_b = yaml.safe_load(manifest_b_content)
-
-        files_a = {p['path']: p.get('checksum') for p in manifest_a.get('bundlePlugins', {}).get(next(iter(manifest_a.get('bundlePlugins', {})), ''), {}).get('binaries', [])}
-        files_b = {p['path']: p.get('checksum') for p in manifest_b.get('bundlePlugins', {}).get(next(iter(manifest_b.get('bundlePlugins', {})), ''), {}).get('binaries', [])}
-
+    try:
+        results = diff_bundle(bundle_a_path, bundle_b_path, progress_callback=typer.echo)
+    except Exception as e:
+        typer.secho(f"Error comparing bundles: {e}", fg=typer.colors.RED)
+        raise typer.Exit(code=1)
+
+    # --- 1. Display Signature Differences ---
+    sig_status = results['signature']['status']
+    style_map = {
+        'UNCHANGED': ('[green]UNCHANGED[/green]', 'green'),
+        'REMOVED': ('[yellow]REMOVED[/yellow]', 'yellow'),
+        'ADDED': ('[yellow]ADDED[/yellow]', 'yellow'),
+        'CHANGED': ('[bold red]CHANGED[/bold red]', 'red'),
+        'UNSIGNED': ('[dim]Both Unsigned[/dim]', 'dim'),
+    }
+    sig_text, sig_style = style_map.get(sig_status, (sig_status, 'white'))
+    console.print(Panel(f"Signature Status: {sig_text}", title="[bold]Signature Verification[/bold]", border_style=sig_style, expand=False))
+
+    # --- 2. Display Manifest Differences ---
+    manifest_diff = results['manifest']['diff']
+    if manifest_diff:
+        diff_text = Text()
+        for line in manifest_diff:
+            if line.startswith('+'):
+                diff_text.append(line, style="green")
+            elif line.startswith('-'):
+                diff_text.append(line, style="red")
+            elif line.startswith('^'):
+                diff_text.append(line, style="blue")
+            else:
+                diff_text.append(line)
+        console.print(Panel(diff_text, title="[bold]Manifest Differences[/bold]", border_style="yellow"))
+    else:
+        console.print(Panel("[green]Manifests are identical.[/green]", title="[bold]Manifest[/bold]", border_style="green"))
+
+    # --- 3. Display File Content Differences ---
+    file_diffs = results['files']
+    if file_diffs:
         table = Table(title="File Content Comparison")
         table.add_column("File Path", style="cyan")
         table.add_column("Status", style="magenta")
         table.add_column("Details", style="yellow")

-        all_files = sorted(list(set(files_a.keys()) | set(files_b.keys())))
-        has_content_changes = False
-
-        for file in all_files:
-            in_a = file in files_a
-            in_b = file in files_b
-
-            if in_a and not in_b:
-                table.add_row(file, "[red]REMOVED[/red]", "")
-                has_content_changes = True
-            elif not in_a and in_b:
-                table.add_row(file, "[green]ADDED[/green]", "")
-                has_content_changes = True
-            elif files_a[file] != files_b[file]:
-                table.add_row(file, "[yellow]MODIFIED[/yellow]", f"Checksum changed from {files_a.get(file, 'N/A')} to {files_b.get(file, 'N/A')}")
-                has_content_changes = True
-
-        if has_content_changes:
-            console.print(table)
-        else:
-            console.print(Panel("[green]All file contents are identical.[/green]", title="[bold]File Contents[/bold]", border_style="green"))
+        status_map = {
+            'REMOVED': '[red]REMOVED[/red]',
+            'ADDED': '[green]ADDED[/green]',
+            'MODIFIED': '[yellow]MODIFIED[/yellow]'
+        }
+
+        for diff in file_diffs:
+            status_text = status_map.get(diff['status'], diff['status'])
+            table.add_row(diff['path'], status_text, diff['details'])
+
+        console.print(table)
+    else:
+        console.print(Panel("[green]All file contents are identical.[/green]", title="[bold]File Contents[/bold]", border_style="green"))
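
A minimal sketch of the result dictionary the display code above consumes; the key names ('signature', 'manifest', 'files' and the per-file 'path'/'status'/'details' fields) are taken from the new CLI code, while the bundle paths are hypothetical:

from pathlib import Path

from fourdst.core.bundle import diff_bundle

results = diff_bundle(Path("a.fbundle"), Path("b.fbundle"), progress_callback=print)

# Signature comparison status, e.g. 'UNCHANGED', 'ADDED', 'REMOVED', 'CHANGED'.
print(results['signature']['status'])

# Each file entry carries the path, its change status, and a details string.
for entry in results['files']:
    print(entry['path'], entry['status'], entry['details'])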
@@ -7,6 +7,8 @@ import yaml
 import zipfile
 from pathlib import Path
 import questionary
+from prompt_toolkit.key_binding import KeyBindings
+from questionary.prompts.checkbox import checkbox
 import subprocess
 import sys
 import traceback
@@ -21,11 +23,66 @@ from rich.panel import Panel

 console = Console()

-from fourdst.cli.common.utils import get_available_build_targets, _build_plugin_in_docker, _build_plugin_for_target
+from fourdst.core.bundle import get_fillable_targets, fill_bundle
+from fourdst.cli.common.utils import run_command_rich # Keep for progress display if needed

-bundle_app = typer.Typer()
+custom_key_bindings = KeyBindings()

+def _is_arch(target_info, arch_keywords):
+    """Helper to check if a target's info contains architecture keywords."""
+    # Combine all relevant string values from the target dict to check against.
+    text_to_check = ""
+    if 'triplet' in target_info:
+        text_to_check += target_info['triplet'].lower()
+    if 'docker_image' in target_info:
+        text_to_check += target_info['docker_image'].lower()
+    if 'cross_file' in target_info:
+        # Convert path to string for searching
+        text_to_check += str(target_info['cross_file']).lower()
+
+    if not text_to_check:
+        return False
+
+    return any(keyword in text_to_check for keyword in arch_keywords)
+
+@custom_key_bindings.add('c-a')
+def _(event):
+    """
+    Handler for Ctrl+A. Selects all ARM targets.
+    """
+    control = event.app.layout.current_control
+    # Keywords to identify ARM architectures
+    arm_keywords = ['aarch64', 'arm64']
+
+    for i, choice in enumerate(control.choices):
+        # The choice.value is the dictionary we passed to questionary.Choice
+        target_info = choice.value.get('target', {})
+        if _is_arch(target_info, arm_keywords):
+            # Add the index to the set of selected items
+            if i not in control.selected_indexes:
+                control.selected_indexes.add(i)
+
+    # Redraw the UI to show the new selections
+    event.app.invalidate()
+
+
+@custom_key_bindings.add('c-x')
+def _(event):
+    """
+    Handler for Ctrl+X. Selects all x86 targets.
+    """
+    control = event.app.layout.current_control
+    # Keywords to identify x86 architectures
+    x86_keywords = ['x86_64', 'x86', 'amd64'] # 'amd64' is a common alias in Docker
+
+    for i, choice in enumerate(control.choices):
+        target_info = choice.value.get('target', {})
+        if _is_arch(target_info, x86_keywords):
+            if i not in control.selected_indexes:
+                control.selected_indexes.add(i)
+
+    event.app.invalidate()

-@bundle_app.command("fill")
 def bundle_fill(bundle_path: Path = typer.Argument(..., help="The .fbundle file to fill with new binaries.", exists=True)):
     """
     Builds new binaries for the current host or cross-targets from the bundle's source.
@@ -34,138 +91,95 @@ def bundle_fill(bundle_path: Path = typer.Argument(..., help="The .fbundle file
     if staging_dir.exists():
         shutil.rmtree(staging_dir)

+    console.print(Panel(f"[bold]Filling Bundle:[/bold] {bundle_path.name}", expand=False, border_style="blue"))
+
+    # 1. Find available targets and missing binaries using the core function
     try:
-        # 1. Unpack and load manifest
-        with zipfile.ZipFile(bundle_path, 'r') as bundle_zip:
-            bundle_zip.extractall(staging_dir)
-
-        manifest_path = staging_dir / "manifest.yaml"
-        if not manifest_path.exists():
-            typer.secho("Error: Bundle is invalid. Missing manifest.yaml.", fg=typer.colors.RED)
-            raise typer.Exit(code=1)
-
-        with open(manifest_path, 'r') as f:
-            manifest = yaml.safe_load(f)
-
-        # 2. Find available targets and missing binaries
-        available_targets = get_available_build_targets()
-        build_options = []
-
-        for plugin_name, plugin_data in manifest.get('bundlePlugins', {}).items():
-            if "sdist" not in plugin_data:
-                continue # Cannot build without source
-
-            existing_abis = {b['platform']['abi_signature'] for b in plugin_data.get('binaries', [])}
-
-            for target in available_targets:
-                # Use a more descriptive name for the choice
-                if target.get('docker_image', None):
-                    display_name = f"Docker: {target['docker_image']}"
-                elif target.get('cross_file', None):
-                    display_name = f"Cross: {Path(target['cross_file']).name}"
-                else:
-                    display_name = f"Native: {target['abi_signature']} (Local System)"
-
-                if target['abi_signature'] not in existing_abis:
-                    build_options.append({
-                        "name": f"Build '{plugin_name}' for {display_name}",
-                        "plugin_name": plugin_name,
-                        "target": target
-                    })
-
-        if not build_options:
-            typer.secho("✅ Bundle is already full for all available build targets.", fg=typer.colors.GREEN)
-            raise typer.Exit()
-
-        # 3. Prompt user to select which targets to build
-        choices = [opt['name'] for opt in build_options]
-        selected_builds = questionary.checkbox(
-            "Select which missing binaries to build:",
-            choices=choices
-        ).ask()
-
-        if not selected_builds:
-            typer.echo("No binaries selected to build. Exiting.")
-            raise typer.Exit()
-
-        # 4. Build selected targets
-        for build_name in selected_builds:
-            build_job = next(opt for opt in build_options if opt['name'] == build_name)
-            plugin_name = build_job['plugin_name']
-            target = build_job['target']
-
-            typer.secho(f"\nBuilding {plugin_name} for target '{build_name}'...", bold=True)
-
-            sdist_zip_path = staging_dir / manifest['bundlePlugins'][plugin_name]['sdist']['path']
-            build_temp_dir = staging_dir / f"build_{plugin_name}"
-
-            try:
-                if target['docker_image']:
-                    if not docker:
-                        typer.secho("Error: Docker is not installed. Please install Docker to build this target.", fg=typer.colors.RED)
-                        continue
-                    compiled_lib, final_target = _build_plugin_in_docker(sdist_zip_path, build_temp_dir, target, plugin_name)
-                else:
-                    compiled_lib, final_target = _build_plugin_for_target(sdist_zip_path, build_temp_dir, target)
-
-                # Add new binary to bundle
-                abi_tag = final_target["abi_signature"]
-                base_name = compiled_lib.stem
-                ext = compiled_lib.suffix
-                triplet = final_target["triplet"]
-                tagged_filename = f"{base_name}.{triplet}.{abi_tag}{ext}"
-
-                binaries_dir = staging_dir / "bin"
-                binaries_dir.mkdir(exist_ok=True)
-                staged_lib_path = binaries_dir / tagged_filename
-                shutil.move(compiled_lib, staged_lib_path)
-
-                # Update manifest
-                new_binary_entry = {
-                    "platform": {
-                        "triplet": final_target["triplet"],
-                        "abi_signature": abi_tag,
-                        "arch": final_target["arch"]
-                    },
-                    "path": staged_lib_path.relative_to(staging_dir).as_posix(),
-                    "compiledOn": datetime.datetime.now().isoformat()
-                }
-                manifest['bundlePlugins'][plugin_name]['binaries'].append(new_binary_entry)
-                typer.secho(f" -> Successfully built and staged {tagged_filename}", fg=typer.colors.GREEN)
-
-            except (FileNotFoundError, subprocess.CalledProcessError) as e:
-                typer.secho(f" -> Failed to build {plugin_name} for target '{build_name}': {e}", fg=typer.colors.RED)
-                tb_str = traceback.format_exc()
-                console.print(Panel(
-                    tb_str,
-                    title="Traceback",
-                    border_style="yellow",
-                    expand=False
-                ))
-            finally:
-                if build_temp_dir.exists():
-                    shutil.rmtree(build_temp_dir)
-
-        # 5. Repackage the bundle
-        # Invalidate any old signature
-        if "bundleAuthorKeyFingerprint" in manifest:
-            del manifest["bundleAuthorKeyFingerprint"]
-        if (staging_dir / "manifest.sig").exists():
-            (staging_dir / "manifest.sig").unlink()
-        typer.secho("\n⚠️ Bundle signature has been invalidated by this operation. Please re-sign the bundle.", fg=typer.colors.YELLOW)
-
-        with open(manifest_path, 'w') as f:
-            yaml.dump(manifest, f, sort_keys=False)
-
-        with zipfile.ZipFile(bundle_path, 'w', zipfile.ZIP_DEFLATED) as bundle_zip:
-            for file_path in staging_dir.rglob('*'):
-                if file_path.is_file():
-                    bundle_zip.write(file_path, file_path.relative_to(staging_dir))
-
-        typer.secho(f"\n✅ Bundle '{bundle_path.name}' has been filled successfully.", fg=typer.colors.GREEN)
-
-    finally:
-        if staging_dir.exists():
-            shutil.rmtree(staging_dir)
+        fillable_targets = get_fillable_targets(bundle_path)
+    except Exception as e:
+        console.print(f"[red]Error analyzing bundle: {e}[/red]")
+        raise typer.Exit(code=1)
+
+    if not fillable_targets:
+        console.print("[green]✅ Bundle is already full for all available build targets.[/green]")
+        raise typer.Exit()
+
+    # 2. Create interactive choices for the user
+    build_options = []
+    BOLD = "\033[1m"
+    RESET = "\033[0m"
+    CYAN = "\033[36m"
+    for plugin_name, targets in fillable_targets.items():
+        for target in targets:
+            if target['type'] == 'docker':
+                display_name = f"Docker: {target['docker_image']}"
+            elif target['type'] == 'cross':
+                display_name = f"Cross-compile: {Path(target['cross_file']).name}"
+            else: # native
+                display_name = f"Native: {target['triplet']}"
+
+            build_options.append({
+                "name": f"Build {plugin_name} for {display_name}",
+                "value": {"plugin_name": plugin_name, "target": target}
+            })
+
+    # 3. Prompt user to select which targets to build
+    if not build_options:
+        console.print("[yellow]No buildable targets found.[/yellow]")
+        raise typer.Exit()
+
+    choices = [
+        questionary.Choice(title=opt['name'], value=opt['value'])
+        for opt in build_options
+    ]
+
+    message = (
+        "Select which missing binaries to build:\n"
+        " (Press [Ctrl+A] to select all ARM, [Ctrl+X] to select all x86)"
+    )
+
+    # --- START OF FIX ---
+    # 1. Instantiate the Checkbox class directly instead of using the shortcut.
+    prompt = checkbox(
+        message,
+        choices=choices,
+        # key_bindings=custom_key_bindings
+    )
+    # 2. Use .unsafe_ask() to run the prompt object.
+    selected_jobs = prompt.unsafe_ask()
+    # --- END OF FIX ---
+
+    if not selected_jobs:
+        console.print("No binaries selected to build. Exiting.")
+        raise typer.Exit()
+
+    targets_to_build = {}
+    for job in selected_jobs:
+        plugin_name = job['plugin_name']
+        target = job['target']
+        if plugin_name not in targets_to_build:
+            targets_to_build[plugin_name] = []
+        targets_to_build[plugin_name].append(target)
+
+    try:
+        console.print("--- Starting build process ---")
+        fill_bundle(
+            bundle_path,
+            targets_to_build,
+            progress_callback=lambda msg: console.print(f"[dim] {msg}[/dim]")
+        )
+        console.print("--- Build process finished ---")
+        console.print(f"[green]✅ Bundle '{bundle_path.name}' has been filled successfully.[/green]")
+        console.print("[yellow]⚠️ If the bundle was signed, the signature is now invalid. Please re-sign.[/yellow]")
+    except Exception as e:
+        console.print(f"[red]An error occurred during the build process: {e}[/red]")
+        tb_str = traceback.format_exc()
+        console.print(Panel(
+            tb_str,
+            title="Traceback",
+            border_style="red",
+            expand=False
+        ))
+        raise typer.Exit(code=1)
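
A minimal non-interactive sketch of the fill flow above; get_fillable_targets returns a {plugin_name: [target, ...]} mapping (the same shape targets_to_build takes in the new code), and the bundle path is a hypothetical example:

from pathlib import Path

from fourdst.core.bundle import get_fillable_targets, fill_bundle

bundle = Path("my_plugins.fbundle")

# Discover every plugin/target combination that still lacks a binary.
fillable = get_fillable_targets(bundle)

# Build all of them, skipping the interactive checkbox prompt entirely.
fill_bundle(bundle, fillable, progress_callback=print)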
@@ -1,206 +1,119 @@
 # fourdst/cli/bundle/inspect.py

 import typer
-import sys
-import shutil
-import yaml
-import zipfile
-import hashlib
 from pathlib import Path
+from rich.console import Console
+from rich.panel import Panel
+from rich.table import Table
+from rich.text import Text

-from cryptography.hazmat.primitives import serialization, hashes
-from cryptography.hazmat.primitives.asymmetric import padding, rsa, ed25519
-from cryptography.exceptions import InvalidSignature
+from fourdst.core.bundle import inspect_bundle

-from fourdst.cli.common.config import LOCAL_TRUST_STORE_PATH
-from fourdst.cli.common.utils import get_platform_identifier, calculate_sha256, is_abi_compatible
+console = Console()

-bundle_app = typer.Typer()
-
-def _reconstruct_canonical_checksum_list(staging_dir: Path, manifest: dict) -> tuple[str, list[str], list[str]]:
+def display_inspection_report(report: dict):
     """
-    Reconstructs the canonical checksum list from the files on disk
-    and compares them against the checksums listed in the manifest.
-
-    Returns a tuple containing:
-    1. The canonical string of actual checksums to verify against the signature.
-    2. A list of files with checksum mismatches.
-    3. A list of files that are listed in the manifest but missing from the disk.
+    Displays the inspection report using rich components.
     """
-    checksum_map = {}
-    mismatch_errors = []
-    missing_files = []
-
-    all_files_in_manifest = []
-    # Gather all file paths from the manifest
-    for plugin_data in manifest.get('bundlePlugins', {}).values():
-        if 'sdist' in plugin_data and 'path' in plugin_data['sdist']:
-            all_files_in_manifest.append(plugin_data['sdist'])
-        if 'binaries' in plugin_data:
-            all_files_in_manifest.extend(plugin_data['binaries'])
-
-    for file_info in all_files_in_manifest:
-        path_str = file_info.get('path')
-        if not path_str:
-            continue
-
-        file_path = staging_dir / path_str
-        expected_checksum = file_info.get('checksum')
-
-        if not file_path.exists():
-            missing_files.append(path_str)
-            continue
-
-        # Calculate actual checksum from the file on disk
-        actual_checksum = "sha256:" + calculate_sha256(file_path)
-        checksum_map[path_str] = actual_checksum
-
-        # Compare with the checksum listed in the manifest
-        if expected_checksum and actual_checksum != expected_checksum:
-            mismatch_errors.append(path_str)
-
-    # Create the canonical string for signature verification from the actual file checksums
-    sorted_paths = sorted(checksum_map.keys())
-    canonical_list = [f"{path}:{checksum_map[path]}" for path in sorted_paths]
-    data_to_verify = "\n".join(canonical_list)
-
-    return data_to_verify, mismatch_errors, missing_files
-
-@bundle_app.command("inspect")
-def bundle_inspect(bundle_path: Path = typer.Argument(..., help="The .fbundle file to inspect.", exists=True)):
+    manifest = report.get('manifest', {})
+    host_info = report.get('host_info', {})
+    validation = report.get('validation', {})
+    signature = report.get('signature', {})
+    plugins = report.get('plugins', {})
+
+    # --- Header ---
+    console.print(Panel(f"Inspection Report for [bold blue]{manifest.get('bundleName', 'N/A')}[/bold blue]", expand=False))
+
+    meta_table = Table.grid(padding=(0, 2))
+    meta_table.add_column()
+    meta_table.add_column()
+    meta_table.add_row("Name:", manifest.get('bundleName', 'N/A'))
+    meta_table.add_row("Version:", manifest.get('bundleVersion', 'N/A'))
+    meta_table.add_row("Author:", manifest.get('bundleAuthor', 'N/A'))
+    meta_table.add_row("Bundled On:", manifest.get('bundledOn', 'N/A'))
+    meta_table.add_row("Host ABI:", Text(host_info.get('abi_signature', 'N/A'), style="dim"))
+    meta_table.add_row("Host Arch:", Text(host_info.get('triplet', 'N/A'), style="dim"))
+    console.print(meta_table)
+    console.print("─" * 50)
+
+    # --- Trust Status ---
+    status = signature.get('status', 'UNKNOWN')
+    if status == 'TRUSTED':
+        console.print(Panel(f"[bold green]✅ Trust Status: SIGNED and TRUSTED[/bold green]\nKey: [dim]{signature.get('key_path')}[/dim]", expand=False, border_style="green"))
+    elif status == 'UNSIGNED':
+        console.print(Panel("[bold yellow]🟡 Trust Status: UNSIGNED[/bold yellow]", expand=False, border_style="yellow"))
+    elif status == 'UNTRUSTED':
+        console.print(Panel(f"[bold yellow]⚠️ Trust Status: SIGNED but UNTRUSTED AUTHOR[/bold yellow]\nFingerprint: [dim]{signature.get('fingerprint')}[/dim]", expand=False, border_style="yellow"))
+    elif status == 'INVALID':
+        console.print(Panel(f"[bold red]❌ Trust Status: INVALID SIGNATURE[/bold red]\n{signature.get('reason')}", expand=False, border_style="red"))
+    elif status == 'TAMPERED':
+        console.print(Panel(f"[bold red]❌ Trust Status: TAMPERED[/bold red]\n{signature.get('reason')}", expand=False, border_style="red"))
+    elif status == 'UNSUPPORTED':
+        console.print(Panel(f"[bold red]❌ Trust Status: CRYPTOGRAPHY NOT SUPPORTED[/bold red]\n{signature.get('reason')}", expand=False, border_style="red"))
+    else:
+        console.print(Panel(f"[bold red]❌ Trust Status: ERROR[/bold red]\n{signature.get('reason')}", expand=False, border_style="red"))
+
+    # --- Validation Issues ---
+    errors = validation.get('errors', [])
+    warnings = validation.get('warnings', [])
+    if errors or warnings:
+        console.print("─" * 50)
+        console.print("[bold]Validation Issues:[/bold]")
+        for error in errors:
+            console.print(Text(f" - [red]Error:[/red] {error}"))
+        for warning in warnings:
+            console.print(Text(f" - [yellow]Warning:[/yellow] {warning}"))
+
+    # --- Plugin Details ---
+    console.print("─" * 50)
+    console.print("[bold]Available Plugins:[/bold]")
+    if not plugins:
+        console.print(" No plugins found in bundle.")
+
+    for name, data in plugins.items():
+        console.print(Panel(f"Plugin: [bold]{name}[/bold]", expand=False, border_style="blue"))
+        console.print(f" Source Dist: [dim]{data.get('sdist_path', 'N/A')}[/dim]")
+
+        binaries = data.get('binaries', [])
+        if not binaries:
+            console.print(" Binaries: None")
+        else:
+            bin_table = Table(title="Binaries", show_header=True, header_style="bold magenta")
+            bin_table.add_column("Path")
+            bin_table.add_column("Architecture")
+            bin_table.add_column("ABI")
+            bin_table.add_column("Host Compatible?", style="cyan")
+            bin_table.add_column("Reason for Incompatibility", style="red")
+
+            for b in binaries:
+                plat = b.get('platform', {})
+                style = "green" if b.get('is_compatible') else "default"
+                compat_text = "✅ Yes" if b.get('is_compatible') else "No"
+                reason = b.get('incompatibility_reason', '') or ''
+                bin_table.add_row(
+                    Text(b.get('path', 'N/A'), style=style),
+                    Text(plat.get('triplet', 'N/A'), style=style),
+                    Text(plat.get('abi_signature', 'N/A'), style=style),
+                    Text(compat_text, style="cyan"),
+                    Text(reason, style="red")
+                )
+            console.print(bin_table)
+
+        if not data.get('compatible_found'):
+            console.print(Text(" Note: No compatible binary found for the current system.", style="yellow"))
+            console.print(Text(" Run 'fourdst bundle fill' to build one.", style="yellow"))
+
+def bundle_inspect(bundle_path: Path = typer.Argument(..., help="The .fbundle file to inspect.", exists=True, resolve_path=True)):
     """
     Inspects a bundle, validating its contents and cryptographic signature.
     """
-    staging_dir = Path(f"temp_inspect_{bundle_path.stem}")
-    if staging_dir.exists():
-        shutil.rmtree(staging_dir)
-
     try:
-        host_platform = get_platform_identifier()
-
-        with zipfile.ZipFile(bundle_path, 'r') as bundle_zip:
-            bundle_zip.extractall(staging_dir)
-
-        manifest_path = staging_dir / "manifest.yaml"
-        if not manifest_path.exists():
-            typer.secho("Error: Bundle is invalid. Missing manifest.yaml.", fg=typer.colors.RED)
+        report = inspect_bundle(bundle_path)
+        display_inspection_report(report)
+        # Exit with an error code if validation failed, to support scripting
+        if report.get('validation', {}).get('status') != 'passed':
             raise typer.Exit(code=1)
-
-        with open(manifest_path, 'r') as f:
-            manifest = yaml.safe_load(f)
+    except Exception:
+        console.print_exception(show_locals=True)
+        raise typer.Exit(code=1)
-
-        typer.secho(f"--- Bundle Inspection Report for: {bundle_path.name} ---", bold=True)
-        # ... (header printing code is unchanged) ...
-        typer.echo(f"Name: {manifest.get('bundleName', 'N/A')}")
-        typer.echo(f"Version: {manifest.get('bundleVersion', 'N/A')}")
-        typer.echo(f"Author: {manifest.get('bundleAuthor', 'N/A')}")
-        typer.echo(f"Bundled: {manifest.get('bundledOn', 'N/A')}")
-        typer.secho(f"Host ABI: {host_platform['abi_signature']}", dim=True)
-        typer.secho(f"Host Arch: {host_platform['triplet']}", dim=True)
-        typer.echo("-" * 50)
-
-        # 3. Signature and Trust Verification
-        fingerprint = manifest.get('bundleAuthorKeyFingerprint')
-        sig_path = staging_dir / "manifest.sig"
-
-        if not fingerprint or not sig_path.exists():
-            typer.secho("Trust Status: 🟡 UNSIGNED", fg=typer.colors.YELLOW)
-        else:
-            trusted_key_path = None
-            if LOCAL_TRUST_STORE_PATH.exists():
-                # Find the key in the local trust store
-                # ... (key finding logic is unchanged) ...
-                for key_file in LOCAL_TRUST_STORE_PATH.rglob("*.pem"):
-                    try:
-                        pub_der = (serialization.load_pem_public_key(key_file.read_bytes())
-                                   .public_bytes(
-                                       encoding=serialization.Encoding.DER,
-                                       format=serialization.PublicFormat.SubjectPublicKeyInfo
-                                   ))
-                        pub_key_fingerprint = "sha256:" + hashlib.sha256(pub_der).hexdigest()
-                        if pub_key_fingerprint == fingerprint:
-                            trusted_key_path = key_file
-                            break
-                    except Exception:
-                        continue
-
-            if not trusted_key_path:
-                typer.secho(f"Trust Status: ⚠️ SIGNED but UNTRUSTED AUTHOR ({fingerprint})", fg=typer.colors.YELLOW)
-            else:
-                # --- MODIFIED VERIFICATION LOGIC ---
-                try:
-                    pub_key_obj = serialization.load_pem_public_key(trusted_key_path.read_bytes())
-                    signature = sig_path.read_bytes()
-
-                    # Reconstruct the data that was originally signed
-                    data_to_verify, checksum_errors, missing_files = _reconstruct_canonical_checksum_list(staging_dir, manifest)
-                    with open("data_to_verify.bin", "wb") as f:
-                        f.write(data_to_verify.encode('utf-8'))
-
-                    # Verify the signature against the reconstructed data
-                    if isinstance(pub_key_obj, ed25519.Ed25519PublicKey):
-                        pub_key_obj.verify(signature, data_to_verify.encode('utf-8'))
-                    elif isinstance(pub_key_obj, rsa.RSAPublicKey):
-                        pub_key_obj.verify(
-                            signature,
-                            data_to_verify.encode('utf-8'),
-                            padding.PKCS1v15(),
-                            hashes.SHA256()
-                        )
-
-                    # If we reach here, the signature is cryptographically valid.
-                    # Now we check if the manifest's checksums match the actual file checksums.
-                    if checksum_errors or missing_files:
-                        typer.secho(f"Trust Status: ❌ INVALID - Files have been tampered with after signing.", fg=typer.colors.RED)
-                        for f in missing_files:
-                            typer.echo(f" - Missing file listed in manifest: {f}")
-                        for f in checksum_errors:
-                            typer.echo(f" - Checksum mismatch for: {f}")
-                    else:
-                        typer.secho(f"Trust Status: ✅ SIGNED and TRUSTED ({trusted_key_path.relative_to(LOCAL_TRUST_STORE_PATH)})", fg=typer.colors.GREEN)
-
-                except InvalidSignature:
-                    typer.secho(f"Trust Status: ❌ INVALID SIGNATURE - The bundle's integrity is compromised.", fg=typer.colors.RED)
-
-        typer.echo("-" * 50)
-
-        # ... (Plugin Details section is unchanged) ...
-        typer.secho("Available Plugins:", bold=True)
-        for plugin_name, plugin_data in manifest.get('bundlePlugins', {}).items():
-            typer.echo(f"\n Plugin: {plugin_name}")
-            typer.echo(f" Source Dist: {plugin_data.get('sdist', {}).get('path', 'N/A')}")
-            binaries = plugin_data.get('binaries', [])
-
-            host_compatible_binary_found = False
-            if not binaries:
-                typer.echo(" Binaries: None")
-            else:
-                typer.echo(" Binaries:")
-                for b in binaries:
-                    plat = b.get('platform', {})
-                    is_compatible = (plat.get('triplet') == host_platform['triplet'] and
-                                     is_abi_compatible(host_platform['abi_signature'], plat.get('abi_signature', '')))
-
-                    color = typer.colors.GREEN if is_compatible else None
-                    if is_compatible:
-                        host_compatible_binary_found = True
-
-                    typer.secho(f" - Path: {b.get('path', 'N/A')}", fg=color)
-                    typer.secho(f" ABI: {plat.get('abi_signature', 'N/A')}", fg=color, dim=True)
-                    typer.secho(f" Arch: {plat.get('triplet', 'N/A')}", fg=color, dim=True)
-
-            if not host_compatible_binary_found:
-                typer.secho(
-                    f" Note: No compatible binary found for the current system ({host_platform['triplet']}).",
-                    fg=typer.colors.YELLOW
-                )
-                typer.secho(
-                    " Run 'fourdst-cli bundle fill' to build one.",
-                    fg=typer.colors.YELLOW
-                )
-
-    finally:
-        if staging_dir.exists():
-            shutil.rmtree(staging_dir)
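
A minimal sketch of consuming the report structure that display_inspection_report renders above; the top-level keys ('manifest', 'host_info', 'validation', 'signature', 'plugins') are taken from the new CLI code, and the bundle path is hypothetical:

from pathlib import Path

from fourdst.core.bundle import inspect_bundle

report = inspect_bundle(Path("my_plugins.fbundle"))

# Trust status as shown in the panels above, e.g. 'TRUSTED', 'UNSIGNED', 'TAMPERED'.
print(report['signature'].get('status'))

# Same scripting-friendly exit the CLI uses when validation fails.
if report.get('validation', {}).get('status') != 'passed':
    raise SystemExit(1)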
@@ -1,151 +1,26 @@
|
|||||||
# fourdst/cli/bundle/sign.py
|
# fourdst/cli/bundle/sign.py
|
||||||
|
|
||||||
import typer
|
import typer
|
||||||
import shutil
|
|
||||||
import yaml
|
|
||||||
import zipfile
|
|
||||||
import hashlib
|
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
import sys
|
|
||||||
import subprocess
|
|
||||||
|
|
||||||
from fourdst.cli.common.utils import calculate_sha256
|
from fourdst.core.bundle import sign_bundle
|
||||||
|
|
||||||
bundle_app = typer.Typer()
|
|
||||||
|
|
||||||
def _create_canonical_checksum_list(staging_dir: Path, manifest: dict) -> str:
|
|
||||||
"""
|
|
||||||
Creates a deterministic, sorted string of all file paths and their checksums.
|
|
||||||
This string is the actual data that will be signed.
|
|
||||||
"""
|
|
||||||
checksum_map = {}
|
|
||||||
|
|
||||||
# Iterate through all plugins to find all files to be checksummed
|
|
||||||
for plugin_data in manifest.get('bundlePlugins', {}).values():
|
|
||||||
# Add sdist (source code zip) to the list
|
|
||||||
sdist_info = plugin_data.get('sdist', {})
|
|
||||||
if 'path' in sdist_info:
|
|
||||||
file_path = staging_dir / sdist_info['path']
|
|
||||||
if file_path.exists():
|
|
||||||
checksum = "sha256:" + calculate_sha256(file_path)
|
|
||||||
# Also update the manifest with the sdist checksum
|
|
||||||
sdist_info['checksum'] = checksum
|
|
||||||
checksum_map[sdist_info['path']] = checksum
|
|
||||||
else:
|
|
||||||
# This case should ideally be caught by a validation step
|
|
||||||
typer.secho(f"Warning: sdist file not found: {sdist_info['path']}", fg=typer.colors.YELLOW)
|
|
||||||
|
|
||||||
|
|
||||||
# Add all binaries to the list
|
|
||||||
for binary in plugin_data.get('binaries', []):
|
|
||||||
if 'path' in binary:
|
|
||||||
file_path = staging_dir / binary['path']
|
|
||||||
if file_path.exists():
|
|
||||||
checksum = "sha256:" + calculate_sha256(file_path)
|
|
||||||
# Update the manifest with the binary checksum
|
|
||||||
binary['checksum'] = checksum
|
|
||||||
checksum_map[binary['path']] = checksum
|
|
||||||
else:
|
|
||||||
typer.secho(f"Warning: Binary file not found: {binary['path']}", fg=typer.colors.YELLOW)
|
|
||||||
|
|
||||||
# Sort the file paths to ensure a deterministic order
|
|
||||||
sorted_paths = sorted(checksum_map.keys())
|
|
||||||
|
|
||||||
# Create the final canonical string (e.g., "path1:checksum1\npath2:checksum2")
|
|
||||||
canonical_list = [f"{path}:{checksum_map[path]}" for path in sorted_paths]
|
|
||||||
|
|
||||||
return "\n".join(canonical_list)
|
|
||||||
|
|
||||||
|
|
||||||
@bundle_app.command("sign")
|
|
||||||
def bundle_sign(
|
 def bundle_sign(
     bundle_path: Path = typer.Argument(..., help="The .fbundle file to sign.", exists=True),
     private_key: Path = typer.Option(..., "--key", "-k", help="Path to the author's private signing key.", exists=True)
 ):
     """
     Signs a bundle with an author's private key.
-
-    This process calculates checksums for all source and binary files,
-    adds them to the manifest, and then signs a canonical list of these
-    checksums to ensure the integrity of the entire bundle.
     """
-    print(f"Signing bundle: {bundle_path}")
-    staging_dir = Path("temp_sign_staging")
-    if staging_dir.exists():
-        shutil.rmtree(staging_dir)
-
-    # 1. Unpack the bundle
-    with zipfile.ZipFile(bundle_path, 'r') as bundle_zip:
-        bundle_zip.extractall(staging_dir)
-
-    manifest_path = staging_dir / "manifest.yaml"
-    if not manifest_path.exists():
-        print("Error: manifest.yaml not found in bundle.", file=sys.stderr)
-        raise typer.Exit(code=1)
-
-    # 2. Ensure PEM private key and derive public key fingerprint via openssl
-    if private_key.suffix.lower() != ".pem":
-        typer.secho("Error: Private key must be a .pem file.", fg=typer.colors.RED)
-        raise typer.Exit(code=1)
-    typer.echo(" - Deriving public key fingerprint via openssl...")
+    def progress_callback(message):
+        typer.secho(message, fg=typer.colors.BRIGHT_BLUE)
+
     try:
-        proc = subprocess.run(
-            ["openssl", "pkey", "-in", str(private_key), "-pubout", "-outform", "DER"],
-            capture_output=True, check=True
+        sign_bundle(
+            bundle_path=bundle_path,
+            private_key=private_key,
+            progress_callback=progress_callback
         )
-        pub_der = proc.stdout
-        fingerprint = "sha256:" + hashlib.sha256(pub_der).hexdigest()
-        typer.echo(f" - Signing with key fingerprint: {fingerprint}")
-    except subprocess.CalledProcessError as e:
-        typer.secho(f"Error extracting public key: {e.stderr.decode().strip()}", fg=typer.colors.RED)
+    except Exception as e:
+        typer.secho(f"Error signing bundle: {e}", fg=typer.colors.RED, err=True)
         raise typer.Exit(code=1)
-
-    # 3. Load manifest and generate the canonical checksum list
-    with open(manifest_path, 'r') as f:
-        manifest = yaml.safe_load(f)
-
-    print(" - Calculating checksums for all source and binary files...")
-    # This function now also modifies the manifest in-place to add the checksums
-    data_to_sign = _create_canonical_checksum_list(staging_dir, manifest)
-
-    # Add the key fingerprint to the manifest
-    manifest['bundleAuthorKeyFingerprint'] = fingerprint
-
-    # 4. Write the updated manifest back to the staging directory
-    with open(manifest_path, 'w') as f:
-        yaml.dump(manifest, f, sort_keys=False)
-    print(" - Added file checksums and key fingerprint to manifest.")
-
-    # 5. Sign the canonical checksum list
-    typer.echo(" - Signing the canonical checksum list...")
-    canonical_temp_data_file = staging_dir / "canonical_checksums.txt"
-    canonical_temp_data_file.write_text(data_to_sign, encoding='utf-8')
-    sig_path = staging_dir / "manifest.sig"
-    try:
-        # We sign the string data directly, not the manifest file
-        cmd_list = [
-            "openssl",
-            "pkeyutl",
-            "-sign",
-            "-in", str(canonical_temp_data_file),
-            "-inkey", str(private_key),
-            "-out", str(sig_path)
-        ]
-        subprocess.run(
-            cmd_list,
-            check=True,
-            capture_output=True
-        )
-        typer.echo(f" - Created manifest.sig (> $ {' '.join(cmd_list)})")
-    except subprocess.CalledProcessError as e:
-        typer.secho(f"Error signing manifest: {e.stderr.decode().strip()}", fg=typer.colors.RED)
-        raise typer.Exit(code=1)
-
-    # 6. Repackage the bundle
-    with zipfile.ZipFile(bundle_path, 'w', zipfile.ZIP_DEFLATED) as bundle_zip:
-        for file_path in staging_dir.rglob('*'):
-            if file_path.is_file():
-                bundle_zip.write(file_path, file_path.relative_to(staging_dir))
-
-    shutil.rmtree(staging_dir)
-    print("\n✅ Bundle signed successfully!")
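The command above is now a thin wrapper over the core API. A minimal sketch of driving the same routine from a plain script, assuming `sign_bundle` is exported from `fourdst.core.bundle` the same way `validate_bundle` is imported in the next file (the paths here are illustrative, not from the repository):

    from pathlib import Path
    from fourdst.core.bundle import sign_bundle

    def log(message):
        # The CLI wraps this in typer.secho; plain print is enough in a script.
        print(message)

    sign_bundle(
        bundle_path=Path("my_plugins.fbundle"),  # hypothetical bundle
        private_key=Path("author_key.pem"),      # hypothetical key
        progress_callback=log,
    )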
@@ -1,211 +1,80 @@
 # fourdst/cli/bundle/validate.py
 import typer
-import yaml
-import zipfile
 from pathlib import Path
-import tempfile
-import shutil
-import hashlib
 from rich.console import Console
 from rich.panel import Panel
-from rich.text import Text
 from rich.table import Table
+from rich.text import Text
+
+from fourdst.core.bundle import validate_bundle
 
 console = Console()
 
-def _calculate_sha256(file_path: Path) -> str:
-    """Calculates the SHA256 checksum of a file."""
-    sha256_hash = hashlib.sha256()
-    with open(file_path, "rb") as f:
-        for byte_block in iter(lambda: f.read(4096), b""):
-            sha256_hash.update(byte_block)
-    return sha256_hash.hexdigest()
-
-def _validate_bundle_directory(path: Path, is_temp: bool = False, display_name: str = None):
-    """Validates a directory that is structured like an unpacked bundle."""
-    title = "Validating Pre-Bundle Directory" if not is_temp else "Validating Bundle Contents"
-    name = display_name or path.name
-    console.print(Panel(f"{title}: [bold]{name}[/bold]", border_style="blue"))
-
-    errors = 0
-    warnings = 0
-
-    # Section 1: Manifest file check
-    console.print(Panel("1. Manifest File Check", border_style="cyan"))
-
-    def check(condition, success_msg, error_msg, is_warning=False):
-        nonlocal errors, warnings
-        if condition:
-            console.print(Text(f"✅ {success_msg}", style="green"))
-            return True
-        else:
-            if is_warning:
-                console.print(Text(f"⚠️ {error_msg}", style="yellow"))
-                warnings += 1
-            else:
-                console.print(Text(f"❌ {error_msg}", style="red"))
-                errors += 1
-            return False
-
-    # 1. Check for manifest
-    manifest_file = path / "manifest.yaml"
-    if not check(manifest_file.is_file(), "Found manifest.yaml.", "Missing manifest.yaml file."):
-        raise typer.Exit(code=1)
-
-    try:
-        manifest = yaml.safe_load(manifest_file.read_text())
-        check(True, "Manifest file is valid YAML.", "")
-    except yaml.YAMLError as e:
-        check(False, "", f"Manifest file is not valid YAML: {e}")
-        raise typer.Exit(code=1)
-
-    # 2. Check manifest content
-    console.print(Panel("2. Manifest Content Validation", border_style="cyan"))
-    check(manifest is not None, "Manifest is not empty.", "Manifest file is empty.", is_warning=True)
-    check('bundleName' in manifest, "Manifest contains 'bundleName'.", "Manifest is missing 'bundleName'.")
-    check('bundleVersion' in manifest, "Manifest contains 'bundleVersion'.", "Manifest is missing 'bundleVersion'.")
-
-    plugins = manifest.get('bundlePlugins', {})
-    check(plugins, "Manifest contains 'bundlePlugins' section.", "Manifest is missing 'bundlePlugins' section.")
-
-    # Build Manifest Validation table
-    manifest_table = Table(title="Manifest Validation")
-    manifest_table.add_column("Check")
-    manifest_table.add_column("Status")
-    manifest_table.add_row("manifest.yaml exists", "✅" if manifest_file.is_file() else "❌")
-    # YAML parse status already captured by exception above
-    manifest_table.add_row("Manifest parses as YAML", "✅")
-    manifest_table.add_row("Manifest not empty", "✅" if manifest is not None else "⚠️")
-    manifest_table.add_row("bundleName present", "✅" if 'bundleName' in manifest else "❌")
-    manifest_table.add_row("bundleVersion present", "✅" if 'bundleVersion' in manifest else "❌")
-    has_plugins = bool(manifest.get('bundlePlugins'))
-    manifest_table.add_row("bundlePlugins section", "✅" if has_plugins else "❌")
-    console.print(manifest_table)
-    plugins = manifest.get('bundlePlugins', {})
-
-    # 3. Check files listed in manifest
-    console.print(Panel("3. Plugin Validation", border_style="magenta"))
-    for name, data in plugins.items():
-        console.print(Panel(f"Plugin: [bold cyan]{name}[/bold cyan]", border_style="magenta"))
-        sdist_info = data.get('sdist', {})
-        sdist_path_str = sdist_info.get('path')
-
-        if check(sdist_path_str, "sdist path is defined.", f"sdist path not defined for plugin '{name}'."):
-            sdist_path = path / sdist_path_str
-            check(sdist_path.exists(), f"sdist file found: {sdist_path_str}", f"sdist file not found: {sdist_path_str}")
-
-        for binary in data.get('binaries', []):
-            bin_path_str = binary.get('path')
-            if not check(bin_path_str, "Binary path is defined.", "Binary entry is missing a 'path'."):
-                continue
-
-            bin_path = path / bin_path_str
-            if check(bin_path.exists(), f"Binary file found: {bin_path_str}", f"Binary file not found: {bin_path_str}"):
-                expected_checksum = binary.get('checksum')
-                if check(expected_checksum, "Checksum is defined.", f"Checksum not defined for binary '{bin_path_str}'.", is_warning=True):
-                    actual_checksum = "sha256:" + _calculate_sha256(bin_path)
-                    check(
-                        actual_checksum == expected_checksum,
-                        f"Checksum matches for {bin_path_str}",
-                        f"Checksum mismatch for {bin_path_str}.\n Expected: {expected_checksum}\n Actual: {actual_checksum}"
-                    )
-
-    # Build Plugin Validation table
-    plugin_table = Table(title="Plugin Validation")
-    plugin_table.add_column("Plugin")
-    plugin_table.add_column("Sdist Defined")
-    plugin_table.add_column("Sdist Exists")
-    plugin_table.add_column("Binaries OK")
-    plugin_table.add_column("Checksums OK")
-    for name, data in plugins.items():
-        # sdist checks
-        sdist_path_str = data.get('sdist', {}).get('path')
-        sdist_defined = bool(sdist_path_str)
-        sdist_exists = sdist_defined and (path / sdist_path_str).exists()
-        # binary & checksum checks
-        binaries = data.get('binaries', [])
-        binaries_ok = all(b.get('path') and (path / b['path']).exists() for b in binaries)
-        checksums_ok = all(('checksum' in b and ("sha256:" + _calculate_sha256(path / b['path'])) == b['checksum']) for b in binaries)
-        plugin_table.add_row(
-            name,
-            "✅" if sdist_defined else "❌",
-            "✅" if sdist_exists else "❌",
-            "✅" if binaries_ok else "❌",
-            "✅" if checksums_ok else "❌"
-        )
-    console.print(plugin_table)
-
-    # 4. Check for signature
-    console.print(Panel("4. Signature Check", border_style="yellow"))
-    check((path / "manifest.sig").exists(), "Signature file 'manifest.sig' found.", "Signature file 'manifest.sig' is missing.", is_warning=True)
-
-    # Build Signature Check table
-    sig_table = Table(title="Signature Validation")
-    sig_table.add_column("Item")
-    sig_table.add_column("Status")
-    sig_exists = (path / "manifest.sig").exists()
-    sig_table.add_row(
-        "manifest.sig",
-        "✅" if sig_exists else "⚠️"
-    )
-    console.print(sig_table)
-
-    # Final summary
-    console.print("-" * 40)
-    # Display summary in a table
-
-    summary_table = Table(title="Validation Summary")
-    summary_table.add_column("Result")
-    summary_table.add_column("Errors", justify="right")
-    summary_table.add_column("Warnings", justify="right")
-
-    if errors == 0:
-        result = "Passed"
-        style = "green"
-    else:
-        result = "Failed"
-        style = "red"
-
-    summary_table.add_row(
-        f"[bold {style}]{result}[/bold {style}]",
-        str(errors),
-        str(warnings)
-    )
-    console.print(summary_table)
-    if errors != 0:
-        raise typer.Exit(code=1)
-
-def _validate_bundle_file(bundle_path: Path):
-    """Unpacks a .fbundle file and runs directory validation on its contents."""
-    with tempfile.TemporaryDirectory() as temp_dir_str:
-        temp_dir = Path(temp_dir_str)
-        try:
-            with zipfile.ZipFile(bundle_path, 'r') as bundle_zip:
-                bundle_zip.extractall(temp_dir)
-            _validate_bundle_directory(temp_dir, is_temp=True, display_name=bundle_path.name)
-        except zipfile.BadZipFile:
-            console.print(Panel(f"[red]Error: '{bundle_path.name}' is not a valid zip file.[/red]", title="Validation Error"))
-            raise typer.Exit(code=1)
-
 def bundle_validate(
-    path: Path = typer.Argument(
-        ".",
-        help="The path to the .fbundle file or pre-bundle directory to validate.",
+    bundle_path: Path = typer.Argument(
+        ...,
+        help="The .fbundle file to validate.",
         exists=True,
-        resolve_path=True
+        resolve_path=True,
+        file_okay=True,
+        dir_okay=False
     )
 ):
     """
-    Validates a packed .fbundle or a directory ready to be packed.
-
-    - If a directory is provided, it checks for a valid manifest and that all referenced files exist.
-    - If a .fbundle file is provided, it unpacks it and runs the same validation checks.
+    Validates the integrity and checksums of a .fbundle file.
     """
-    if path.is_dir():
-        _validate_bundle_directory(path)
-    elif path.is_file():
-        _validate_bundle_file(path)
-    else:
-        # This case should not be reached due to `exists=True`
-        console.print(Panel("[red]Error: Path is not a file or directory.[/red]", title="Validation Error"))
+    def progress_callback(message):
+        # For a CLI, we can choose to show progress or just wait for the final report.
+        # In this case, the final report is more structured and useful.
+        pass
+
+    try:
+        results = validate_bundle(
+            bundle_path=bundle_path,
+            progress_callback=progress_callback
+        )
+
+        console.print(Panel(f"Validation Report for: [bold]{bundle_path.name}[/bold]", border_style="blue"))
+
+        if results['errors']:
+            console.print(Panel("Errors", border_style="red", expand=False))
+            for error in results['errors']:
+                console.print(Text(f"❌ {error}", style="red"))
+
+        if results['warnings']:
+            console.print(Panel("Warnings", border_style="yellow", expand=False))
+            for warning in results['warnings']:
+                console.print(Text(f"⚠️ {warning}", style="yellow"))
+
+        # Summary Table
+        summary_table = Table(title="Validation Summary")
+        summary_table.add_column("Result")
+        summary_table.add_column("Errors", justify="right")
+        summary_table.add_column("Warnings", justify="right")
+
+        status = results.get('status', 'failed')
+        summary = results.get('summary', {'errors': len(results['errors']), 'warnings': len(results['warnings'])})
+
+        if status == 'passed':
+            result_text = "Passed"
+            style = "green"
+        else:
+            result_text = "Failed"
+            style = "red"
+
+        summary_table.add_row(
+            f"[bold {style}]{result_text}[/bold {style}]",
+            str(summary['errors']),
+            str(summary['warnings'])
+        )
+        console.print(summary_table)
+
+        if status != 'passed':
+            raise typer.Exit(code=1)
+        else:
+            console.print("\n[bold green]✅ Bundle is valid.[/bold green]")
+
+    except Exception as e:
+        # Catch exceptions from the core function itself
+        console.print(Panel(f"[bold red]An unexpected error occurred:[/bold red]\n{e}", title="Validation Error"))
         raise typer.Exit(code=1)
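The way the command consumes the return value implies a plain-dictionary report from validate_bundle; a sketch of that shape, with illustrative values rather than output from a real run:

    # Hypothetical report, shaped the way bundle_validate reads it above.
    results = {
        "status": "failed",  # 'passed' or 'failed'
        "errors": ["Checksum mismatch for binaries/libmy_plugin.so"],
        "warnings": ["Signature file 'manifest.sig' is missing."],
        "summary": {"errors": 1, "warnings": 1},
    }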
@@ -1,16 +1,11 @@
 # fourdst/cli/common/config.py
 
-from pathlib import Path
-
-FOURDST_CONFIG_DIR = Path.home() / ".config" / "fourdst"
-LOCAL_TRUST_STORE_PATH = FOURDST_CONFIG_DIR / "keys"
-CROSS_FILES_PATH = FOURDST_CONFIG_DIR / "cross"
-CACHE_PATH = FOURDST_CONFIG_DIR / "cache"
-ABI_CACHE_FILE = CACHE_PATH / "abi_identifier.json"
-DOCKER_BUILD_IMAGES = {
-    "x86_64 (manylinux_2_28)": "quay.io/pypa/manylinux_2_28_x86_64",
-    "aarch64 (manylinux_2_28)": "quay.io/pypa/manylinux_2_28_aarch64",
-    "i686 (manylinux_2_28)": "quay.io/pypa/manylinux_2_28_i686",
-    "ppc64le (manylinux_2_28)": "quay.io/pypa/manylinux_2_28_ppc64le",
-    "s390x (manylinux_2_28)": "quay.io/pypa/manylinux_2_28_s390x"
-}
+# This file is now a proxy for the core config to maintain compatibility.
+from fourdst.core.config import (
+    FOURDST_CONFIG_DIR,
+    LOCAL_TRUST_STORE_PATH,
+    CROSS_FILES_PATH,
+    CACHE_PATH,
+    ABI_CACHE_FILE,
+    DOCKER_BUILD_IMAGES
+)
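Because the old names are re-exported, existing call sites keep working unchanged:

    # Old-style import still resolves; the values now come from fourdst.core.config.
    from fourdst.cli.common.config import DOCKER_BUILD_IMAGES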
@@ -3,27 +3,49 @@
 import typer
 import os
 import sys
-import shutil
 import subprocess
 from pathlib import Path
 import importlib.resources
-import json
-import platform
-import zipfile
-import hashlib
-
-try:
-    import docker
-except ImportError:
-    docker = None
 
 from rich.console import Console
 from rich.panel import Panel
 
 console = Console()
 
-from fourdst.cli.common.config import CACHE_PATH, ABI_CACHE_FILE, CROSS_FILES_PATH, DOCKER_BUILD_IMAGES
-from fourdst.cli.common.templates import ABI_DETECTOR_CPP_SRC, ABI_DETECTOR_MESON_SRC
+def run_command_rich(command: list[str], cwd: Path = None, check=True, env: dict = None):
+    """
+    Runs a command and displays its output live using rich.
+    """
+    command_str = ' '.join(command)
+    console.print(Panel(f"Running: [bold cyan]{command_str}[/bold cyan]", title="Command", border_style="blue"))
+
+    process = subprocess.Popen(
+        command,
+        stdout=subprocess.PIPE,
+        stderr=subprocess.PIPE,
+        text=True,
+        cwd=cwd,
+        env=env,
+        bufsize=1,  # line-buffered
+        universal_newlines=True
+    )
+
+    # Read and print stdout and stderr line by line
+    if process.stdout:
+        for line in iter(process.stdout.readline, ''):
+            console.print(line.strip())
+
+    if process.stderr:
+        for line in iter(process.stderr.readline, ''):
+            console.print(f"[yellow]{line.strip()}[/yellow]")
+
+    process.wait()
+
+    if check and process.returncode != 0:
+        console.print(Panel(f"Command failed with exit code {process.returncode}", title="[bold red]Error[/bold red]", border_style="red"))
+        raise subprocess.CalledProcessError(process.returncode, command)
+
+    return process
 
 def get_template_content(template_name: str) -> str:
     """Safely reads content from a template file packaged with the CLI."""
@@ -38,7 +60,6 @@ def run_command(command: list[str], cwd: Path = None, check=True, display_output
     command_str = ' '.join(command)
 
     try:
-        # Pass the env dictionary to subprocess.run
         result = subprocess.run(command, check=check, capture_output=True, text=True, cwd=cwd, env=env)
 
         if display_output and (result.stdout or result.stderr):
@@ -73,324 +94,6 @@ def run_command(command: list[str], cwd: Path = None, check=True, display_output
         raise typer.Exit(code=1)
     return e
 
-def _detect_and_cache_abi():
-    """
-    Compiles and runs a C++ program to detect the compiler ABI, then caches it.
-    """
-    print("Performing one-time native C++ ABI detection...")
-    temp_dir = CACHE_PATH / "abi_detector"
-    if temp_dir.exists():
-        shutil.rmtree(temp_dir)
-    temp_dir.mkdir(parents=True)
-
-    try:
-        (temp_dir / "main.cpp").write_text(ABI_DETECTOR_CPP_SRC)
-        (temp_dir / "meson.build").write_text(ABI_DETECTOR_MESON_SRC)
-
-        print(" - Configuring detector...")
-        run_command(["meson", "setup", "build"], cwd=temp_dir, display_output=True)
-        print(" - Compiling detector...")
-        run_command(["meson", "compile", "-C", "build"], cwd=temp_dir, display_output=True)
-
-        detector_exe = temp_dir / "build" / "detector"
-        print(" - Running detector...")
-        proc = subprocess.run([str(detector_exe)], check=True, capture_output=True, text=True)
-
-        abi_details = {}
-        for line in proc.stdout.strip().split('\n'):
-            if '=' in line:
-                key, value = line.split('=', 1)
-                abi_details[key.strip()] = value.strip()
-
-        compiler = abi_details.get('compiler', 'unk_compiler')
-        stdlib = abi_details.get('stdlib', 'unk_stdlib')
-
-        # --- MODIFIED LOGIC FOR MACOS VERSIONING ---
-        # On macOS, the OS version is more useful than the internal libc++ version.
-        # But for the generic host detection, we still use the detected version.
-        # The targeting logic will override this.
-        if sys.platform == "darwin":
-            # The C++ detector provides the internal _LIBCPP_VERSION
-            stdlib_version = abi_details.get('stdlib_version', 'unk_stdlib_version')
-            detected_os = "macos"
-        else:
-            # On Linux, this will be the glibc version
-            stdlib_version = abi_details.get('stdlib_version', 'unk_stdlib_version')
-            detected_os = abi_details.get("os", "linux")
-
-        abi = abi_details.get('abi', 'unk_abi')
-        abi_string = f"{compiler}-{stdlib}-{stdlib_version}-{abi}"
-
-        arch = platform.machine()
-
-        platform_identifier = {
-            "triplet": f"{arch}-{detected_os}",
-            "abi_signature": abi_string,
-            "details": abi_details,
-            "is_native": True,
-            "cross_file": None,
-            "docker_image": None,
-            "arch": arch
-        }
-
-        with open(ABI_CACHE_FILE, 'w') as f:
-            json.dump(platform_identifier, f, indent=2)
-
-        print(f"✅ Native ABI detected and cached: {abi_string}")
-        return platform_identifier
-
-    finally:
-        if temp_dir.exists():
-            shutil.rmtree(temp_dir)
-
-def get_platform_identifier() -> dict:
-    """
-    Gets the native platform identifier, using a cached value if available.
-    """
-    if ABI_CACHE_FILE.exists():
-        with open(ABI_CACHE_FILE, 'r') as f:
-            return json.load(f)
-    else:
-        return _detect_and_cache_abi()
-
-def get_macos_targeted_platform_identifier(target_version: str) -> dict:
-    """
-    Generates a platform identifier for a specific target macOS version.
-    This bypasses host detection for the version string.
-    """
-    # We still need the host's compiler info, so we run detection if not cached.
-    host_platform = get_platform_identifier()
-    host_details = host_platform['details']
-
-    compiler = host_details.get('compiler', 'clang')
-    stdlib = host_details.get('stdlib', 'libc++')
-    abi = host_details.get('abi', 'libc++_abi')
-    arch = platform.machine()
-
-    abi_string = f"{compiler}-{stdlib}-{target_version}-{abi}"
-
-    return {
-        "triplet": f"{arch}-macos",
-        "abi_signature": abi_string,
-        "details": {
-            "os": "macos",
-            "compiler": compiler,
-            "compiler_version": host_details.get('compiler_version'),
-            "stdlib": stdlib,
-            "stdlib_version": target_version,  # The key change is here
-            "abi": abi,
-        },
-        "is_native": True,
-        "cross_file": None,
-        "docker_image": None,
-        "arch": arch
-    }
-
-def get_available_build_targets() -> list:
-    """Gets native, cross-compilation, and Docker build targets."""
-    targets = [get_platform_identifier()]
-
-    # Add cross-file targets
-    CROSS_FILES_PATH.mkdir(exist_ok=True)
-    for cross_file in CROSS_FILES_PATH.glob("*.cross"):
-        triplet = cross_file.stem
-        targets.append({
-            "triplet": triplet,
-            "abi_signature": f"cross-{triplet}",
-            "is_native": False,
-            "cross_file": str(cross_file.resolve()),
-            "docker_image": None
-        })
-
-    # Add Docker targets if Docker is available
-    if docker:
-        try:
-            client = docker.from_env()
-            client.ping()
-            for name, image in DOCKER_BUILD_IMAGES.items():
-                arch = name.split(' ')[0]
-                targets.append({
-                    "triplet": f"{arch}-linux",
-                    "abi_signature": f"docker-{image}",
-                    "is_native": False,
-                    "cross_file": None,
-                    "docker_image": image,
-                    "arch": arch
-                })
-        except Exception:
-            typer.secho("Warning: Docker is installed but the daemon is not running. Docker targets are unavailable.", fg=typer.colors.YELLOW)
-
-    return targets
-
-def _build_plugin_for_target(sdist_path: Path, build_dir: Path, target: dict):
-    """Builds a plugin natively or with a cross file."""
-    source_dir = build_dir / "src"
-    if source_dir.exists():
-        shutil.rmtree(source_dir)
-
-    with zipfile.ZipFile(sdist_path, 'r') as sdist_zip:
-        sdist_zip.extractall(source_dir)
-
-    setup_cmd = ["meson", "setup"]
-    if target["cross_file"]:
-        setup_cmd.extend(["--cross-file", target["cross_file"]])
-    setup_cmd.append("build")
-
-    run_command(setup_cmd, cwd=source_dir, display_output=True)
-    run_command(["meson", "compile", "-C", "build"], cwd=source_dir, display_output=True)
-
-    meson_build_dir = source_dir / "build"
-    compiled_lib = next(meson_build_dir.rglob("lib*.so"), None) or next(meson_build_dir.rglob("lib*.dylib"), None)
-    if not compiled_lib:
-        raise FileNotFoundError("Could not find compiled library after build.")
-
-    return compiled_lib, target  # Return target as ABI is pre-determined
-
-def _build_plugin_in_docker(sdist_path: Path, build_dir: Path, target: dict, plugin_name: str):
-    """Builds a plugin inside a Docker container."""
-    client = docker.from_env()
-    image_name = target["docker_image"]
-
-    # Find arch from DOCKER_BUILD_IMAGES to create a clean triplet later
-    arch = "unknown_arch"
-    for name, img in DOCKER_BUILD_IMAGES.items():
-        if img == image_name:
-            arch = name.split(' ')[0]
-            break
-
-    typer.echo(f" - Pulling Docker image '{image_name}' (if necessary)...")
-    client.images.pull(image_name)
-
-    source_dir = build_dir / "src"
-    if source_dir.exists():
-        shutil.rmtree(source_dir)
-
-    with zipfile.ZipFile(sdist_path, 'r') as sdist_zip:
-        sdist_zip.extractall(source_dir)
-
-    # This script will be run inside the container
-    build_script = f"""
-set -e
-echo "--- Installing build dependencies ---"
-export PATH="/opt/python/cp313-cp313/bin:$PATH"
-pip install meson ninja cmake
-
-echo " -> ℹ meson version: $(meson --version) [$(which meson)]"
-echo " -> ℹ ninja version: $(ninja --version) [$(which ninja)]"
-echo " -> ℹ cmake version: $(cmake --version) [$(which cmake)]"
-
-echo "--- Configuring with Meson ---"
-meson setup /build/meson_build
-echo "--- Compiling with Meson ---"
-meson compile -C /build/meson_build
-echo "--- Running ABI detector ---"
-# We need to build and run the ABI detector inside the container too
-mkdir /tmp/abi && cd /tmp/abi
-echo "{ABI_DETECTOR_CPP_SRC.replace('"', '\\"')}" > main.cpp
-echo "{ABI_DETECTOR_MESON_SRC.replace('"', '\\"')}" > meson.build
-meson setup build && meson compile -C build
-./build/detector > /build/abi_details.txt
-"""
-
-    container_build_dir = Path("/build")
-
-    typer.echo(" - Running build container...")
-    container = client.containers.run(
-        image=image_name,
-        command=["/bin/sh", "-c", build_script],
-        volumes={str(source_dir.resolve()): {'bind': str(container_build_dir), 'mode': 'rw'}},
-        working_dir=str(container_build_dir),
-        detach=True
-    )
-
-    # Stream logs
-    for line in container.logs(stream=True, follow=True):
-        typer.echo(f"    [docker] {line.decode('utf-8').strip()}")
-
-    result = container.wait()
-    if result["StatusCode"] != 0:
-        # The container is stopped, but we can still inspect its filesystem by restarting it briefly.
-        log_output = container.logs()
-        container.remove()  # Clean up before raising
-        typer.secho(f"Build failed inside Docker. Full log:\n{log_output.decode('utf-8')}", fg=typer.colors.RED)
-        raise subprocess.CalledProcessError(result["StatusCode"], "Build inside Docker failed.")
-
-    # Retrieve artifacts by searching inside the container's filesystem
-    typer.echo(" - Locating compiled library in container...")
-    meson_build_dir_str = (container_build_dir / "meson_build").as_posix()
-    expected_lib_name = f"lib{plugin_name}.so"
-
-    find_cmd = f"find {meson_build_dir_str} -name {expected_lib_name}"
-
-    # We need to run the find command in the now-stopped container.
-    # We can't use exec_run on a stopped container, but we can create a new
-    # one that uses the same filesystem (volume) to find the file.
-    try:
-        find_output = client.containers.run(
-            image=image_name,
-            command=["/bin/sh", "-c", find_cmd],
-            volumes={str(source_dir.resolve()): {'bind': str(container_build_dir), 'mode': 'ro'}},
-            remove=True,  # Clean up the find container immediately
-            detach=False
-        )
-        found_path_str = find_output.decode('utf-8').strip()
-        if not found_path_str:
-            raise FileNotFoundError("Find command returned no path.")
-        compiled_lib = Path(found_path_str)
-        typer.echo(f" - Found library at: {compiled_lib}")
-
-    except Exception as e:
-        typer.secho(f" - Error: Could not locate '{expected_lib_name}' inside the container.", fg=typer.colors.RED)
-        typer.secho(f"   Details: {e}", fg=typer.colors.RED)
-        raise FileNotFoundError("Could not find compiled library in container after a successful build.")
-
-    # Get the ABI details from the container
-    abi_details_content = ""
-    bits, _ = container.get_archive(str(container_build_dir / "abi_details.txt"))
-    for chunk in bits:
-        abi_details_content += chunk.decode('utf-8')
-
-    # We need to find the actual file content within the tar stream
-    # This is a simplification; a real implementation would use the `tarfile` module
-    actual_content = abi_details_content.split('\n', 1)[1] if '\n' in abi_details_content else abi_details_content
-    actual_content = actual_content.split('main.cpp')[1].strip() if 'main.cpp' in actual_content else actual_content
-    actual_content = actual_content.rsplit('0755', 1)[0].strip() if '0755' in actual_content else actual_content
-
-    abi_details = {}
-    for line in actual_content.strip().split('\n'):
-        if '=' in line:
-            key, value = line.split('=', 1)
-            abi_details[key.strip()] = value.strip()
-
-    compiler = abi_details.get('compiler', 'unk_compiler')
-    stdlib = abi_details.get('stdlib', 'unk_stdlib')
-    stdlib_version = abi_details.get('stdlib_version', 'unk_stdlib_version')
-    abi = abi_details.get('abi', 'unk_abi')
-    abi_string = f"{compiler}-{stdlib}-{stdlib_version}-{abi}"
-
-    final_target = {
-        "triplet": f"{arch}-{abi_details.get('os', 'linux')}",
-        "abi_signature": abi_string,
-        "is_native": False,
-        "cross_file": None,
-        "docker_image": image_name,
-        "arch": arch
-    }
-
-    # Copy the binary out
-    local_lib_path = build_dir / compiled_lib.name
-    bits, _ = container.get_archive(str(compiled_lib))
-    with open(local_lib_path, 'wb') as f:
-        for chunk in bits:
-            f.write(chunk)
-
-    container.remove()
-
-    return local_lib_path, final_target
 
 def is_abi_compatible(host_abi: str, binary_abi: str) -> bool:
     """
     Checks if a binary's ABI is compatible with the host's ABI.
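A minimal sketch of driving the new live-output helper (the module path is inferred from the surrounding hunks; the plugin directory is hypothetical):

    from pathlib import Path
    from fourdst.cli.common.utils import run_command_rich

    # Streams meson's configure output into the rich console panel shown above.
    run_command_rich(["meson", "setup", "build"], cwd=Path("./my_plugin"))

One design caveat: the helper drains stdout to EOF before touching stderr, so a command that fills the stderr pipe buffer while stdout is still open could stall; merging stderr into stdout with stderr=subprocess.STDOUT would be one way to avoid that.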
@@ -39,6 +39,14 @@ app = typer.Typer(
 
 plugin_app = typer.Typer(name="plugin", help="Commands for managing individual fourdst plugins.")
 bundle_app = typer.Typer(name="bundle", help="Commands for creating, signing, and managing plugin bundles.")
 
+bundle_app.command("create")(bundle_create)
+bundle_app.command("fill")(bundle_fill)
+bundle_app.command("sign")(bundle_sign)
+bundle_app.command("inspect")(bundle_inspect)
+bundle_app.command("clear")(bundle_clear)
+bundle_app.command("diff")(bundle_diff)
+bundle_app.command("validate")(bundle_validate)
 cache_app = typer.Typer(name="cache", help="Commands for managing the local cache.")
 
 keys_app = typer.Typer(name="keys", help="Commands for cryptographic key generation and management.")
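Calling bundle_app.command("sign")(bundle_sign) is the imperative form of decorating the function at its definition site, which keeps the command modules free of app wiring:

    # Equivalent registration at definition site:
    # @bundle_app.command("sign")
    # def bundle_sign(...): ...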
@@ -4,6 +4,7 @@
 
 class {class_name} final : public {interface} {{
 public:
+    using {interface}::{interface};
     ~{class_name}() override {{
         // Implement any custom destruction logic here
     }}
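The added `using {interface}::{interface};` declaration inherits the interface's constructors, so a generated plugin class does not have to redeclare them just to forward arguments to the base.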
fourdst/core/__init__.py (new file, empty)

fourdst/core/build.py (new file, 212 lines)
@@ -0,0 +1,212 @@
# fourdst/core/build.py

import os
import shutil  # needed by the source-tree cleanup below; missing from the commit's original import list
import subprocess
import zipfile
import io
import tarfile
from pathlib import Path

try:
    import docker
except ImportError:
    docker = None

from fourdst.core.utils import run_command
from fourdst.core.platform import get_platform_identifier, get_macos_targeted_platform_identifier
from fourdst.core.config import CROSS_FILES_PATH, DOCKER_BUILD_IMAGES

def get_available_build_targets(progress_callback=None):
    """Gets native, cross-compilation, and Docker build targets."""
    def report_progress(message):
        if progress_callback:
            progress_callback(message)

    targets = [get_platform_identifier()]

    # Add cross-file targets
    CROSS_FILES_PATH.mkdir(exist_ok=True)
    for cross_file in CROSS_FILES_PATH.glob("*.cross"):
        triplet = cross_file.stem
        targets.append({
            "triplet": triplet,
            "abi_signature": f"cross-{triplet}",
            "is_native": False,
            "cross_file": str(cross_file.resolve()),
            "docker_image": None,
            'type': 'cross'
        })

    # Add Docker targets if Docker is available
    if docker:
        try:
            client = docker.from_env()
            client.ping()
            for name, image in DOCKER_BUILD_IMAGES.items():
                arch = name.split(' ')[0]
                targets.append({
                    "triplet": f"{arch}-linux",
                    "abi_signature": f"docker-{image}",
                    "is_native": False,
                    "cross_file": None,
                    "docker_image": image,
                    "arch": arch,
                    'type': 'docker'
                })
        except Exception:
            report_progress("Warning: Docker is installed but the daemon is not running. Docker targets are unavailable.")

    return targets

def build_plugin_for_target(sdist_path: Path, build_dir: Path, target: dict, progress_callback=None):
    """Builds a plugin natively or with a cross file."""
    def report_progress(message):
        if progress_callback:
            progress_callback(message)

    source_dir = build_dir / "src"
    if source_dir.exists():
        shutil.rmtree(source_dir)

    with zipfile.ZipFile(sdist_path, 'r') as sdist_zip:
        sdist_zip.extractall(source_dir)

    setup_cmd = ["meson", "setup"]
    if target.get("cross_file"):
        setup_cmd.extend(["--cross-file", target["cross_file"]])
    setup_cmd.append("build")

    run_command(setup_cmd, cwd=source_dir, progress_callback=progress_callback)
    run_command(["meson", "compile", "-C", "build"], cwd=source_dir, progress_callback=progress_callback)

    meson_build_dir = source_dir / "build"
    compiled_lib = next(meson_build_dir.rglob("lib*.so"), None) or next(meson_build_dir.rglob("lib*.dylib"), None)
    if not compiled_lib:
        raise FileNotFoundError("Could not find compiled library after build.")

    return compiled_lib, target

def build_plugin_in_docker(sdist_path: Path, build_dir: Path, target: dict, plugin_name: str, progress_callback=None):
    """Builds a plugin inside a Docker container."""
    def report_progress(message):
        if progress_callback:
            progress_callback(message)

    client = docker.from_env()
    image_name = target["docker_image"]

    arch = target.get("arch", "unknown_arch")

    report_progress(f" - Pulling Docker image '{image_name}' (if necessary)...")
    client.images.pull(image_name)

    source_dir = build_dir / "src"
    if source_dir.exists():
        shutil.rmtree(source_dir)

    with zipfile.ZipFile(sdist_path, 'r') as sdist_zip:
        sdist_zip.extractall(source_dir)

    from fourdst.core.platform import ABI_DETECTOR_CPP_SRC, ABI_DETECTOR_MESON_SRC
    build_script = f"""
set -e
echo \"--- Installing build dependencies ---\"
export PATH=\"/opt/python/cp313-cp313/bin:$PATH\"
dnf install -y openssl-devel
pip install meson ninja cmake

echo \"--- Configuring with Meson ---\"
meson setup /build/meson_build
echo \"--- Compiling with Meson ---\"
meson compile -C /build/meson_build
echo \"--- Running ABI detector ---\"
mkdir /tmp/abi && cd /tmp/abi
echo \"{ABI_DETECTOR_CPP_SRC.replace('"', '\\"')}\" > main.cpp
echo \"{ABI_DETECTOR_MESON_SRC.replace('"', '\\"')}\" > meson.build
meson setup build && meson compile -C build
./build/detector > /build/abi_details.txt
"""

    container_build_dir = Path("/build")

    report_progress(" - Running build container...")
    container = client.containers.run(
        image=image_name,
        command=["/bin/sh", "-c", build_script],
        volumes={str(source_dir.resolve()): {'bind': str(container_build_dir), 'mode': 'rw'}},
        working_dir=str(container_build_dir),
        detach=True
    )

    for line in container.logs(stream=True, follow=True):
        report_progress(f"    [docker] {line.decode('utf-8').strip()}")

    result = container.wait()
    if result["StatusCode"] != 0:
        log_output = container.logs()
        container.remove()
        raise subprocess.CalledProcessError(result["StatusCode"], f"Build inside Docker failed. Full log:\n{log_output.decode('utf-8')}")

    report_progress(" - Locating compiled library in container...")
    meson_build_dir_str = (container_build_dir / "meson_build").as_posix()
    expected_lib_name = f"lib{plugin_name}.so"

    find_cmd = f"find {meson_build_dir_str} -name {expected_lib_name}"

    find_output = client.containers.run(
        image=image_name,
        command=["/bin/sh", "-c", find_cmd],
        volumes={str(source_dir.resolve()): {'bind': str(container_build_dir), 'mode': 'ro'}},
        remove=True,
        detach=False
    )
    found_path_str = find_output.decode('utf-8').strip()
    if not found_path_str:
        raise FileNotFoundError(f"Could not locate '{expected_lib_name}' inside the container.")
    compiled_lib_path_in_container = Path(found_path_str)

    # Use the tarfile module for robust extraction
    bits, _ = container.get_archive(str(container_build_dir / "abi_details.txt"))
    with tarfile.open(fileobj=io.BytesIO(b''.join(bits))) as tar:
        member = tar.getmembers()[0]
        extracted_file = tar.extractfile(member)
        if not extracted_file:
            raise FileNotFoundError("Could not extract abi_details.txt from container archive.")
        abi_details_content = extracted_file.read()

    abi_details = {}
    for line in abi_details_content.decode('utf-8').strip().split('\n'):
        if '=' in line:
            key, value = line.split('=', 1)
            abi_details[key.strip()] = value.strip()

    compiler = abi_details.get('compiler', 'unk_compiler')
    stdlib = abi_details.get('stdlib', 'unk_stdlib')
    stdlib_version = abi_details.get('stdlib_version', 'unk_stdlib_version')
    abi = abi_details.get('abi', 'unk_abi')
    abi_string = f"{compiler}-{stdlib}-{stdlib_version}-{abi}"

    final_target = {
        "triplet": f"{arch}-{abi_details.get('os', 'linux')}",
        "abi_signature": abi_string,
        "is_native": False,
        "cross_file": None,
        "docker_image": image_name,
        "arch": arch
    }

    local_lib_path = build_dir / compiled_lib_path_in_container.name
    bits, _ = container.get_archive(str(compiled_lib_path_in_container))
    with tarfile.open(fileobj=io.BytesIO(b''.join(bits))) as tar:
        member = tar.getmembers()[0]
        extracted_file = tar.extractfile(member)
        if not extracted_file:
            raise FileNotFoundError(f"Could not extract {local_lib_path.name} from container archive.")
        with open(local_lib_path, 'wb') as f:
            f.write(extracted_file.read())

    container.remove()

    return local_lib_path, final_target
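A minimal driver sketch under the APIs added above (the sdist and build paths are hypothetical):

    from pathlib import Path
    from fourdst.core.build import get_available_build_targets, build_plugin_for_target

    targets = get_available_build_targets(progress_callback=print)
    native = next(t for t in targets if t.get("is_native"))  # the host platform entry
    lib, target = build_plugin_for_target(
        sdist_path=Path("dist/my_plugin_sdist.zip"),  # hypothetical sdist archive
        build_dir=Path("build_area"),
        target=native,
        progress_callback=print,
    )
    print(f"Built {lib} for {target['triplet']}")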
fourdst/core/bundle.py (new file, 1079 lines; diff suppressed because it is too large)
fourdst/core/config.py (new file, 21 lines)
@@ -0,0 +1,21 @@
# fourdst/core/config.py

from pathlib import Path

FOURDST_CONFIG_DIR = Path.home() / ".config" / "fourdst"
LOCAL_TRUST_STORE_PATH = FOURDST_CONFIG_DIR / "keys"
CROSS_FILES_PATH = FOURDST_CONFIG_DIR / "cross"
CACHE_PATH = FOURDST_CONFIG_DIR / "cache"
ABI_CACHE_FILE = CACHE_PATH / "abi_identifier.json"
DOCKER_BUILD_IMAGES = {
    "x86_64 (manylinux_2_28)": "quay.io/pypa/manylinux_2_28_x86_64",
    "aarch64 (manylinux_2_28)": "quay.io/pypa/manylinux_2_28_aarch64",
    "i686 (manylinux_2_28)": "quay.io/pypa/manylinux_2_28_i686",
    "ppc64le (manylinux_2_28)": "quay.io/pypa/manylinux_2_28_ppc64le",
    "s390x (manylinux_2_28)": "quay.io/pypa/manylinux_2_28_s390x"
}

# Ensure the necessary directories exist
LOCAL_TRUST_STORE_PATH.mkdir(parents=True, exist_ok=True)
CROSS_FILES_PATH.mkdir(parents=True, exist_ok=True)
CACHE_PATH.mkdir(parents=True, exist_ok=True)
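Because the mkdir calls run at module scope, importing the config is enough to materialize the directory tree:

    # First import creates ~/.config/fourdst/{keys,cross,cache} as a side effect.
    from fourdst.core import config
    print(config.ABI_CACHE_FILE)  # ~/.config/fourdst/cache/abi_identifier.json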
fourdst/core/platform.py (new file, 253 lines)
@@ -0,0 +1,253 @@
# fourdst/core/platform.py

import json
import platform
import shutil
import subprocess
from pathlib import Path

from fourdst.core.config import ABI_CACHE_FILE, CACHE_PATH
from fourdst.core.utils import run_command

ABI_DETECTOR_CPP_SRC = """
#include <iostream>
#include <string>
#include <vector>

#ifdef __GNUC__
#if __has_include(<gnu/libc-version.h>)
#include <gnu/libc-version.h>
#endif
#endif

int main() {
    std::string os;
    std::string compiler;
    std::string compiler_version;
    std::string stdlib;
    std::string stdlib_version;
    std::string abi;

#if defined(__APPLE__) && defined(__MACH__)
    os = "macos";
#elif defined(__linux__)
    os = "linux";
#elif defined(_WIN32)
    os = "windows";
#else
    os = "unknown_os";
#endif

#if defined(__clang__)
    compiler = "clang";
    compiler_version = __clang_version__;
#elif defined(__GNUC__)
    compiler = "gcc";
    compiler_version = std::to_string(__GNUC__) + "." + std::to_string(__GNUC_MINOR__) + "." + std::to_string(__GNUC_PATCHLEVEL__);
#elif defined(_MSC_VER)
    compiler = "msvc";
    compiler_version = std::to_string(_MSC_VER);
#else
    compiler = "unknown_compiler";
    compiler_version = "0";
#endif

#if defined(_LIBCPP_VERSION)
    stdlib = "libc++";
    stdlib_version = std::to_string(_LIBCPP_VERSION);
    abi = "libc++_abi"; // On libc++, the ABI is tightly coupled with the library itself.
#elif defined(__GLIBCXX__)
    stdlib = "libstdc++";
#if defined(_GLIBCXX_USE_CXX11_ABI)
    abi = _GLIBCXX_USE_CXX11_ABI == 1 ? "cxx11_abi" : "pre_cxx11_abi";
#else
    abi = "pre_cxx11_abi";
#endif
#if __has_include(<gnu/libc-version.h>)
    stdlib_version = gnu_get_libc_version();
#else
    stdlib_version = "unknown";
#endif
#else
    stdlib = "unknown_stdlib";
    abi = "unknown_abi";
#endif

    std::cout << "os=" << os << std::endl;
    std::cout << "compiler=" << compiler << std::endl;
    std::cout << "compiler_version=" << compiler_version << std::endl;
    std::cout << "stdlib=" << stdlib << std::endl;
    if (!stdlib_version.empty()) {
        std::cout << "stdlib_version=" << stdlib_version << std::endl;
    }
    // Always print the ABI key for consistent parsing
    std::cout << "abi=" << abi << std::endl;

    return 0;
}
"""

ABI_DETECTOR_MESON_SRC = """
project('abi-detector', 'cpp', default_options : ['cpp_std=c++23'])
executable('detector', 'main.cpp')
"""

def _detect_and_cache_abi() -> dict:
    """
    Compiles and runs a C++ program to detect the compiler ABI, then caches it.
    """
    print("Performing one-time native C++ ABI detection...")
    temp_dir = CACHE_PATH / "abi_detector"
    if temp_dir.exists():
        shutil.rmtree(temp_dir)
    temp_dir.mkdir(parents=True)

    try:
        (temp_dir / "main.cpp").write_text(ABI_DETECTOR_CPP_SRC)
        (temp_dir / "meson.build").write_text(ABI_DETECTOR_MESON_SRC)

        print(" - Configuring detector...")
        run_command(["meson", "setup", "build"], cwd=temp_dir)
        print(" - Compiling detector...")
        run_command(["meson", "compile", "-C", "build"], cwd=temp_dir)

        detector_exe = temp_dir / "build" / "detector"
        print(" - Running detector...")
        proc = subprocess.run([str(detector_exe)], check=True, capture_output=True, text=True)

        abi_details = {}
        for line in proc.stdout.strip().split('\n'):
            key, value = line.split('=', 1)
            abi_details[key] = value.strip()

        arch = platform.machine()
        stdlib_version = abi_details.get('stdlib_version', 'unknown')
        abi_string = f"{abi_details['compiler']}-{abi_details['stdlib']}-{stdlib_version}-{abi_details['abi']}"

        platform_data = {
            "os": abi_details['os'],
            "arch": arch,
            "triplet": f"{arch}-{abi_details['os']}",
            "abi_signature": abi_string,
            "details": abi_details,
            "is_native": True,
            "cross_file": None,
            "docker_image": None
        }

        with open(ABI_CACHE_FILE, 'w') as f:
            json.dump(platform_data, f, indent=4)

        print(f" - ABI details cached to {ABI_CACHE_FILE}")
        return platform_data

    finally:
        if temp_dir.exists():
            shutil.rmtree(temp_dir)

def get_platform_identifier() -> dict:
    """
    Gets the native platform identifier, using a cached value if available.
    """
    if ABI_CACHE_FILE.exists():
        with open(ABI_CACHE_FILE, 'r') as f:
            plat = json.load(f)
    else:
        plat = _detect_and_cache_abi()
    plat['type'] = 'native'
    return plat

def _parse_version(version_str: str) -> tuple:
    """Parses a version string like '12.3.1' into a tuple of integers."""
    return tuple(map(int, (version_str.split('.') + ['0', '0'])[:3]))

def is_abi_compatible(host_platform: dict, binary_platform: dict) -> tuple[bool, str]:
    """
    Checks if a binary's platform is compatible with the host's platform.
    This is more nuanced than a simple string comparison, allowing for forward compatibility.
    - macOS: A binary for an older OS version can run on a newer one, if the toolchain matches.
    - Linux: A binary for an older GLIBC version can run on a newer one.
    """
    required_keys = ['os', 'arch', 'abi_signature']
    if not all(key in host_platform for key in required_keys):
        return False, f"Host platform data is malformed. Missing keys: {[k for k in required_keys if k not in host_platform]}"
    if not all(key in binary_platform for key in required_keys):
        return False, f"Binary platform data is malformed. Missing keys: {[k for k in required_keys if k not in binary_platform]}"

    host_os = host_platform.get('os') or host_platform.get('details', {}).get('os')
    binary_os = binary_platform.get('os') or binary_platform.get('details', {}).get('os')
    host_arch = host_platform.get('arch') or host_platform.get('details', {}).get('arch')
    binary_arch = binary_platform.get('arch') or binary_platform.get('details', {}).get('arch')

    if host_os != binary_os:
        return False, f"OS mismatch: host is {host_os}, binary is {binary_os}"
    if host_arch != binary_arch:
        return False, f"Architecture mismatch: host is {host_arch}, binary is {binary_arch}"

    host_sig = host_platform['abi_signature']
    binary_sig = binary_platform['abi_signature']

    try:
        host_parts = host_sig.split('-')
        binary_parts = binary_sig.split('-')

        # Find version numbers in any position
        host_ver_str = next((p for p in host_parts if p[0].isdigit()), None)
        binary_ver_str = next((p for p in binary_parts if p[0].isdigit()), None)

        if not host_ver_str or not binary_ver_str:
            return False, "Could not extract version from ABI signature"

        host_ver = _parse_version(host_ver_str)
        binary_ver = _parse_version(binary_ver_str)

        if host_platform['os'] == 'macos':
            # For macOS, also check for clang and libc++
            if 'clang' not in binary_sig:
                return False, "Toolchain mismatch: 'clang' not in binary signature"
            if 'libc++' not in binary_sig:
                return False, "Toolchain mismatch: 'libc++' not in binary signature"
            if host_ver < binary_ver:
                return False, f"macOS version too old: host is {host_ver_str}, binary needs {binary_ver_str}"
            return True, "Compatible"

        elif host_platform['os'] == 'linux':
            if host_ver < binary_ver:
                return False, f"GLIBC version too old: host is {host_ver_str}, binary needs {binary_ver_str}"
            return True, "Compatible"

    except (IndexError, ValueError, StopIteration):
        return False, "Malformed ABI signature string"

    return False, "Unknown compatibility check failure"

def get_macos_targeted_platform_identifier(target_version: str) -> dict:
    """
    Generates a platform identifier for a specific target macOS version.
    """
    host_platform = get_platform_identifier()
    host_details = host_platform['details']

    compiler = host_details.get('compiler', 'clang')
    stdlib = host_details.get('stdlib', 'libc++')
    abi = host_details.get('abi', 'libc++_abi')
    arch = platform.machine()

    abi_string = f"{compiler}-{stdlib}-{target_version}-{abi}"

    return {
        "triplet": f"{arch}-macos",
        "abi_signature": abi_string,
        "details": {
            "os": "macos",
            "compiler": compiler,
            "compiler_version": host_details.get('compiler_version'),
            "stdlib": stdlib,
            "stdlib_version": target_version,
            "abi": abi,
        },
        "is_native": True,
        "cross_file": None,
        "docker_image": None,
        "arch": arch
    }
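A compatibility-check sketch using the helpers above (the signature strings are illustrative):

    from fourdst.core.platform import is_abi_compatible

    host = {"os": "linux", "arch": "x86_64", "abi_signature": "gcc-libstdc++-2.39-cxx11_abi"}
    binary = {"os": "linux", "arch": "x86_64", "abi_signature": "gcc-libstdc++-2.28-cxx11_abi"}
    ok, reason = is_abi_compatible(host, binary)
    # ok is True: the host's GLIBC 2.39 satisfies the binary's 2.28 requirement.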
fourdst/core/utils.py (new file, 47 lines)
@@ -0,0 +1,47 @@
# fourdst/core/utils.py

import subprocess
from pathlib import Path
import hashlib

def run_command(command: list[str], cwd: Path = None, check=True, progress_callback=None, input: bytes = None, env: dict = None, binary_output: bool = False):
    """Runs a command, optionally reporting progress and using a custom environment."""
    command_str = ' '.join(command)
    if progress_callback:
        progress_callback(f"Running command: {command_str}")

    try:
        result = subprocess.run(
            command,
            check=check,
            capture_output=True,
            text=not binary_output,
            input=input,
            cwd=cwd,
            env=env
        )

        if progress_callback and result.stdout:
            if binary_output:
                progress_callback(f" - STDOUT: <binary data>")
            else:
                progress_callback(f" - STDOUT: {result.stdout.strip()}")
        if progress_callback and result.stderr:
            progress_callback(f" - STDERR: {result.stderr.strip()}")

        return result
    except subprocess.CalledProcessError as e:
        error_message = f"""Command '{command_str}' failed with exit code {e.returncode}.\n--- STDOUT ---\n{e.stdout.strip()}\n--- STDERR ---\n{e.stderr.strip()}\n"""
        if progress_callback:
            progress_callback(error_message)
        if check:
            raise Exception(error_message) from e
        return e

def calculate_sha256(file_path: Path) -> str:
    """Calculates the SHA256 checksum of a file."""
    sha256_hash = hashlib.sha256()
    with open(file_path, "rb") as f:
        for byte_block in iter(lambda: f.read(4096), b""):
            sha256_hash.update(byte_block)
    return sha256_hash.hexdigest()
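The manifest checksums seen earlier in this diff are formed by prefixing this helper's hex digest; a short sketch (the file path is hypothetical):

    from pathlib import Path
    from fourdst.core.utils import calculate_sha256

    # Matches the "sha256:<hex>" format compared against manifest entries.
    digest = "sha256:" + calculate_sha256(Path("binaries/libmy_plugin.so"))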
meson.build
@@ -16,3 +16,22 @@ subdir('build-python')
 
 # Build python bindings
 subdir('src-pybind')
+
+# Bundle the Python backend for the Electron app
+
+if get_option('build-py-backend')
+  pyinstaller_exe = find_program('pyinstaller', required : true)
+  electron_src_dir = meson.current_source_dir() / 'electron'
+
+  custom_target('fourdst-backend',
+    input : electron_src_dir / 'fourdst-backend.spec',
+    # The output is the directory that PyInstaller creates.
+    # We are interested in the executable inside it.
+    output : 'fourdst-backend',
+    # The command to run. We tell PyInstaller where to put the final executable.
+    command : [pyinstaller_exe, '--distpath', meson.current_build_dir() / 'electron/dist', '--workpath', meson.current_build_dir() / 'electron/build', '--noconfirm', '@INPUT@'],
+    # This ensures the backend is built whenever you run 'meson compile'.
+    build_by_default : true
+  )
+endif
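The backend bundle is opt-in: configuring with `meson setup build -Dbuild-py-backend=true` (the boolean option is declared in the next hunk) should pull in PyInstaller and place the `fourdst-backend` executable under `electron/dist` in the build directory, per the `--distpath` argument above.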
@@ -1,2 +1,3 @@
 option('pkg-config', type: 'boolean', value: false, description: 'generate pkg-config file for all libraries and fourdst (defaults to false to allow easy pip building)')
+option('build-py-backend', type: 'boolean', value: false, description: 'use pyinstaller to build the python backend for the electron app')
 option('tests', type: 'boolean', value: false, description: 'compile subproject tests')
@@ -21,13 +21,14 @@ maintainers = [
 ]
 
 dependencies = [
-    "typer[all]",
+    "typer",
     "libclang",
    "questionary",
     "rich",
     "pyyaml",
     "cryptography",
-    "pyOpenSSL"
+    "pyOpenSSL",
+    "pyinstaller"
 ]
 
 [project.scripts]