build(electron): add more robust dependency checking to the Electron app
@@ -70,6 +70,19 @@ async function buildBackend() {
            execSync(`chmod +x "${backendExecutable}"`);
            console.log('✅ Backend executable permissions set');
        }

        // Validate backend dependencies
        console.log('🔍 Validating backend dependencies...');
        const { DependencyValidator } = require('./validate-dependencies.js');
        const validator = new DependencyValidator();

        // Test backend execution to ensure all dependencies are embedded
        const testResult = await validator.validatePythonBackend();
        if (!testResult) {
            throw new Error('Backend dependency validation failed. Check that all Python dependencies are properly bundled.');
        }
        console.log('✅ Backend dependency validation passed');

    } else {
        throw new Error(`Backend executable not found at: ${backendExecutable}`);
    }

329
electron/check-runtime-deps.js
Normal file
@@ -0,0 +1,329 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
/**
|
||||
* Runtime Dependency Checker for Packaged 4DSTAR App
|
||||
*
|
||||
* This script can be run inside a packaged app to verify all dependencies
|
||||
* are available at runtime. Useful for testing the .dmg on different user accounts.
|
||||
*/
|
||||
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const { spawn } = require('child_process');
|
||||
|
||||
class RuntimeDependencyChecker {
|
||||
constructor() {
|
||||
this.isPackaged = process.env.NODE_ENV === 'production' || process.resourcesPath;
|
||||
this.appPath = this.isPackaged ? process.resourcesPath : __dirname;
|
||||
this.results = {
|
||||
environment: {},
|
||||
backend: {},
|
||||
nodeModules: {},
|
||||
permissions: {},
|
||||
errors: [],
|
||||
warnings: []
|
||||
};
|
||||
}
|
||||
|
||||
log(message, type = 'info') {
|
||||
const prefix = {
|
||||
'info': '📋',
|
||||
'success': '✅',
|
||||
'warning': '⚠️',
|
||||
'error': '❌'
|
||||
}[type] || 'ℹ️';
|
||||
|
||||
console.log(`${prefix} ${message}`);
|
||||
}
|
||||
|
||||
checkEnvironment() {
|
||||
this.log('Checking runtime environment...', 'info');
|
||||
|
||||
this.results.environment = {
|
||||
platform: process.platform,
|
||||
arch: process.arch,
|
||||
nodeVersion: process.version,
|
||||
electronVersion: process.versions.electron,
|
||||
isPackaged: this.isPackaged,
|
||||
appPath: this.appPath,
|
||||
resourcesPath: process.resourcesPath || 'N/A',
|
||||
execPath: process.execPath,
|
||||
cwd: process.cwd(),
|
||||
user: process.env.USER || process.env.USERNAME || 'unknown',
|
||||
home: process.env.HOME || process.env.USERPROFILE || 'unknown'
|
||||
};
|
||||
|
||||
this.log(`Platform: ${this.results.environment.platform}`, 'info');
|
||||
this.log(`Architecture: ${this.results.environment.arch}`, 'info');
|
||||
this.log(`Packaged: ${this.results.environment.isPackaged}`, 'info');
|
||||
this.log(`User: ${this.results.environment.user}`, 'info');
|
||||
this.log(`App Path: ${this.results.environment.appPath}`, 'info');
|
||||
}
|
||||
|
||||
checkBackendExecutable() {
|
||||
this.log('Checking Python backend executable...', 'info');
|
||||
|
||||
const executableName = process.platform === 'win32' ? 'fourdst-backend.exe' : 'fourdst-backend';
|
||||
let backendPath;
|
||||
|
||||
if (this.isPackaged) {
|
||||
backendPath = path.join(this.appPath, 'backend', executableName);
|
||||
} else {
|
||||
backendPath = path.join(__dirname, '..', 'build', 'electron', 'dist', 'fourdst-backend', executableName);
|
||||
}
|
||||
|
||||
this.results.backend.expectedPath = backendPath;
|
||||
this.results.backend.exists = fs.existsSync(backendPath);
|
||||
|
||||
if (!this.results.backend.exists) {
|
||||
this.results.errors.push(`Backend executable not found: ${backendPath}`);
|
||||
this.log(`Backend executable not found: ${backendPath}`, 'error');
|
||||
return false;
|
||||
}
|
||||
|
||||
this.log(`Backend executable found: ${backendPath}`, 'success');
|
||||
|
||||
// Check permissions
|
||||
try {
|
||||
const stats = fs.statSync(backendPath);
|
||||
this.results.backend.size = stats.size;
|
||||
this.results.backend.mode = stats.mode.toString(8);
|
||||
this.results.backend.isExecutable = !!(stats.mode & parseInt('111', 8));
|
||||
|
||||
if (!this.results.backend.isExecutable) {
|
||||
this.results.errors.push('Backend executable lacks execute permissions');
|
||||
this.log('Backend executable lacks execute permissions', 'error');
|
||||
return false;
|
||||
}
|
||||
|
||||
this.log(`Backend size: ${this.results.backend.size} bytes`, 'info');
|
||||
this.log(`Backend permissions: ${this.results.backend.mode}`, 'info');
|
||||
|
||||
} catch (e) {
|
||||
this.results.errors.push(`Failed to check backend stats: ${e.message}`);
|
||||
this.log(`Failed to check backend stats: ${e.message}`, 'error');
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
async testBackendExecution() {
|
||||
if (!this.results.backend.exists) {
|
||||
return false;
|
||||
}
|
||||
|
||||
this.log('Testing backend execution...', 'info');
|
||||
|
||||
return new Promise((resolve) => {
|
||||
const testArgs = ['inspect_bundle', JSON.stringify({ bundle_path: '/nonexistent/test.fbundle' })];
|
||||
const backendProcess = spawn(this.results.backend.expectedPath, testArgs, {
|
||||
timeout: 15000,
|
||||
env: { ...process.env, PYTHONPATH: '' } // Clear PYTHONPATH to test self-containment
|
||||
});
|
||||
|
||||
let stdout = '';
|
||||
let stderr = '';
|
||||
|
||||
backendProcess.stdout.on('data', (data) => {
|
||||
stdout += data.toString();
|
||||
});
|
||||
|
||||
backendProcess.stderr.on('data', (data) => {
|
||||
stderr += data.toString();
|
||||
});
|
||||
|
||||
backendProcess.on('close', (code) => {
|
||||
this.results.backend.testExecution = {
|
||||
exitCode: code,
|
||||
stdoutLength: stdout.length,
|
||||
stderrLength: stderr.length,
|
||||
stdout: stdout.substring(0, 500), // First 500 chars
|
||||
stderr: stderr.substring(0, 500)
|
||||
};
|
||||
|
||||
if (stdout.length > 0) {
|
||||
try {
|
||||
const result = JSON.parse(stdout.trim());
|
||||
this.results.backend.producesValidJSON = true;
|
||||
this.results.backend.jsonResponse = result;
|
||||
|
||||
if (result.success === false && result.error) {
|
||||
this.log('Backend produces valid JSON error responses', 'success');
|
||||
resolve(true);
|
||||
} else {
|
||||
this.log('Backend JSON response format unexpected', 'warning');
|
||||
this.results.warnings.push('Backend JSON response format unexpected');
|
||||
resolve(true);
|
||||
}
|
||||
} catch (e) {
|
||||
this.results.backend.producesValidJSON = false;
|
||||
this.results.errors.push(`Backend output is not valid JSON: ${e.message}`);
|
||||
this.log(`Backend output is not valid JSON: ${e.message}`, 'error');
|
||||
this.log(`Raw stdout (first 200 chars): "${stdout.substring(0, 200)}"`, 'error');
|
||||
resolve(false);
|
||||
}
|
||||
} else {
|
||||
this.results.backend.producesValidJSON = false;
|
||||
this.results.errors.push('Backend produced no stdout output');
|
||||
this.log('Backend produced no stdout output', 'error');
|
||||
if (stderr.length > 0) {
|
||||
this.log(`Stderr: ${stderr.substring(0, 200)}`, 'error');
|
||||
}
|
||||
resolve(false);
|
||||
}
|
||||
});
|
||||
|
||||
backendProcess.on('error', (err) => {
|
||||
this.results.backend.executionError = err.message;
|
||||
this.results.errors.push(`Failed to execute backend: ${err.message}`);
|
||||
this.log(`Failed to execute backend: ${err.message}`, 'error');
|
||||
resolve(false);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
checkNodeModules() {
|
||||
this.log('Checking Node.js modules...', 'info');
|
||||
|
||||
const requiredModules = [
|
||||
'fs-extra',
|
||||
'js-yaml',
|
||||
'adm-zip',
|
||||
'@electron/remote',
|
||||
'python-shell',
|
||||
'plotly.js-dist',
|
||||
'electron-squirrel-startup'
|
||||
];
|
||||
|
||||
this.results.nodeModules.checked = {};
|
||||
|
||||
for (const moduleName of requiredModules) {
|
||||
try {
|
||||
const modulePath = require.resolve(moduleName);
|
||||
this.results.nodeModules.checked[moduleName] = {
|
||||
available: true,
|
||||
path: modulePath
|
||||
};
|
||||
this.log(`✓ ${moduleName}`, 'success');
|
||||
} catch (e) {
|
||||
this.results.nodeModules.checked[moduleName] = {
|
||||
available: false,
|
||||
error: e.message
|
||||
};
|
||||
this.results.errors.push(`Module ${moduleName} not available: ${e.message}`);
|
||||
this.log(`✗ ${moduleName}: ${e.message}`, 'error');
|
||||
}
|
||||
}
|
||||
|
||||
return Object.values(this.results.nodeModules.checked).every(mod => mod.available);
|
||||
}
|
||||
|
||||
checkFilePermissions() {
|
||||
this.log('Checking file permissions...', 'info');
|
||||
|
||||
const testPaths = [
|
||||
this.appPath,
|
||||
path.join(this.appPath, 'backend'),
|
||||
this.results.backend.expectedPath
|
||||
];
|
||||
|
||||
this.results.permissions.paths = {};
|
||||
|
||||
for (const testPath of testPaths) {
|
||||
try {
|
||||
if (fs.existsSync(testPath)) {
|
||||
const stats = fs.statSync(testPath);
|
||||
this.results.permissions.paths[testPath] = {
|
||||
readable: true,
|
||||
mode: stats.mode.toString(8),
|
||||
isDirectory: stats.isDirectory(),
|
||||
isFile: stats.isFile()
|
||||
};
|
||||
this.log(`✓ ${testPath} (${stats.mode.toString(8)})`, 'success');
|
||||
} else {
|
||||
this.results.permissions.paths[testPath] = {
|
||||
readable: false,
|
||||
exists: false
|
||||
};
|
||||
this.log(`✗ ${testPath} does not exist`, 'warning');
|
||||
}
|
||||
} catch (e) {
|
||||
this.results.permissions.paths[testPath] = {
|
||||
readable: false,
|
||||
error: e.message
|
||||
};
|
||||
this.results.errors.push(`Cannot access ${testPath}: ${e.message}`);
|
||||
this.log(`✗ ${testPath}: ${e.message}`, 'error');
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async runFullCheck() {
|
||||
this.log('Starting runtime dependency check...', 'info');
|
||||
|
||||
this.checkEnvironment();
|
||||
const backendExists = this.checkBackendExecutable();
|
||||
const backendWorks = backendExists ? await this.testBackendExecution() : false;
|
||||
const nodeModulesOk = this.checkNodeModules();
|
||||
this.checkFilePermissions();
|
||||
|
||||
// Generate summary
|
||||
this.log('\n=== RUNTIME DEPENDENCY CHECK SUMMARY ===', 'info');
|
||||
|
||||
const checks = {
|
||||
'Environment': true, // Always passes
|
||||
'Backend Executable': backendExists,
|
||||
'Backend Execution': backendWorks,
|
||||
'Node Modules': nodeModulesOk,
|
||||
'File Permissions': this.results.errors.filter(e => e.includes('Cannot access')).length === 0
|
||||
};
|
||||
|
||||
let allPassed = true;
|
||||
for (const [check, passed] of Object.entries(checks)) {
|
||||
const status = passed ? '✅ PASS' : '❌ FAIL';
|
||||
this.log(`${check}: ${status}`, passed ? 'success' : 'error');
|
||||
if (!passed) allPassed = false;
|
||||
}
|
||||
|
||||
if (this.results.warnings.length > 0) {
|
||||
this.log(`\n⚠️ ${this.results.warnings.length} warnings:`, 'warning');
|
||||
this.results.warnings.forEach(warning => this.log(` - ${warning}`, 'warning'));
|
||||
}
|
||||
|
||||
if (this.results.errors.length > 0) {
|
||||
this.log(`\n❌ ${this.results.errors.length} errors:`, 'error');
|
||||
this.results.errors.forEach(error => this.log(` - ${error}`, 'error'));
|
||||
}
|
||||
|
||||
if (allPassed && this.results.errors.length === 0) {
|
||||
this.log('\n🎉 All runtime dependencies are available! App should work correctly.', 'success');
|
||||
} else {
|
||||
this.log('\n💥 Runtime dependency issues found. App may not work correctly.', 'error');
|
||||
}
|
||||
|
||||
// Save results to file for debugging
|
||||
const resultsPath = path.join(process.cwd(), 'runtime-check-results.json');
|
||||
try {
|
||||
fs.writeFileSync(resultsPath, JSON.stringify(this.results, null, 2));
|
||||
this.log(`\n📄 Detailed results saved to: ${resultsPath}`, 'info');
|
||||
} catch (e) {
|
||||
this.log(`Failed to save results: ${e.message}`, 'warning');
|
||||
}
|
||||
|
||||
return allPassed && this.results.errors.length === 0;
|
||||
}
|
||||
}
|
||||
|
||||
// Run check if called directly
|
||||
if (require.main === module) {
|
||||
const checker = new RuntimeDependencyChecker();
|
||||
checker.runFullCheck().then(success => {
|
||||
process.exit(success ? 0 : 1);
|
||||
}).catch(error => {
|
||||
console.error('Runtime check failed with error:', error);
|
||||
process.exit(1);
|
||||
});
|
||||
}
|
||||
|
||||
module.exports = { RuntimeDependencyChecker };
|
||||
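A minimal usage sketch (illustrative, not part of this diff): because the class is exported, the packaged app itself could trigger the same check on demand, for example from a diagnostics handler in the Electron main process. The IPC channel name below is an assumption.

// Hypothetical diagnostics hook in the Electron main process.
// The channel name 'run-runtime-check' is illustrative only.
const { ipcMain } = require('electron');
const { RuntimeDependencyChecker } = require('./check-runtime-deps.js');

ipcMain.handle('run-runtime-check', async () => {
    const checker = new RuntimeDependencyChecker();
    const passed = await checker.runFullCheck(); // logs a summary and writes runtime-check-results.json
    return { passed, results: checker.results };
});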
142
electron/debug-packaged-app.js
Normal file
@@ -0,0 +1,142 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
/**
|
||||
* Debug script to test the packaged app backend in isolation
|
||||
* This helps identify issues with the backend executable in different user environments
|
||||
*/
|
||||
|
||||
const { spawn } = require('child_process');
|
||||
const path = require('path');
|
||||
const fs = require('fs');
|
||||
|
||||
function testBackendExecutable(backendPath, testBundlePath) {
|
||||
console.log(`\n=== Testing Backend Executable ===`);
|
||||
console.log(`Backend path: ${backendPath}`);
|
||||
console.log(`Test bundle: ${testBundlePath}`);
|
||||
console.log(`Backend exists: ${fs.existsSync(backendPath)}`);
|
||||
|
||||
if (!fs.existsSync(backendPath)) {
|
||||
console.error(`❌ Backend executable not found at: ${backendPath}`);
|
||||
return Promise.resolve(false);
|
||||
}
|
||||
|
||||
// Test with inspect_bundle command (same as open-bundle)
|
||||
const args = ['inspect_bundle', JSON.stringify({ bundle_path: testBundlePath })];
|
||||
console.log(`Command: ${backendPath} ${args.join(' ')}`);
|
||||
|
||||
return new Promise((resolve) => {
|
||||
const process = spawn(backendPath, args);
|
||||
let stdoutBuffer = '';
|
||||
let stderrBuffer = '';
|
||||
|
||||
process.stdout.on('data', (data) => {
|
||||
stdoutBuffer += data.toString();
|
||||
});
|
||||
|
||||
process.stderr.on('data', (data) => {
|
||||
stderrBuffer += data.toString();
|
||||
});
|
||||
|
||||
process.on('close', (code) => {
|
||||
console.log(`\n--- Backend Test Results ---`);
|
||||
console.log(`Exit code: ${code}`);
|
||||
console.log(`Stdout length: ${stdoutBuffer.length}`);
|
||||
console.log(`Stderr length: ${stderrBuffer.length}`);
|
||||
|
||||
if (stdoutBuffer.length > 0) {
|
||||
console.log(`\nStdout first 500 chars:`);
|
||||
console.log(`"${stdoutBuffer.substring(0, 500)}"`);
|
||||
|
||||
try {
|
||||
const parsed = JSON.parse(stdoutBuffer.trim());
|
||||
console.log(`✅ JSON parsing successful`);
|
||||
console.log(`Success: ${parsed.success}`);
|
||||
} catch (e) {
|
||||
console.log(`❌ JSON parsing failed: ${e.message}`);
|
||||
console.log(`First problematic character: "${stdoutBuffer.charAt(0)}" (${stdoutBuffer.charCodeAt(0)})`);
|
||||
}
|
||||
}
|
||||
|
||||
if (stderrBuffer.length > 0) {
|
||||
console.log(`\nStderr output:`);
|
||||
console.log(stderrBuffer);
|
||||
}
|
||||
|
||||
resolve(code === 0);
|
||||
});
|
||||
|
||||
process.on('error', (err) => {
|
||||
console.error(`❌ Failed to start backend process: ${err.message}`);
|
||||
resolve(false);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
async function main() {
|
||||
console.log('=== 4DSTAR Packaged App Debug Tool ===');
|
||||
console.log(`Platform: ${process.platform}`);
|
||||
console.log(`Architecture: ${process.arch}`);
|
||||
console.log(`Node version: ${process.version}`);
|
||||
console.log(`Working directory: ${process.cwd()}`);
|
||||
|
||||
// Get test bundle path from command line or use default
|
||||
const testBundlePath = process.argv[2] || '/path/to/test.fbundle';
|
||||
|
||||
if (!fs.existsSync(testBundlePath)) {
|
||||
console.error(`❌ Test bundle not found: ${testBundlePath}`);
|
||||
console.log(`Usage: node debug-packaged-app.js <path-to-test-bundle>`);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
// Test different backend paths
|
||||
const backendPaths = [
|
||||
// Development path
|
||||
path.resolve(__dirname, '..', 'build', 'electron', 'dist', 'fourdst-backend', 'fourdst-backend'),
|
||||
// Packaged app path (if running from within app)
|
||||
path.join(process.resourcesPath || '', 'backend', 'fourdst-backend'),
|
||||
// Alternative packaged paths
|
||||
path.join(__dirname, '..', 'resources', 'backend', 'fourdst-backend'),
|
||||
path.join(__dirname, 'backend', 'fourdst-backend'),
|
||||
];
|
||||
|
||||
console.log(`\n=== Testing Backend Paths ===`);
|
||||
for (const backendPath of backendPaths) {
|
||||
console.log(`\nTesting: ${backendPath}`);
|
||||
const success = await testBackendExecutable(backendPath, testBundlePath);
|
||||
if (success) {
|
||||
console.log(`✅ Backend test successful!`);
|
||||
break;
|
||||
} else {
|
||||
console.log(`❌ Backend test failed`);
|
||||
}
|
||||
}
|
||||
|
||||
// Environment diagnostics
|
||||
console.log(`\n=== Environment Diagnostics ===`);
|
||||
console.log(`USER: ${process.env.USER || 'unknown'}`);
|
||||
console.log(`HOME: ${process.env.HOME || 'unknown'}`);
|
||||
console.log(`PATH: ${process.env.PATH || 'unknown'}`);
|
||||
console.log(`PYTHONPATH: ${process.env.PYTHONPATH || 'not set'}`);
|
||||
|
||||
// Check permissions
|
||||
console.log(`\n=== Permission Check ===`);
|
||||
for (const backendPath of backendPaths) {
|
||||
if (fs.existsSync(backendPath)) {
|
||||
try {
|
||||
const stats = fs.statSync(backendPath);
|
||||
console.log(`${backendPath}:`);
|
||||
console.log(` Executable: ${!!(stats.mode & parseInt('111', 8))}`);
|
||||
console.log(` Mode: ${stats.mode.toString(8)}`);
|
||||
console.log(` Size: ${stats.size} bytes`);
|
||||
} catch (e) {
|
||||
console.log(`${backendPath}: Permission error - ${e.message}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (require.main === module) {
|
||||
main().catch(console.error);
|
||||
}
|
||||
|
||||
module.exports = { testBackendExecutable };
|
||||
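As a rough usage sketch (both paths are placeholders, not taken from this diff), the exported helper can also be driven from another Node script instead of the CLI entry point:

// Hypothetical driver script; both paths below are placeholders.
const { testBackendExecutable } = require('./debug-packaged-app.js');

testBackendExecutable(
    '/Applications/4DSTAR.app/Contents/Resources/backend/fourdst-backend',
    '/tmp/example.fbundle'
).then(ok => process.exit(ok ? 0 : 1));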
45
electron/entitlements.mac.plist
Normal file
@@ -0,0 +1,45 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<!-- Allow JIT compilation for V8 JavaScript engine -->
|
||||
<key>com.apple.security.cs.allow-jit</key>
|
||||
<true/>
|
||||
|
||||
<!-- Allow unsigned executable memory for native modules -->
|
||||
<key>com.apple.security.cs.allow-unsigned-executable-memory</key>
|
||||
<true/>
|
||||
|
||||
<!-- Disable library validation to allow loading of bundled libraries -->
|
||||
<key>com.apple.security.cs.disable-library-validation</key>
|
||||
<true/>
|
||||
|
||||
<!-- Allow outgoing network connections (client) -->
|
||||
<key>com.apple.security.network.client</key>
|
||||
<true/>
|
||||
|
||||
<!-- Allow incoming network connections (server) -->
|
||||
<key>com.apple.security.network.server</key>
|
||||
<true/>
|
||||
|
||||
<!-- Allow file system access -->
|
||||
<key>com.apple.security.files.user-selected.read-write</key>
|
||||
<true/>
|
||||
|
||||
<!-- Allow access to Downloads folder -->
|
||||
<key>com.apple.security.files.downloads.read-write</key>
|
||||
<true/>
|
||||
|
||||
<!-- Allow dyld environment variables (used when launching the bundled Python backend) -->
|
||||
<key>com.apple.security.cs.allow-dyld-environment-variables</key>
|
||||
<true/>
|
||||
|
||||
<!-- Camera access not requested (reserved for possible future features) -->
|
||||
<key>com.apple.security.device.camera</key>
|
||||
<false/>
|
||||
|
||||
<!-- Microphone access not requested (reserved for possible future features) -->
|
||||
<key>com.apple.security.device.microphone</key>
|
||||
<false/>
|
||||
</dict>
|
||||
</plist>
|
||||
@@ -29,10 +29,66 @@ analysis = Analysis(['bridge.py'],
|
||||
pathex=[str(project_root)],
|
||||
binaries=[],
|
||||
# Add any modules that PyInstaller might not find automatically.
|
||||
hiddenimports=['docker'],
|
||||
hiddenimports=[
|
||||
# Core dependencies
|
||||
'docker',
|
||||
'docker.api',
|
||||
'docker.client',
|
||||
'docker.errors',
|
||||
'docker.models',
|
||||
'docker.types',
|
||||
'docker.utils',
|
||||
# Cryptography dependencies
|
||||
'cryptography',
|
||||
'cryptography.hazmat',
|
||||
'cryptography.hazmat.primitives',
|
||||
'cryptography.hazmat.primitives.asymmetric',
|
||||
'cryptography.hazmat.primitives.asymmetric.rsa',
|
||||
'cryptography.hazmat.primitives.asymmetric.ed25519',
|
||||
'cryptography.hazmat.primitives.asymmetric.padding',
|
||||
'cryptography.hazmat.primitives.hashes',
|
||||
'cryptography.hazmat.primitives.serialization',
|
||||
'cryptography.exceptions',
|
||||
# YAML dependencies
|
||||
'yaml',
|
||||
'yaml.loader',
|
||||
'yaml.dumper',
|
||||
# Core Python modules that might be missed
|
||||
'pathlib',
|
||||
'tempfile',
|
||||
'zipfile',
|
||||
'tarfile',
|
||||
'hashlib',
|
||||
'datetime',
|
||||
'json',
|
||||
'subprocess',
|
||||
'shutil',
|
||||
'logging',
|
||||
# Platform-specific modules
|
||||
'platform',
|
||||
'os',
|
||||
'sys',
|
||||
# fourdst modules
|
||||
'fourdst',
|
||||
'fourdst.core',
|
||||
'fourdst.core.bundle',
|
||||
'fourdst.core.build',
|
||||
'fourdst.core.config',
|
||||
'fourdst.core.platform',
|
||||
'fourdst.core.utils'
|
||||
],
|
||||
hookspath=[],
|
||||
runtime_hooks=[],
|
||||
excludes=[],
|
||||
excludes=[
|
||||
# Exclude unnecessary modules to reduce size
|
||||
'tkinter',
|
||||
'matplotlib',
|
||||
'numpy',
|
||||
'scipy',
|
||||
'pandas',
|
||||
'jupyter',
|
||||
'IPython'
|
||||
],
|
||||
win_no_prefer_redirects=False,
|
||||
win_private_assemblies=False,
|
||||
cipher=None,
|
||||
|
||||
@@ -109,6 +109,13 @@ function runPythonCommand(command, kwargs, event) {
|
||||
|
||||
process.on('close', (code) => {
|
||||
console.log(`[MAIN_PROCESS] Backend process exited with code ${code}`);
|
||||
console.log(`[MAIN_PROCESS] Backend path used: ${backendPath}`);
|
||||
console.log(`[MAIN_PROCESS] App packaged: ${app.isPackaged}`);
|
||||
console.log(`[MAIN_PROCESS] Resources path: ${process.resourcesPath || 'N/A'}`);
|
||||
console.log(`[MAIN_PROCESS] Raw stdout buffer length: ${stdoutBuffer.length}`);
|
||||
console.log(`[MAIN_PROCESS] Raw stdout first 200 chars: "${stdoutBuffer.substring(0, 200)}"`);
|
||||
console.log(`[MAIN_PROCESS] Error output: "${errorOutput}"`);
|
||||
|
||||
let resultData = null;
|
||||
|
||||
try {
|
||||
@@ -122,7 +129,11 @@ function runPythonCommand(command, kwargs, event) {
|
||||
resultData = {
|
||||
success: false,
|
||||
error: `JSON parsing failed: ${e.message}`,
|
||||
raw_output: stdoutBuffer
|
||||
raw_output: stdoutBuffer,
|
||||
backend_path: backendPath,
|
||||
is_packaged: app.isPackaged,
|
||||
exit_code: code,
|
||||
stderr_output: errorOutput
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
@@ -6,10 +6,13 @@
|
||||
"scripts": {
|
||||
"start": "electron .",
|
||||
"dev": "electron .",
|
||||
"prebuild": "node build-backend.js",
|
||||
"validate": "node validate-dependencies.js",
|
||||
"prebuild": "node build-backend.js && node validate-dependencies.js",
|
||||
"build": "electron-builder",
|
||||
"prepack": "node build-backend.js",
|
||||
"pack": "electron-builder --dir"
|
||||
"prepack": "node build-backend.js && node validate-dependencies.js",
|
||||
"pack": "electron-builder --dir",
|
||||
"postbuild": "node validate-dependencies.js",
|
||||
"clean": "rm -rf dist build node_modules/.cache"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
@@ -47,7 +50,13 @@
|
||||
"**/*",
|
||||
"node_modules/**/*",
|
||||
"!node_modules/electron/**/*",
|
||||
"!node_modules/electron-builder/**/*"
|
||||
"!node_modules/electron-builder/**/*",
|
||||
"!node_modules/.cache/**/*",
|
||||
"!**/*.map",
|
||||
"!**/test/**/*",
|
||||
"!**/tests/**/*",
|
||||
"!**/*.test.js",
|
||||
"!**/*.spec.js"
|
||||
],
|
||||
"extraResources": [
|
||||
{
|
||||
@@ -56,12 +65,26 @@
|
||||
"filter": ["**/*"]
|
||||
}
|
||||
],
|
||||
"asarUnpack": [
|
||||
"node_modules/plotly.js-dist/**/*",
|
||||
"resources/backend/**/*"
|
||||
],
|
||||
"mac": {
|
||||
"category": "public.app-category.developer-tools",
|
||||
"target": [
|
||||
"dmg",
|
||||
"zip"
|
||||
]
|
||||
{
|
||||
"target": "dmg",
|
||||
"arch": ["x64", "arm64"]
|
||||
},
|
||||
{
|
||||
"target": "zip",
|
||||
"arch": ["x64", "arm64"]
|
||||
}
|
||||
],
|
||||
"hardenedRuntime": true,
|
||||
"gatekeeperAssess": false,
|
||||
"entitlements": "entitlements.mac.plist",
|
||||
"entitlementsInherit": "entitlements.mac.plist"
|
||||
},
|
||||
"linux": {
|
||||
"target": [
|
||||
@@ -70,6 +93,10 @@
|
||||
"rpm"
|
||||
],
|
||||
"category": "Development"
|
||||
},
|
||||
"nsis": {
|
||||
"oneClick": false,
|
||||
"allowToChangeInstallationDirectory": true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
344
electron/validate-dependencies.js
Normal file
@@ -0,0 +1,344 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
/**
|
||||
* Dependency Validation Script for 4DSTAR Electron App
|
||||
*
|
||||
* This script validates that all runtime dependencies are properly embedded
|
||||
* and available in the packaged application.
|
||||
*/
|
||||
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const { spawn } = require('child_process');
|
||||
|
||||
class DependencyValidator {
|
||||
constructor() {
|
||||
this.errors = [];
|
||||
this.warnings = [];
|
||||
this.projectRoot = path.resolve(__dirname, '..');
|
||||
this.buildDir = path.join(this.projectRoot, 'build');
|
||||
this.electronDir = __dirname;
|
||||
}
|
||||
|
||||
log(message, type = 'info') {
|
||||
const timestamp = new Date().toISOString();
|
||||
const prefix = {
|
||||
'info': '📋',
|
||||
'success': '✅',
|
||||
'warning': '⚠️',
|
||||
'error': '❌'
|
||||
}[type] || 'ℹ️';
|
||||
|
||||
console.log(`${prefix} [${timestamp}] ${message}`);
|
||||
|
||||
if (type === 'error') {
|
||||
this.errors.push(message);
|
||||
} else if (type === 'warning') {
|
||||
this.warnings.push(message);
|
||||
}
|
||||
}
|
||||
|
||||
async validatePythonBackend() {
|
||||
this.log('Validating Python backend dependencies...', 'info');
|
||||
|
||||
// Check if backend executable exists
|
||||
const executableName = process.platform === 'win32' ? 'fourdst-backend.exe' : 'fourdst-backend';
|
||||
const backendPath = path.join(this.buildDir, 'electron', 'dist', 'fourdst-backend', executableName);
|
||||
|
||||
if (!fs.existsSync(backendPath)) {
|
||||
this.log(`Backend executable not found: ${backendPath}`, 'error');
|
||||
return false;
|
||||
}
|
||||
|
||||
this.log(`Backend executable found: ${backendPath}`, 'success');
|
||||
|
||||
// Check backend executable permissions
|
||||
try {
|
||||
const stats = fs.statSync(backendPath);
|
||||
const isExecutable = !!(stats.mode & parseInt('111', 8));
|
||||
if (!isExecutable) {
|
||||
this.log('Backend executable lacks execute permissions', 'error');
|
||||
return false;
|
||||
}
|
||||
this.log('Backend executable has proper permissions', 'success');
|
||||
} catch (e) {
|
||||
this.log(`Failed to check backend permissions: ${e.message}`, 'error');
|
||||
return false;
|
||||
}
|
||||
|
||||
// Test backend execution
|
||||
return new Promise((resolve) => {
|
||||
this.log('Testing backend execution...', 'info');
|
||||
|
||||
const testArgs = ['inspect_bundle', JSON.stringify({ bundle_path: '/nonexistent/test.fbundle' })];
|
||||
const process = spawn(backendPath, testArgs, { timeout: 10000 });
|
||||
|
||||
let stdout = '';
|
||||
let stderr = '';
|
||||
|
||||
process.stdout.on('data', (data) => {
|
||||
stdout += data.toString();
|
||||
});
|
||||
|
||||
process.stderr.on('data', (data) => {
|
||||
stderr += data.toString();
|
||||
});
|
||||
|
||||
process.on('close', (code) => {
|
||||
// We expect this to fail since the bundle doesn't exist,
|
||||
// but it should fail gracefully with JSON output
|
||||
if (stdout.length > 0) {
|
||||
try {
|
||||
const result = JSON.parse(stdout.trim());
|
||||
if (result.success === false && result.error) {
|
||||
this.log('Backend produces valid JSON error responses', 'success');
|
||||
resolve(true);
|
||||
} else {
|
||||
this.log('Backend JSON response format unexpected', 'warning');
|
||||
resolve(true);
|
||||
}
|
||||
} catch (e) {
|
||||
this.log(`Backend output is not valid JSON: ${e.message}`, 'error');
|
||||
this.log(`Raw stdout: "${stdout.substring(0, 200)}"`, 'error');
|
||||
resolve(false);
|
||||
}
|
||||
} else {
|
||||
this.log('Backend produced no stdout output', 'error');
|
||||
this.log(`Stderr: ${stderr}`, 'error');
|
||||
resolve(false);
|
||||
}
|
||||
});
|
||||
|
||||
process.on('error', (err) => {
|
||||
this.log(`Failed to execute backend: ${err.message}`, 'error');
|
||||
resolve(false);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
validateNodeDependencies() {
|
||||
this.log('Validating Node.js dependencies...', 'info');
|
||||
|
||||
const packageJsonPath = path.join(this.electronDir, 'package.json');
|
||||
if (!fs.existsSync(packageJsonPath)) {
|
||||
this.log('package.json not found', 'error');
|
||||
return false;
|
||||
}
|
||||
|
||||
const packageJson = JSON.parse(fs.readFileSync(packageJsonPath, 'utf8'));
|
||||
const dependencies = { ...packageJson.dependencies, ...packageJson.devDependencies };
|
||||
|
||||
let allFound = true;
|
||||
|
||||
for (const [dep, version] of Object.entries(dependencies)) {
|
||||
const depPath = path.join(this.electronDir, 'node_modules', dep);
|
||||
if (fs.existsSync(depPath)) {
|
||||
this.log(`✓ ${dep}@${version}`, 'success');
|
||||
} else {
|
||||
this.log(`✗ ${dep}@${version} not found in node_modules`, 'error');
|
||||
allFound = false;
|
||||
}
|
||||
}
|
||||
|
||||
// Check for native modules that might need special handling
|
||||
const nativeModules = ['@electron/remote', 'python-shell'];
|
||||
for (const mod of nativeModules) {
|
||||
if (dependencies[mod]) {
|
||||
const modPath = path.join(this.electronDir, 'node_modules', mod);
|
||||
if (fs.existsSync(modPath)) {
|
||||
// Check for native binaries
|
||||
const hasNativeBinaries = this.findNativeBinaries(modPath);
|
||||
if (hasNativeBinaries.length > 0) {
|
||||
this.log(`Native binaries found in ${mod}: ${hasNativeBinaries.join(', ')}`, 'info');
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return allFound;
|
||||
}
|
||||
|
||||
findNativeBinaries(dir) {
|
||||
const nativeExtensions = ['.node', '.so', '.dylib', '.dll'];
|
||||
const binaries = [];
|
||||
|
||||
try {
|
||||
const files = fs.readdirSync(dir, { recursive: true });
|
||||
for (const file of files) {
|
||||
const ext = path.extname(file);
|
||||
if (nativeExtensions.includes(ext)) {
|
||||
binaries.push(file);
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
// Directory might not exist or be accessible
|
||||
}
|
||||
|
||||
return binaries;
|
||||
}
|
||||
|
||||
validateElectronBuild() {
|
||||
this.log('Validating Electron build configuration...', 'info');
|
||||
|
||||
const packageJsonPath = path.join(this.electronDir, 'package.json');
|
||||
const packageJson = JSON.parse(fs.readFileSync(packageJsonPath, 'utf8'));
|
||||
|
||||
const buildConfig = packageJson.build;
|
||||
if (!buildConfig) {
|
||||
this.log('No build configuration found in package.json', 'error');
|
||||
return false;
|
||||
}
|
||||
|
||||
// Check extraResources configuration
|
||||
if (!buildConfig.extraResources || !Array.isArray(buildConfig.extraResources)) {
|
||||
this.log('No extraResources configuration found', 'error');
|
||||
return false;
|
||||
}
|
||||
|
||||
// Validate backend resource mapping
|
||||
const backendResource = buildConfig.extraResources.find(res => res.to === 'backend/');
|
||||
if (!backendResource) {
|
||||
this.log('Backend resource mapping not found in extraResources', 'error');
|
||||
return false;
|
||||
}
|
||||
|
||||
// Check if source directory exists
|
||||
const backendSourcePath = path.resolve(this.electronDir, backendResource.from);
|
||||
if (!fs.existsSync(backendSourcePath)) {
|
||||
this.log(`Backend source directory not found: ${backendSourcePath}`, 'error');
|
||||
return false;
|
||||
}
|
||||
|
||||
this.log('Electron build configuration validated', 'success');
|
||||
return true;
|
||||
}
|
||||
|
||||
validatePyInstallerSpec() {
|
||||
this.log('Validating PyInstaller spec file...', 'info');
|
||||
|
||||
const specPath = path.join(this.electronDir, 'fourdst-backend.spec');
|
||||
if (!fs.existsSync(specPath)) {
|
||||
this.log('PyInstaller spec file not found', 'error');
|
||||
return false;
|
||||
}
|
||||
|
||||
const specContent = fs.readFileSync(specPath, 'utf8');
|
||||
|
||||
// Check for essential hidden imports
|
||||
const requiredImports = [
|
||||
'docker',
|
||||
'cryptography',
|
||||
'yaml',
|
||||
'fourdst.core'
|
||||
];
|
||||
|
||||
for (const imp of requiredImports) {
|
||||
if (!specContent.includes(`'${imp}'`)) {
|
||||
this.log(`Missing hidden import in spec: ${imp}`, 'warning');
|
||||
} else {
|
||||
this.log(`✓ Hidden import found: ${imp}`, 'success');
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
validateFileStructure() {
|
||||
this.log('Validating project file structure...', 'info');
|
||||
|
||||
const requiredFiles = [
|
||||
'package.json',
|
||||
'main-refactored.js',
|
||||
'bridge.py',
|
||||
'fourdst-backend.spec',
|
||||
'entitlements.mac.plist'
|
||||
];
|
||||
|
||||
let allFound = true;
|
||||
|
||||
for (const file of requiredFiles) {
|
||||
const filePath = path.join(this.electronDir, file);
|
||||
if (fs.existsSync(filePath)) {
|
||||
this.log(`✓ ${file}`, 'success');
|
||||
} else {
|
||||
this.log(`✗ ${file} not found`, 'error');
|
||||
allFound = false;
|
||||
}
|
||||
}
|
||||
|
||||
// Check for main modules
|
||||
const mainModulesDir = path.join(this.electronDir, 'main');
|
||||
if (fs.existsSync(mainModulesDir)) {
|
||||
this.log('✓ Main process modules directory found', 'success');
|
||||
} else {
|
||||
this.log('✗ Main process modules directory not found', 'error');
|
||||
allFound = false;
|
||||
}
|
||||
|
||||
// Check for renderer modules
|
||||
const rendererModulesDir = path.join(this.electronDir, 'renderer');
|
||||
if (fs.existsSync(rendererModulesDir)) {
|
||||
this.log('✓ Renderer process modules directory found', 'success');
|
||||
} else {
|
||||
this.log('✗ Renderer process modules directory not found', 'error');
|
||||
allFound = false;
|
||||
}
|
||||
|
||||
return allFound;
|
||||
}
|
||||
|
||||
async runValidation() {
|
||||
this.log('Starting comprehensive dependency validation...', 'info');
|
||||
this.log(`Project root: ${this.projectRoot}`, 'info');
|
||||
this.log(`Electron directory: ${this.electronDir}`, 'info');
|
||||
this.log(`Build directory: ${this.buildDir}`, 'info');
|
||||
|
||||
const results = {
|
||||
fileStructure: this.validateFileStructure(),
|
||||
nodeDependencies: this.validateNodeDependencies(),
|
||||
electronBuild: this.validateElectronBuild(),
|
||||
pyinstallerSpec: this.validatePyInstallerSpec(),
|
||||
pythonBackend: await this.validatePythonBackend()
|
||||
};
|
||||
|
||||
this.log('\n=== VALIDATION SUMMARY ===', 'info');
|
||||
|
||||
let allPassed = true;
|
||||
for (const [test, passed] of Object.entries(results)) {
|
||||
const status = passed ? '✅ PASS' : '❌ FAIL';
|
||||
this.log(`${test}: ${status}`, passed ? 'success' : 'error');
|
||||
if (!passed) allPassed = false;
|
||||
}
|
||||
|
||||
if (this.warnings.length > 0) {
|
||||
this.log(`\n⚠️ ${this.warnings.length} warnings found:`, 'warning');
|
||||
this.warnings.forEach(warning => this.log(` - ${warning}`, 'warning'));
|
||||
}
|
||||
|
||||
if (this.errors.length > 0) {
|
||||
this.log(`\n❌ ${this.errors.length} errors found:`, 'error');
|
||||
this.errors.forEach(error => this.log(` - ${error}`, 'error'));
|
||||
}
|
||||
|
||||
if (allPassed && this.errors.length === 0) {
|
||||
this.log('\n🎉 All validations passed! The app should be fully self-contained.', 'success');
|
||||
return true;
|
||||
} else {
|
||||
this.log('\n💥 Validation failed. Please fix the issues above before packaging.', 'error');
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Run validation if called directly
|
||||
if (require.main === module) {
|
||||
const validator = new DependencyValidator();
|
||||
validator.runValidation().then(success => {
|
||||
process.exit(success ? 0 : 1);
|
||||
}).catch(error => {
|
||||
console.error('Validation failed with error:', error);
|
||||
process.exit(1);
|
||||
});
|
||||
}
|
||||
|
||||
module.exports = { DependencyValidator };
|
||||
@@ -105,7 +105,7 @@ def create_bundle(
|
||||
if progress_callback:
|
||||
progress_callback(message)
|
||||
else:
|
||||
print(message)
|
||||
logging.info(message)
|
||||
|
||||
staging_dir = Path(tempfile.mkdtemp(prefix="fourdst_create_"))
|
||||
|
||||
@@ -349,7 +349,7 @@ def sign_bundle(bundle_path: Path, private_key: Path, progress_callback: Optiona
|
||||
if progress_callback:
|
||||
progress_callback(message)
|
||||
else:
|
||||
print(message)
|
||||
logging.info(message)
|
||||
|
||||
report_progress(f"Signing bundle: {bundle_path}")
|
||||
staging_dir = Path(tempfile.mkdtemp(prefix="fourdst_sign_"))
|
||||
@@ -728,7 +728,7 @@ def clear_bundle(bundle_path: Path, progress_callback: Optional[Callable] = None
|
||||
if progress_callback:
|
||||
progress_callback(message)
|
||||
else:
|
||||
print(message)
|
||||
logging.info(message)
|
||||
|
||||
report_progress(f"Clearing binaries from bundle: {bundle_path.name}")
|
||||
staging_dir = Path(tempfile.mkdtemp(prefix="fourdst_clear_"))
|
||||
|
||||
@@ -95,8 +95,22 @@ executable('detector', 'main.cpp')
|
||||
def _detect_and_cache_abi() -> dict:
|
||||
"""
|
||||
Compiles and runs a C++ program to detect the compiler ABI, then caches it.
|
||||
Falls back to platform-based detection if meson is not available (e.g., in packaged apps).
|
||||
"""
|
||||
print("Performing one-time native C++ ABI detection...")
|
||||
import sys
|
||||
import logging
|
||||
|
||||
# Use logging instead of print to avoid stdout contamination
|
||||
logger = logging.getLogger(__name__)
|
||||
logger.info("Performing one-time native C++ ABI detection...")
|
||||
|
||||
# Check if meson is available
|
||||
meson_available = shutil.which("meson") is not None
|
||||
|
||||
if not meson_available:
|
||||
logger.warning("Meson not available, using fallback platform detection")
|
||||
return _fallback_platform_detection()
|
||||
|
||||
temp_dir = CACHE_PATH / "abi_detector"
|
||||
if temp_dir.exists():
|
||||
shutil.rmtree(temp_dir)
|
||||
@@ -106,19 +120,20 @@ def _detect_and_cache_abi() -> dict:
|
||||
(temp_dir / "main.cpp").write_text(ABI_DETECTOR_CPP_SRC)
|
||||
(temp_dir / "meson.build").write_text(ABI_DETECTOR_MESON_SRC)
|
||||
|
||||
print(" - Configuring detector...")
|
||||
logger.info(" - Configuring detector...")
|
||||
run_command(["meson", "setup", "build"], cwd=temp_dir)
|
||||
print(" - Compiling detector...")
|
||||
logger.info(" - Compiling detector...")
|
||||
run_command(["meson", "compile", "-C", "build"], cwd=temp_dir)
|
||||
|
||||
detector_exe = temp_dir / "build" / "detector"
|
||||
print(" - Running detector...")
|
||||
logger.info(" - Running detector...")
|
||||
proc = subprocess.run([str(detector_exe)], check=True, capture_output=True, text=True)
|
||||
|
||||
abi_details = {}
|
||||
for line in proc.stdout.strip().split('\n'):
|
||||
key, value = line.split('=', 1)
|
||||
abi_details[key] = value.strip()
|
||||
if '=' in line:
|
||||
key, value = line.split('=', 1)
|
||||
abi_details[key] = value.strip()
|
||||
|
||||
arch = platform.machine()
|
||||
stdlib_version = abi_details.get('stdlib_version', 'unknown')
|
||||
@@ -138,13 +153,108 @@ def _detect_and_cache_abi() -> dict:
|
||||
with open(ABI_CACHE_FILE, 'w') as f:
|
||||
json.dump(platform_data, f, indent=4)
|
||||
|
||||
print(f" - ABI details cached to {ABI_CACHE_FILE}")
|
||||
logger.info(f" - ABI details cached to {ABI_CACHE_FILE}")
|
||||
return platform_data
|
||||
|
||||
except Exception as e:
|
||||
logger.warning(f"ABI detection failed: {e}, falling back to platform detection")
|
||||
return _fallback_platform_detection()
|
||||
finally:
|
||||
if temp_dir.exists():
|
||||
shutil.rmtree(temp_dir)
|
||||
|
||||
|
||||
def _fallback_platform_detection() -> dict:
|
||||
"""
|
||||
Fallback platform detection that doesn't require external tools.
|
||||
Used when meson is not available (e.g., in packaged applications).
|
||||
"""
|
||||
import sys
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
logger.info("Using fallback platform detection (no external tools required)")
|
||||
|
||||
# Get basic platform information
|
||||
arch = platform.machine()
|
||||
system = platform.system().lower()
|
||||
|
||||
# Map common architectures
|
||||
arch_mapping = {
|
||||
'x86_64': 'x86_64',
|
||||
'AMD64': 'x86_64',
|
||||
'arm64': 'aarch64',
|
||||
'aarch64': 'aarch64',
|
||||
'i386': 'i686',
|
||||
'i686': 'i686'
|
||||
}
|
||||
normalized_arch = arch_mapping.get(arch, arch)
|
||||
|
||||
# Detect compiler and stdlib based on platform
|
||||
if system == 'darwin':
|
||||
# macOS
|
||||
os_name = 'darwin'
|
||||
compiler = 'clang'
|
||||
stdlib = 'libc++'
|
||||
# Get macOS version for stdlib version
|
||||
mac_version = platform.mac_ver()[0]
|
||||
stdlib_version = mac_version.split('.')[0] if mac_version else 'unknown'
|
||||
abi = 'cxx11'
|
||||
elif system == 'linux':
|
||||
# Linux
|
||||
os_name = 'linux'
|
||||
# Try to detect if we're using GCC or Clang
|
||||
compiler = 'gcc' # Default assumption
|
||||
stdlib = 'libstdc++'
|
||||
stdlib_version = '11' # Common default
|
||||
abi = 'cxx11'
|
||||
elif system == 'windows':
|
||||
# Windows
|
||||
os_name = 'windows'
|
||||
compiler = 'msvc'
|
||||
stdlib = 'msvcrt'
|
||||
stdlib_version = 'unknown'
|
||||
abi = 'cxx11'
|
||||
else:
|
||||
# Unknown system
|
||||
os_name = system
|
||||
compiler = 'unknown'
|
||||
stdlib = 'unknown'
|
||||
stdlib_version = 'unknown'
|
||||
abi = 'unknown'
|
||||
|
||||
abi_string = f"{compiler}-{stdlib}-{stdlib_version}-{abi}"
|
||||
|
||||
platform_data = {
|
||||
"os": os_name,
|
||||
"arch": normalized_arch,
|
||||
"triplet": f"{normalized_arch}-{os_name}",
|
||||
"abi_signature": abi_string,
|
||||
"details": {
|
||||
"compiler": compiler,
|
||||
"stdlib": stdlib,
|
||||
"stdlib_version": stdlib_version,
|
||||
"abi": abi,
|
||||
"os": os_name,
|
||||
"detection_method": "fallback"
|
||||
},
|
||||
"is_native": True,
|
||||
"cross_file": None,
|
||||
"docker_image": None
|
||||
}
|
||||
|
||||
# Cache the result
|
||||
try:
|
||||
CACHE_PATH.mkdir(parents=True, exist_ok=True)
|
||||
with open(ABI_CACHE_FILE, 'w') as f:
|
||||
json.dump(platform_data, f, indent=4)
|
||||
logger.info(f"Fallback platform data cached to {ABI_CACHE_FILE}")
|
||||
except Exception as e:
|
||||
logger.warning(f"Failed to cache platform data: {e}")
|
||||
|
||||
return platform_data
|
||||
|
||||
|
||||
def get_platform_identifier() -> dict:
|
||||
"""
|
||||
Gets the native platform identifier, using a cached value if available.
|
||||
|
||||