feat(fourdst-cli): add bundle icon asset, macOS version targeting, PEM-based signing keys, and richer bundle validation output

assets/bundle/fourdst_bundle_icon.svg (new file, 158 lines)
@@ -0,0 +1,158 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!-- Created with Inkscape (http://www.inkscape.org/) -->
+<svg width="300mm" height="300mm" viewBox="0 0 300 300" version="1.1" id="svg1" ...>
+  ... [Inkscape-generated bundle icon, 158 lines / 11 KiB: a page outline with a
+  folded corner, a rounded teal square (#2a676d) framing a white gear-like rosette
+  around a circle, and the caption text "fbundle"; full path data omitted] ...
+</svg>

@@ -8,8 +8,7 @@ import datetime
 import yaml
 import zipfile
 from pathlib import Path

-from fourdst.cli.common.utils import get_platform_identifier, run_command
+from fourdst.cli.common.utils import get_platform_identifier, get_macos_targeted_platform_identifier, run_command


 bundle_app = typer.Typer()

@@ -19,7 +18,9 @@ def bundle_create(
     output_bundle: Path = typer.Option("bundle.fbundle", "--out", "-o", help="The path for the output bundle file."),
     bundle_name: str = typer.Option("MyPluginBundle", "--name", help="The name of the bundle."),
     bundle_version: str = typer.Option("0.1.0", "--ver", help="The version of the bundle."),
-    bundle_author: str = typer.Option("Unknown", "--author", help="The author of the bundle.")
+    bundle_author: str = typer.Option("Unknown", "--author", help="The author of the bundle."),
+    # --- NEW OPTION ---
+    target_macos_version: str = typer.Option(None, "--target-macos-version", help="The minimum macOS version to target (e.g., '12.0').")
 ):
     """
     Builds and packages one or more plugin projects into a single .fbundle file.
@@ -29,8 +30,22 @@ def bundle_create(
         shutil.rmtree(staging_dir)
     staging_dir.mkdir()

-    # Get the host platform identifier, triggering detection if needed.
-    host_platform = get_platform_identifier()
+    # --- MODIFIED LOGIC ---
+    # Prepare environment for the build
+    build_env = os.environ.copy()
+
+    # Determine the host platform identifier based on the target
+    if sys.platform == "darwin" and target_macos_version:
+        typer.secho(f"Targeting macOS version: {target_macos_version}", fg=typer.colors.CYAN)
+        host_platform = get_macos_targeted_platform_identifier(target_macos_version)
+
+        # Set environment variables for Meson to pick up
+        flags = f"-mmacosx-version-min={target_macos_version}"
+        build_env["CXXFLAGS"] = f"{build_env.get('CXXFLAGS', '')} {flags}".strip()
+        build_env["LDFLAGS"] = f"{build_env.get('LDFLAGS', '')} {flags}".strip()
+    else:
+        # Default behavior for Linux or non-targeted macOS builds
+        host_platform = get_platform_identifier()

     manifest = {
         "bundleName": bundle_name,
@@ -46,12 +61,15 @@ def bundle_create(
         plugin_name = plugin_dir.name
         print(f"--> Processing plugin: {plugin_name}")

-        # 1. Build the plugin
-        print(f"    - Compiling for host platform...")
+        # 1. Build the plugin using the prepared environment
+        print(f"    - Compiling for target platform...")
         build_dir = plugin_dir / "builddir"
-        if not build_dir.exists():
-            run_command(["meson", "setup", "builddir"], cwd=plugin_dir)
-        run_command(["meson", "compile", "-C", "builddir"], cwd=plugin_dir)
+        if build_dir.exists():
+            shutil.rmtree(build_dir)  # Reconfigure every time to apply env vars
+
+        # Pass the modified environment to the Meson commands
+        run_command(["meson", "setup", "builddir"], cwd=plugin_dir, env=build_env)
+        run_command(["meson", "compile", "-C", "builddir"], cwd=plugin_dir, env=build_env)

         # 2. Find the compiled artifact
         compiled_lib = next(build_dir.glob("lib*.so"), None) or next(build_dir.glob("lib*.dylib"), None)
@@ -63,16 +81,13 @@ def bundle_create(
         print("    - Packaging source code (respecting .gitignore)...")
         sdist_path = staging_dir / f"{plugin_name}_src.zip"

-        # Use git to list files, which automatically respects .gitignore
         git_check = run_command(["git", "rev-parse", "--is-inside-work-tree"], cwd=plugin_dir, check=False)

         files_to_include = []
         if git_check.returncode == 0:
-            # This is a git repo, use git to list files
             result = run_command(["git", "ls-files", "--cached", "--others", "--exclude-standard"], cwd=plugin_dir)
             files_to_include = [plugin_dir / f for f in result.stdout.strip().split('\n') if f]
         else:
-            # Not a git repo, fall back to os.walk and warn the user
             typer.secho(f"    - Warning: '{plugin_dir.name}' is not a git repository. Packaging all files.", fg=typer.colors.YELLOW)
             for root, _, files in os.walk(plugin_dir):
                 if 'builddir' in root:
@@ -89,9 +104,8 @@ def bundle_create(
         binaries_dir = staging_dir / "bin"
         binaries_dir.mkdir(exist_ok=True)

-        # Construct new filename with arch, os, and ABI tag
-        base_name = compiled_lib.stem  # e.g., "libplugin_a"
-        ext = compiled_lib.suffix  # e.g., ".so"
+        base_name = compiled_lib.stem
+        ext = compiled_lib.suffix
         triplet = host_platform["triplet"]
         abi_signature = host_platform["abi_signature"]
         tagged_filename = f"{base_name}.{triplet}.{abi_signature}{ext}"
@@ -109,7 +123,9 @@ def bundle_create(
             "binaries": [{
                 "platform": {
                     "triplet": host_platform["triplet"],
-                    "abi_signature": host_platform["abi_signature"]
+                    "abi_signature": host_platform["abi_signature"],
+                    # Adding arch separately for clarity, matching 'fill' command
+                    "arch": host_platform["arch"]
                 },
                 "path": staged_lib_path.relative_to(staging_dir).as_posix(),
                 "compiledOn": datetime.datetime.now().isoformat()
@@ -168,4 +168,4 @@ def bundle_fill(bundle_path: Path = typer.Argument(..., help="The .fbundle file

     finally:
         if staging_dir.exists():
             shutil.rmtree(staging_dir)

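With the new option in place, a macOS build can be pinned to an older deployment target. A hypothetical invocation (the installed executable name and plugin path are assumptions; this diff only shows the `bundle_app` sub-app and its `create` command):

    $ fourdst bundle create ./my_plugin --target-macos-version 12.0
    Targeting macOS version: 12.0

Meson only reads CXXFLAGS/LDFLAGS when the build directory is configured, which is why the modified loop deletes `builddir` and re-runs `meson setup` on every invocation instead of reusing an existing configuration.
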
@@ -8,20 +8,65 @@ import zipfile
 import hashlib
 from pathlib import Path

-try:
-    from cryptography.hazmat.primitives import serialization, hashes
-    from cryptography.hazmat.primitives.asymmetric import padding, rsa, ed25519
-    from cryptography.exceptions import InvalidSignature
-except ImportError:
-    print("Error: This CLI now requires 'cryptography'. Please install it.", file=sys.stderr)
-    print("Run: pip install cryptography", file=sys.stderr)
-    sys.exit(1)
+from cryptography.hazmat.primitives import serialization, hashes
+from cryptography.hazmat.primitives.asymmetric import padding, rsa, ed25519
+from cryptography.exceptions import InvalidSignature

 from fourdst.cli.common.config import LOCAL_TRUST_STORE_PATH
 from fourdst.cli.common.utils import get_platform_identifier, calculate_sha256, is_abi_compatible

 bundle_app = typer.Typer()

+def _reconstruct_canonical_checksum_list(staging_dir: Path, manifest: dict) -> tuple[str, list[str], list[str]]:
+    """
+    Reconstructs the canonical checksum list from the files on disk
+    and compares them against the checksums listed in the manifest.
+
+    Returns a tuple containing:
+    1. The canonical string of actual checksums to verify against the signature.
+    2. A list of files with checksum mismatches.
+    3. A list of files that are listed in the manifest but missing from disk.
+    """
+    checksum_map = {}
+    mismatch_errors = []
+    missing_files = []
+
+    all_files_in_manifest = []
+    # Gather all file paths from the manifest
+    for plugin_data in manifest.get('bundlePlugins', {}).values():
+        if 'sdist' in plugin_data and 'path' in plugin_data['sdist']:
+            all_files_in_manifest.append(plugin_data['sdist'])
+        if 'binaries' in plugin_data:
+            all_files_in_manifest.extend(plugin_data['binaries'])
+
+    for file_info in all_files_in_manifest:
+        path_str = file_info.get('path')
+        if not path_str:
+            continue
+
+        file_path = staging_dir / path_str
+        expected_checksum = file_info.get('checksum')
+
+        if not file_path.exists():
+            missing_files.append(path_str)
+            continue
+
+        # Calculate actual checksum from the file on disk
+        actual_checksum = "sha256:" + calculate_sha256(file_path)
+        checksum_map[path_str] = actual_checksum
+
+        # Compare with the checksum listed in the manifest
+        if expected_checksum and actual_checksum != expected_checksum:
+            mismatch_errors.append(path_str)
+
+    # Create the canonical string for signature verification from the actual file checksums
+    sorted_paths = sorted(checksum_map.keys())
+    canonical_list = [f"{path}:{checksum_map[path]}" for path in sorted_paths]
+    data_to_verify = "\n".join(canonical_list)
+
+    return data_to_verify, mismatch_errors, missing_files
+
+
 @bundle_app.command("inspect")
 def bundle_inspect(bundle_path: Path = typer.Argument(..., help="The .fbundle file to inspect.", exists=True)):
     """
@@ -32,12 +77,9 @@ def bundle_inspect(bundle_path: Path = typer.Argument(..., help="The .fbundle fi
         shutil.rmtree(staging_dir)

     try:
-        # Get current system info first
         host_platform = get_platform_identifier()

-        # 1. Unpack and load manifest
         with zipfile.ZipFile(bundle_path, 'r') as bundle_zip:
-            archive_files = set(bundle_zip.namelist())
             bundle_zip.extractall(staging_dir)

         manifest_path = staging_dir / "manifest.yaml"
@@ -48,8 +90,8 @@ def bundle_inspect(bundle_path: Path = typer.Argument(..., help="The .fbundle fi
         with open(manifest_path, 'r') as f:
             manifest = yaml.safe_load(f)

-        # 2. Print Header
         typer.secho(f"--- Bundle Inspection Report for: {bundle_path.name} ---", bold=True)
+        # ... (header printing code is unchanged) ...
         typer.echo(f"Name: {manifest.get('bundleName', 'N/A')}")
         typer.echo(f"Version: {manifest.get('bundleVersion', 'N/A')}")
         typer.echo(f"Author: {manifest.get('bundleAuthor', 'N/A')}")
@@ -58,6 +100,7 @@ def bundle_inspect(bundle_path: Path = typer.Argument(..., help="The .fbundle fi
         typer.secho(f"Host Arch: {host_platform['triplet']}", dim=True)
         typer.echo("-" * 50)

+
         # 3. Signature and Trust Verification
         fingerprint = manifest.get('bundleAuthorKeyFingerprint')
         sig_path = staging_dir / "manifest.sig"
@@ -65,76 +108,65 @@ def bundle_inspect(bundle_path: Path = typer.Argument(..., help="The .fbundle fi
         if not fingerprint or not sig_path.exists():
             typer.secho("Trust Status: 🟡 UNSIGNED", fg=typer.colors.YELLOW)
         else:
-            # Find the key in the local trust store
             trusted_key_path = None
             if LOCAL_TRUST_STORE_PATH.exists():
-                for key_file in LOCAL_TRUST_STORE_PATH.rglob("*.pub"):
-                    pub_key = serialization.load_ssh_public_key(key_file.read_bytes())
-                    pub_key_bytes = pub_key.public_bytes(
-                        encoding=serialization.Encoding.OpenSSH,
-                        format=serialization.PublicFormat.OpenSSH
-                    )
-                    pub_key_fingerprint = "sha256:" + hashlib.sha256(pub_key_bytes).hexdigest()
-                    if pub_key_fingerprint == fingerprint:
-                        trusted_key_path = key_file
-                        break
+                # Find the key in the local trust store
+                # ... (key finding logic is unchanged) ...
+                for key_file in LOCAL_TRUST_STORE_PATH.rglob("*.pem"):
+                    try:
+                        pub_der = (serialization.load_pem_public_key(key_file.read_bytes())
+                                   .public_bytes(
+                                       encoding=serialization.Encoding.DER,
+                                       format=serialization.PublicFormat.SubjectPublicKeyInfo
+                                   ))
+                        pub_key_fingerprint = "sha256:" + hashlib.sha256(pub_der).hexdigest()
+                        if pub_key_fingerprint == fingerprint:
+                            trusted_key_path = key_file
+                            break
+                    except Exception:
+                        continue

             if not trusted_key_path:
                 typer.secho(f"Trust Status: ⚠️ SIGNED but UNTRUSTED AUTHOR ({fingerprint})", fg=typer.colors.YELLOW)
             else:
+                # --- MODIFIED VERIFICATION LOGIC ---
                 try:
-                    pub_key_obj = serialization.load_ssh_public_key(trusted_key_path.read_bytes())
+                    pub_key_obj = serialization.load_pem_public_key(trusted_key_path.read_bytes())
                     signature = sig_path.read_bytes()
-                    manifest_content = manifest_path.read_bytes()
+
+                    # Reconstruct the data that was originally signed
+                    data_to_verify, checksum_errors, missing_files = _reconstruct_canonical_checksum_list(staging_dir, manifest)
+                    with open("data_to_verify.bin", "wb") as f:
+                        f.write(data_to_verify.encode('utf-8'))
+
+                    # Verify the signature against the reconstructed data
                     if isinstance(pub_key_obj, ed25519.Ed25519PublicKey):
-                        pub_key_obj.verify(signature, manifest_content)
+                        pub_key_obj.verify(signature, data_to_verify.encode('utf-8'))
                     elif isinstance(pub_key_obj, rsa.RSAPublicKey):
                         pub_key_obj.verify(
                             signature,
-                            manifest_content,
+                            data_to_verify.encode('utf-8'),
                             padding.PKCS1v15(),
                             hashes.SHA256()
                         )
-                    typer.secho(f"Trust Status: ✅ SIGNED and TRUSTED ({trusted_key_path.relative_to(LOCAL_TRUST_STORE_PATH)})", fg=typer.colors.GREEN)
+
+                    # If we reach here, the signature is cryptographically valid.
+                    # Now we check if the manifest's checksums match the actual file checksums.
+                    if checksum_errors or missing_files:
+                        typer.secho(f"Trust Status: ❌ INVALID - Files have been tampered with after signing.", fg=typer.colors.RED)
+                        for f in missing_files:
+                            typer.echo(f"  - Missing file listed in manifest: {f}")
+                        for f in checksum_errors:
+                            typer.echo(f"  - Checksum mismatch for: {f}")
+                    else:
+                        typer.secho(f"Trust Status: ✅ SIGNED and TRUSTED ({trusted_key_path.relative_to(LOCAL_TRUST_STORE_PATH)})", fg=typer.colors.GREEN)

                 except InvalidSignature:
-                    typer.secho(f"Trust Status: ❌ INVALID SIGNATURE ({fingerprint})", fg=typer.colors.RED)
+                    typer.secho(f"Trust Status: ❌ INVALID SIGNATURE - The bundle's integrity is compromised.", fg=typer.colors.RED)

         typer.echo("-" * 50)

-        # 4. Content Validation
-        typer.echo("Validating bundle contents...")
-        missing_files = []
-        checksum_errors = []
-
-        for plugin_name, plugin_data in manifest.get('bundlePlugins', {}).items():
-            sdist_path = plugin_data.get('sdist', {}).get('path')
-            if sdist_path and sdist_path not in archive_files:
-                missing_files.append(sdist_path)
-
-            for binary in plugin_data.get('binaries', []):
-                binary_path_str = binary.get('path')
-                if binary_path_str and binary_path_str not in archive_files:
-                    missing_files.append(binary_path_str)
-                elif binary_path_str:
-                    # Verify checksum if present
-                    expected_checksum = binary.get('checksum')
-                    if expected_checksum:
-                        actual_checksum = "sha256:" + calculate_sha256(staging_dir / binary_path_str)
-                        if actual_checksum != expected_checksum:
-                            checksum_errors.append(binary_path_str)
-
-        if not missing_files and not checksum_errors:
-            typer.secho("Content Validation: ✅ OK", fg=typer.colors.GREEN)
-        else:
-            typer.secho("Content Validation: ❌ FAILED", fg=typer.colors.RED)
-            for f in missing_files:
-                typer.echo(f"  - Missing file from archive: {f}")
-            for f in checksum_errors:
-                typer.echo(f"  - Checksum mismatch for: {f}")
-
-        # 5. Plugin Details
-        typer.echo("-" * 50)
+        # ... (Plugin Details section is unchanged) ...
         typer.secho("Available Plugins:", bold=True)
         for plugin_name, plugin_data in manifest.get('bundlePlugins', {}).items():
             typer.echo(f"\n  Plugin: {plugin_name}")

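Note the symmetry being set up here: `inspect` (above) rebuilds the same canonical string that `sign` (below) produces — `path:checksum` entries, sorted by path and joined with newlines, with each checksum itself carrying a `sha256:` prefix. An illustrative two-entry example (file names and hashes are made up to show the shape):

    bin/libplugin_a.x86_64-linux.gcc-libstdc++-13.1-cxx11.so:sha256:3f2ab1…
    plugin_a_src.zip:sha256:91bc44…
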
@@ -7,26 +7,67 @@ import zipfile
 import hashlib
 from pathlib import Path
 import sys
+import subprocess

-try:
-    from cryptography.hazmat.primitives import serialization, hashes
-    from cryptography.hazmat.primitives.asymmetric import padding, rsa, ed25519
-except ImportError:
-    print("Error: This CLI now requires 'cryptography'. Please install it.", file=sys.stderr)
-    print("Run: pip install cryptography", file=sys.stderr)
-    sys.exit(1)

 from fourdst.cli.common.utils import calculate_sha256

 bundle_app = typer.Typer()

+def _create_canonical_checksum_list(staging_dir: Path, manifest: dict) -> str:
+    """
+    Creates a deterministic, sorted string of all file paths and their checksums.
+    This string is the actual data that will be signed.
+    """
+    checksum_map = {}
+
+    # Iterate through all plugins to find all files to be checksummed
+    for plugin_data in manifest.get('bundlePlugins', {}).values():
+        # Add sdist (source code zip) to the list
+        sdist_info = plugin_data.get('sdist', {})
+        if 'path' in sdist_info:
+            file_path = staging_dir / sdist_info['path']
+            if file_path.exists():
+                checksum = "sha256:" + calculate_sha256(file_path)
+                # Also update the manifest with the sdist checksum
+                sdist_info['checksum'] = checksum
+                checksum_map[sdist_info['path']] = checksum
+            else:
+                # This case should ideally be caught by a validation step
+                typer.secho(f"Warning: sdist file not found: {sdist_info['path']}", fg=typer.colors.YELLOW)
+
+        # Add all binaries to the list
+        for binary in plugin_data.get('binaries', []):
+            if 'path' in binary:
+                file_path = staging_dir / binary['path']
+                if file_path.exists():
+                    checksum = "sha256:" + calculate_sha256(file_path)
+                    # Update the manifest with the binary checksum
+                    binary['checksum'] = checksum
+                    checksum_map[binary['path']] = checksum
+                else:
+                    typer.secho(f"Warning: Binary file not found: {binary['path']}", fg=typer.colors.YELLOW)
+
+    # Sort the file paths to ensure a deterministic order
+    sorted_paths = sorted(checksum_map.keys())
+
+    # Create the final canonical string (e.g., "path1:checksum1\npath2:checksum2")
+    canonical_list = [f"{path}:{checksum_map[path]}" for path in sorted_paths]
+
+    return "\n".join(canonical_list)
+
+
 @bundle_app.command("sign")
 def bundle_sign(
     bundle_path: Path = typer.Argument(..., help="The .fbundle file to sign.", exists=True),
     private_key: Path = typer.Option(..., "--key", "-k", help="Path to the author's private signing key.", exists=True)
 ):
     """
-    Signs a bundle with an author's private key, adding checksums and a signature.
+    Signs a bundle with an author's private key.
+
+    This process calculates checksums for all source and binary files,
+    adds them to the manifest, and then signs a canonical list of these
+    checksums to ensure the integrity of the entire bundle.
     """
     print(f"Signing bundle: {bundle_path}")
     staging_dir = Path("temp_sign_staging")
@@ -42,57 +83,65 @@ def bundle_sign(
         print("Error: manifest.yaml not found in bundle.", file=sys.stderr)
         raise typer.Exit(code=1)

-    # 2. Load private key and derive public key to get fingerprint
-    with open(private_key, "rb") as key_file:
-        priv_key_obj = serialization.load_ssh_private_key(key_file.read(), password=None)
-
-    pub_key_obj = priv_key_obj.public_key()
-    pub_key_bytes = pub_key_obj.public_bytes(
-        encoding=serialization.Encoding.OpenSSH,
-        format=serialization.PublicFormat.OpenSSH
-    )
-    fingerprint = "sha256:" + hashlib.sha256(pub_key_bytes).hexdigest()
-    print(f"  - Signing with key fingerprint: {fingerprint}")
+    # 2. Ensure PEM private key and derive public key fingerprint via openssl
+    if private_key.suffix.lower() != ".pem":
+        typer.secho("Error: Private key must be a .pem file.", fg=typer.colors.RED)
+        raise typer.Exit(code=1)
+    typer.echo("  - Deriving public key fingerprint via openssl...")
+    try:
+        proc = subprocess.run(
+            ["openssl", "pkey", "-in", str(private_key), "-pubout", "-outform", "DER"],
+            capture_output=True, check=True
+        )
+        pub_der = proc.stdout
+        fingerprint = "sha256:" + hashlib.sha256(pub_der).hexdigest()
+        typer.echo(f"  - Signing with key fingerprint: {fingerprint}")
+    except subprocess.CalledProcessError as e:
+        typer.secho(f"Error extracting public key: {e.stderr.decode().strip()}", fg=typer.colors.RED)
+        raise typer.Exit(code=1)

-    # 3. Update manifest with checksums and fingerprint
+    # 3. Load manifest and generate the canonical checksum list
     with open(manifest_path, 'r') as f:
         manifest = yaml.safe_load(f)

+    print("  - Calculating checksums for all source and binary files...")
+    # This function now also modifies the manifest in-place to add the checksums
+    data_to_sign = _create_canonical_checksum_list(staging_dir, manifest)
+
+    # Add the key fingerprint to the manifest
     manifest['bundleAuthorKeyFingerprint'] = fingerprint
-    for plugin in manifest['bundlePlugins'].values():
-        for binary in plugin.get('binaries', []):
-            binary_path = staging_dir / binary['path']
-            if binary_path.exists():
-                binary['checksum'] = "sha256:" + calculate_sha256(binary_path)
-            else:
-                binary['checksum'] = "MISSING_FILE"

+    # 4. Write the updated manifest back to the staging directory
     with open(manifest_path, 'w') as f:
         yaml.dump(manifest, f, sort_keys=False)
     print("  - Added file checksums and key fingerprint to manifest.")

-    # 4. Sign the manifest
-    manifest_content = manifest_path.read_bytes()
-
-    if isinstance(priv_key_obj, ed25519.Ed25519PrivateKey):
-        signature = priv_key_obj.sign(manifest_content)
-    elif isinstance(priv_key_obj, rsa.RSAPrivateKey):
-        signature = priv_key_obj.sign(
-            manifest_content,
-            padding.PKCS1v15(),
-            hashes.SHA256()
-        )
-    else:
-        print("Error: Unsupported private key type for signing.", file=sys.stderr)
-        raise typer.Exit(code=1)
-
-    sig_path = staging_dir / "manifest.sig"
-    sig_path.write_bytes(signature)
-    print("  - Created manifest.sig.")
-
-    # 5. Repackage the bundle
+    # 5. Sign the canonical checksum list
+    typer.echo("  - Signing the canonical checksum list...")
+    canonical_temp_data_file = staging_dir / "canonical_checksums.txt"
+    canonical_temp_data_file.write_text(data_to_sign, encoding='utf-8')
+    sig_path = staging_dir / "manifest.sig"
+    try:
+        # We sign the string data directly, not the manifest file
+        cmd_list = [
+            "openssl",
+            "pkeyutl",
+            "-sign",
+            "-in", str(canonical_temp_data_file),
+            "-inkey", str(private_key),
+            "-out", str(sig_path)
+        ]
+        subprocess.run(
+            cmd_list,
+            check=True,
+            capture_output=True
+        )
+        typer.echo(f"  - Created manifest.sig ($ {' '.join(cmd_list)})")
+    except subprocess.CalledProcessError as e:
+        typer.secho(f"Error signing manifest: {e.stderr.decode().strip()}", fg=typer.colors.RED)
+        raise typer.Exit(code=1)

+    # 6. Repackage the bundle
     with zipfile.ZipFile(bundle_path, 'w', zipfile.ZIP_DEFLATED) as bundle_zip:
         for file_path in staging_dir.rglob('*'):
             if file_path.is_file():

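Because signing goes through `openssl pkeyutl` while verification goes through `cryptography`, the two halves must agree on both the key format (PEM) and the signed payload (the canonical checksum list, not the manifest). A minimal verification sketch mirroring the `inspect` logic above, assuming an Ed25519 key pair from `keys generate` and a canonical string rebuilt from an unpacked bundle:

    from pathlib import Path
    from cryptography.hazmat.primitives import serialization
    from cryptography.exceptions import InvalidSignature

    pub_key = serialization.load_pem_public_key(Path("author_key.pub.pem").read_bytes())
    signature = Path("manifest.sig").read_bytes()
    # data_to_verify is the canonical string rebuilt from the staged files,
    # exactly as _reconstruct_canonical_checksum_list does above.
    data_to_verify = "..."
    try:
        pub_key.verify(signature, data_to_verify.encode("utf-8"))
        print("signature OK")
    except InvalidSignature:
        print("signature INVALID")
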
@@ -9,6 +9,7 @@ import hashlib
 from rich.console import Console
 from rich.panel import Panel
 from rich.text import Text
+from rich.table import Table

 console = Console()

@@ -20,14 +21,18 @@ def _calculate_sha256(file_path: Path) -> str:
             sha256_hash.update(byte_block)
     return sha256_hash.hexdigest()

-def _validate_bundle_directory(path: Path, is_temp: bool = False):
+def _validate_bundle_directory(path: Path, is_temp: bool = False, display_name: str = None):
     """Validates a directory that is structured like an unpacked bundle."""
     title = "Validating Pre-Bundle Directory" if not is_temp else "Validating Bundle Contents"
-    console.print(Panel(f"{title}: [bold]{path.name}[/bold]", border_style="blue"))
+    name = display_name or path.name
+    console.print(Panel(f"{title}: [bold]{name}[/bold]", border_style="blue"))

     errors = 0
     warnings = 0

+    # Section 1: Manifest file check
+    console.print(Panel("1. Manifest File Check", border_style="cyan"))
+
     def check(condition, success_msg, error_msg, is_warning=False):
         nonlocal errors, warnings
         if condition:
@@ -55,6 +60,7 @@ def _validate_bundle_directory(path: Path, is_temp: bool = False):
         raise typer.Exit(code=1)

     # 2. Check manifest content
+    console.print(Panel("2. Manifest Content Validation", border_style="cyan"))
     check(manifest is not None, "Manifest is not empty.", "Manifest file is empty.", is_warning=True)
     check('bundleName' in manifest, "Manifest contains 'bundleName'.", "Manifest is missing 'bundleName'.")
     check('bundleVersion' in manifest, "Manifest contains 'bundleVersion'.", "Manifest is missing 'bundleVersion'.")
@@ -62,9 +68,25 @@ def _validate_bundle_directory(path: Path, is_temp: bool = False):
     plugins = manifest.get('bundlePlugins', {})
     check(plugins, "Manifest contains 'bundlePlugins' section.", "Manifest is missing 'bundlePlugins' section.")

+    # Build Manifest Validation table
+    manifest_table = Table(title="Manifest Validation")
+    manifest_table.add_column("Check")
+    manifest_table.add_column("Status")
+    manifest_table.add_row("manifest.yaml exists", "✅" if manifest_file.is_file() else "❌")
+    # YAML parse status already captured by exception above
+    manifest_table.add_row("Manifest parses as YAML", "✅")
+    manifest_table.add_row("Manifest not empty", "✅" if manifest is not None else "⚠️")
+    manifest_table.add_row("bundleName present", "✅" if 'bundleName' in manifest else "❌")
+    manifest_table.add_row("bundleVersion present", "✅" if 'bundleVersion' in manifest else "❌")
+    has_plugins = bool(manifest.get('bundlePlugins'))
+    manifest_table.add_row("bundlePlugins section", "✅" if has_plugins else "❌")
+    console.print(manifest_table)
+    plugins = manifest.get('bundlePlugins', {})
+
     # 3. Check files listed in manifest
+    console.print(Panel("3. Plugin Validation", border_style="magenta"))
     for name, data in plugins.items():
-        console.print(f"\n--- Validating plugin: [bold cyan]{name}[/bold cyan] ---")
+        console.print(Panel(f"Plugin: [bold cyan]{name}[/bold cyan]", border_style="magenta"))
         sdist_info = data.get('sdist', {})
         sdist_path_str = sdist_info.get('path')
@@ -88,23 +110,69 @@ def _validate_bundle_directory(path: Path, is_temp: bool = False):
                         f"Checksum mismatch for {bin_path_str}.\n  Expected: {expected_checksum}\n  Actual: {actual_checksum}"
                     )

+    # Build Plugin Validation table
+    plugin_table = Table(title="Plugin Validation")
+    plugin_table.add_column("Plugin")
+    plugin_table.add_column("Sdist Defined")
+    plugin_table.add_column("Sdist Exists")
+    plugin_table.add_column("Binaries OK")
+    plugin_table.add_column("Checksums OK")
+    for name, data in plugins.items():
+        # sdist checks
+        sdist_path_str = data.get('sdist', {}).get('path')
+        sdist_defined = bool(sdist_path_str)
+        sdist_exists = sdist_defined and (path / sdist_path_str).exists()
+        # binary & checksum checks
+        binaries = data.get('binaries', [])
+        binaries_ok = all(b.get('path') and (path / b['path']).exists() for b in binaries)
+        checksums_ok = all(('checksum' in b and ("sha256:" + _calculate_sha256(path / b['path'])) == b['checksum']) for b in binaries)
+        plugin_table.add_row(
+            name,
+            "✅" if sdist_defined else "❌",
+            "✅" if sdist_exists else "❌",
+            "✅" if binaries_ok else "❌",
+            "✅" if checksums_ok else "❌"
+        )
+    console.print(plugin_table)
+
     # 4. Check for signature
+    console.print(Panel("4. Signature Check", border_style="yellow"))
     check((path / "manifest.sig").exists(), "Signature file 'manifest.sig' found.", "Signature file 'manifest.sig' is missing.", is_warning=True)

+    # Build Signature Check table
+    sig_table = Table(title="Signature Validation")
+    sig_table.add_column("Item")
+    sig_table.add_column("Status")
+    sig_exists = (path / "manifest.sig").exists()
+    sig_table.add_row(
+        "manifest.sig",
+        "✅" if sig_exists else "⚠️"
+    )
+    console.print(sig_table)

     # Final summary
     console.print("-" * 40)
+    # Display summary in a table
+
+    summary_table = Table(title="Validation Summary")
+    summary_table.add_column("Result")
+    summary_table.add_column("Errors", justify="right")
+    summary_table.add_column("Warnings", justify="right")

     if errors == 0:
-        console.print(Panel(
-            f"[bold green]Validation Passed[/bold green]\nWarnings: {warnings}",
-            title="Result",
-            border_style="green"
-        ))
+        result = "Passed"
+        style = "green"
     else:
-        console.print(Panel(
-            f"[bold red]Validation Failed[/bold red]\nErrors: {errors}\nWarnings: {warnings}",
-            title="Result",
-            border_style="red"
-        ))
+        result = "Failed"
+        style = "red"
+
+    summary_table.add_row(
+        f"[bold {style}]{result}[/bold {style}]",
+        str(errors),
+        str(warnings)
+    )
+    console.print(summary_table)
+    if errors != 0:
         raise typer.Exit(code=1)

 def _validate_bundle_file(bundle_path: Path):
@@ -114,7 +182,7 @@ def _validate_bundle_file(bundle_path: Path):
     try:
         with zipfile.ZipFile(bundle_path, 'r') as bundle_zip:
             bundle_zip.extractall(temp_dir)
-        _validate_bundle_directory(temp_dir, is_temp=True)
+        _validate_bundle_directory(temp_dir, is_temp=True, display_name=bundle_path.name)
     except zipfile.BadZipFile:
         console.print(Panel(f"[red]Error: '{bundle_path.name}' is not a valid zip file.[/red]", title="Validation Error"))
         raise typer.Exit(code=1)

@@ -33,12 +33,13 @@ def get_template_content(template_name: str) -> str:
         print(f"Error: Template file '{template_name}' not found.", file=sys.stderr)
         sys.exit(1)

-def run_command(command: list[str], cwd: Path = None, check=True, display_output: bool = False):
-    """Runs a command, optionally displaying its output in a formatted box."""
+def run_command(command: list[str], cwd: Path = None, check=True, display_output: bool = False, env: dict = None):
+    """Runs a command, optionally displaying its output and using a custom environment."""
     command_str = ' '.join(command)

     try:
-        result = subprocess.run(command, check=check, capture_output=True, text=True, cwd=cwd)
+        # Pass the env dictionary to subprocess.run
+        result = subprocess.run(command, check=check, capture_output=True, text=True, cwd=cwd, env=env)

         if display_output and (result.stdout or result.stderr):
             output_text = ""
@@ -72,7 +73,7 @@ def run_command(command: list[str], cwd: Path = None, check=True, display_output
         raise typer.Exit(code=1)
     return e

-def _detect_and_cache_abi(cross_file: Path = None):
+def _detect_and_cache_abi():
    """
    Compiles and runs a C++ program to detect the compiler ABI, then caches it.
    """
@@ -103,11 +104,23 @@ def _detect_and_cache_abi():

     compiler = abi_details.get('compiler', 'unk_compiler')
     stdlib = abi_details.get('stdlib', 'unk_stdlib')
-    stdlib_version = abi_details.get('stdlib_version', 'unk_stdlib_version')
+
+    # --- MODIFIED LOGIC FOR MACOS VERSIONING ---
+    # On macOS, the OS version is more useful than the internal libc++ version.
+    # But for the generic host detection, we still use the detected version.
+    # The targeting logic will override this.
+    if sys.platform == "darwin":
+        # The C++ detector provides the internal _LIBCPP_VERSION
+        stdlib_version = abi_details.get('stdlib_version', 'unk_stdlib_version')
+        detected_os = "macos"
+    else:
+        # On Linux, this will be the glibc version
+        stdlib_version = abi_details.get('stdlib_version', 'unk_stdlib_version')
+        detected_os = abi_details.get("os", "linux")

     abi = abi_details.get('abi', 'unk_abi')
     abi_string = f"{compiler}-{stdlib}-{stdlib_version}-{abi}"

-    detected_os = abi_details.get("os", "unknown_os")
     arch = platform.machine()

     platform_identifier = {
@@ -140,6 +153,39 @@ def get_platform_identifier() -> dict:
     else:
         return _detect_and_cache_abi()

+def get_macos_targeted_platform_identifier(target_version: str) -> dict:
+    """
+    Generates a platform identifier for a specific target macOS version.
+    This bypasses host detection for the version string.
+    """
+    # We still need the host's compiler info, so we run detection if not cached.
+    host_platform = get_platform_identifier()
+    host_details = host_platform['details']
+
+    compiler = host_details.get('compiler', 'clang')
+    stdlib = host_details.get('stdlib', 'libc++')
+    abi = host_details.get('abi', 'libc++_abi')
+    arch = platform.machine()
+
+    abi_string = f"{compiler}-{stdlib}-{target_version}-{abi}"
+
+    return {
+        "triplet": f"{arch}-macos",
+        "abi_signature": abi_string,
+        "details": {
+            "os": "macos",
+            "compiler": compiler,
+            "compiler_version": host_details.get('compiler_version'),
+            "stdlib": stdlib,
+            "stdlib_version": target_version,  # The key change is here
+            "abi": abi,
+        },
+        "is_native": True,
+        "cross_file": None,
+        "docker_image": None,
+        "arch": arch
+    }
+
 def get_available_build_targets() -> list:
     """Gets native, cross-compilation, and Docker build targets."""
     targets = [get_platform_identifier()]

@@ -3,33 +3,58 @@
|
|||||||
import typer
|
import typer
|
||||||
import sys
|
import sys
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from fourdst.cli.common.utils import run_command
|
from cryptography.hazmat.primitives.asymmetric import ed25519, rsa
|
||||||
|
from cryptography.hazmat.primitives import serialization
|
||||||
|
|
||||||
keys_app = typer.Typer()
|
keys_app = typer.Typer()
|
||||||
|
|
||||||
@keys_app.command("generate")
|
@keys_app.command("generate")
|
||||||
def keys_generate(
|
def keys_generate(
|
||||||
key_name: str = typer.Option("author_key", "--name", "-n", help="The base name for the generated key files.")
|
key_name: str = typer.Option("author_key", "--name", "-n", help="The base name for the generated key files."),
|
||||||
|
key_type: str = typer.Option("ed25519", "--type", "-t", help="Type of key to generate (ed25519|rsa).", case_sensitive=False)
|
||||||
):
|
):
|
||||||
"""
|
"""
|
||||||
Generates a new Ed25519 key pair for signing bundles.
|
Generates a new Ed25519 or RSA key pair for signing bundles.
|
||||||
"""
|
"""
|
||||||
private_key_path = Path(f"{key_name}")
|
# Define PEM-formatted key file paths
|
||||||
public_key_path = Path(f"{key_name}.pub")
|
private_key_path = Path(f"{key_name}.pem")
|
||||||
|
public_key_path = Path(f"{key_name}.pub.pem")
|
||||||
|
|
||||||
if private_key_path.exists() or public_key_path.exists():
|
if private_key_path.exists() or public_key_path.exists():
|
||||||
print(f"Error: Key files '{private_key_path}' or '{public_key_path}' already exist.", file=sys.stderr)
|
print(f"Error: Key files '{private_key_path}' or '{public_key_path}' already exist.", file=sys.stderr)
|
||||||
raise typer.Exit(code=1)
|
raise typer.Exit(code=1)
|
||||||
|
|
||||||
print("Generating Ed25519 key pair...")
|
# Generate key based on requested type
|
||||||
run_command([
|
if key_type.lower() == "ed25519":
|
||||||
"ssh-keygen",
|
typer.echo("Generating Ed25519 key pair in PEM format via cryptography...")
|
||||||
"-t", "ed25519",
|
private_key_obj = ed25519.Ed25519PrivateKey.generate()
|
||||||
"-f", str(private_key_path),
|
elif key_type.lower() == "rsa":
|
||||||
"-N", "", # No passphrase
|
typer.echo("Generating RSA-2048 key pair in PEM format via cryptography...")
|
||||||
"-C", "fourdst bundle signing key"
|
private_key_obj = rsa.generate_private_key(public_exponent=65537, key_size=2048)
|
||||||
])
|
else:
|
||||||
print("\n✅ Keys generated successfully!")
|
typer.secho(f"Unsupported key type: {key_type}", fg=typer.colors.RED)
|
||||||
|
raise typer.Exit(code=1)
|
||||||
|
# Serialize private key to PEM
|
||||||
|
priv_pem = private_key_obj.private_bytes(
|
||||||
|
encoding=serialization.Encoding.PEM,
|
||||||
|
format=serialization.PrivateFormat.PKCS8,
|
||||||
|
encryption_algorithm=serialization.NoEncryption()
|
||||||
|
)
|
||||||
|
private_key_path.write_bytes(priv_pem)
|
||||||
|
# Derive and serialize public key to PEM
|
||||||
|
public_key_obj = private_key_obj.public_key()
|
||||||
|
pub_pem = public_key_obj.public_bytes(
|
||||||
|
encoding=serialization.Encoding.PEM,
|
||||||
|
format=serialization.PublicFormat.SubjectPublicKeyInfo
|
||||||
|
)
|
||||||
|
public_key_path.write_bytes(pub_pem)
|
||||||
|
# Also write OpenSSH-compatible public key
|
||||||
|
openssh_pub = public_key_obj.public_bytes(
|
||||||
|
encoding=serialization.Encoding.OpenSSH,
|
||||||
|
format=serialization.PublicFormat.OpenSSH
|
||||||
|
)
|
||||||
|
Path(f"{key_name}.pub").write_bytes(openssh_pub)
|
||||||
|
print("\n✅ PEM and OpenSSH-compatible keys generated successfully!")
|
||||||
print(f" -> Private Key (KEEP SECRET): {private_key_path.resolve()}")
|
print(f" -> Private Key (KEEP SECRET): {private_key_path.resolve()}")
|
||||||
print(f" -> Public Key (SHARE): {public_key_path.resolve()}")
|
print(f" -> Public Key (SHARE): {public_key_path.resolve()}")
|
||||||
print("\nShare the public key with users who need to trust your bundles.")
|
print("\nShare the public key with users who need to trust your bundles.")
|
||||||
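A minimal round-trip check (not part of the diff) that the generated PEM pair loads and verifies with cryptography; the file names assume the default --name of "author_key" and the Ed25519 path:

from pathlib import Path
from cryptography.hazmat.primitives.serialization import (
    load_pem_private_key,
    load_pem_public_key,
)

# Load the freshly generated pair (Ed25519 assumed; RSA would need hash/padding args).
private_key = load_pem_private_key(Path("author_key.pem").read_bytes(), password=None)
public_key = load_pem_public_key(Path("author_key.pub.pem").read_bytes())

message = b"fourdst bundle payload"
signature = private_key.sign(message)   # Ed25519 signing takes only the message
public_key.verify(signature, message)   # raises InvalidSignature on tampering
print("signature round-trip OK")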
@@ -130,6 +130,8 @@ def parse_cpp_header(header_path: Path):
     if not cindex.Config.loaded:
         try:
             # Attempt to find libclang automatically. This may need to be configured by the user.
+            # On systems like macOS, you might need to point to the specific version, e.g.:
+            # cindex.Config.set_library_path('/opt/homebrew/opt/llvm/lib')
             cindex.Config.set_library_file(cindex.conf.get_filename())
         except cindex.LibclangError as e:
             print(f"Error: libclang library not found. Please ensure it's installed and in your system's path.", file=sys.stderr)
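If automatic lookup fails, one way to make libclang discovery explicit is an environment override; a sketch under the assumption that CLANG_LIBRARY_FILE is a project-local convention (it is not a libclang feature), with a Homebrew LLVM path shown as an example fallback:

import os
from clang import cindex

# Hypothetical override hook: honor an env var before falling back to a
# well-known install location (Homebrew LLVM on Apple Silicon shown).
override = os.environ.get("CLANG_LIBRARY_FILE")
if override:
    cindex.Config.set_library_file(override)
elif not cindex.Config.loaded:
    cindex.Config.set_library_path("/opt/homebrew/opt/llvm/lib")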
@@ -137,25 +139,53 @@
             raise typer.Exit(code=1)
 
     index = cindex.Index.create()
-    translation_unit = index.parse(str(header_path))
+    # Pass standard C++ arguments to the parser. This improves reliability.
+    args = ['-x', 'c++', '-std=c++17']
+    translation_unit = index.parse(str(header_path), args=args)
+
+    if not translation_unit:
+        print(f"Error: Unable to parse the translation unit {header_path}", file=sys.stderr)
+        raise typer.Exit(code=1)
 
     interfaces = {}
 
-    for node in translation_unit.cursor.get_children():
-        if node.kind == cindex.CursorKind.CLASS_DECL and node.is_pure_virtual():
-            # Found a class with pure virtual methods, likely an interface
-            interface_name = node.spelling
-            print(f"Found interface: {interface_name}")
-
-            methods = []
-            for method in node.get_children():
-                if method.kind == cindex.CursorKind.CXX_METHOD and method.is_pure_virtual():
-                    # Only consider pure virtual methods
-                    method_signature = f"{method.return_type.spelling} {method.spelling}({', '.join([arg.type.spelling for arg in method.get_arguments()])})"
-                    method_body = "// TODO: Implement this method"
-                    methods.append({"signature": method_signature, "body": method_body})
-                    print(f"  Found pure virtual method: {method_signature}")
-
-            interfaces[interface_name] = methods
-
-    return interfaces
+    # --- Recursive function to walk the AST ---
+    def walk_ast(node):
+        # We are looking for class definitions, not just declarations.
+        if node.kind == cindex.CursorKind.CLASS_DECL and node.is_definition():
+            # Collect pure virtual methods within this class
+            pv_methods = [m for m in node.get_children()
+                          if m.kind == cindex.CursorKind.CXX_METHOD and m.is_pure_virtual_method()]
+
+            # If it has pure virtual methods, it's an interface we care about
+            if pv_methods:
+                interface_name = node.spelling
+                methods = []
+                print(f"Found interface: '{interface_name}'")
+                for method in pv_methods:
+                    # Get the string representation of all argument types
+                    args_str = ', '.join([arg.type.spelling for arg in method.get_arguments()])
+
+                    # Reconstruct the signature from its parts. This is much more reliable.
+                    sig = f"{method.result_type.spelling} {method.spelling}({args_str})"
+
+                    # Append 'const' if the method is a const method
+                    if method.is_const_method():
+                        sig += " const"
+
+                    methods.append({"signature": sig, "body": "    // TODO: Implement this method"})
+                    print(f"  -> Found pure virtual method: {sig}")
+
+                interfaces[interface_name] = methods
+
+        # --- The recursive step ---
+        # Recurse for children of this node
+        for child in node.get_children():
+            walk_ast(child)
+
+    # Start the traversal from the root of the AST
+    walk_ast(translation_unit.cursor)
+
+    return interfaces
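An end-to-end sketch of what the reworked parser should return for a small header; the header name, contents, and exact type spellings are illustrative only:

from pathlib import Path

header = Path("iplugin.h")
header.write_text(
    "class IPlugin {\n"
    "public:\n"
    "    virtual ~IPlugin() = default;\n"
    "    virtual int run(int argc) = 0;\n"
    "    virtual const char *name() const = 0;\n"
    "};\n"
)

interfaces = parse_cpp_header(header)
# Expected shape (spellings may vary slightly between libclang versions):
# {"IPlugin": [
#     {"signature": "int run(int argc)", "body": "    // TODO: Implement this method"},
#     {"signature": "const char * name() const", "body": "    // TODO: Implement this method"},
# ]}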
@@ -26,7 +26,8 @@ dependencies = [
     "questionary",
     "rich",
     "pyyaml",
-    "cryptography"
+    "cryptography",
+    "pyOpenSSL"
 ]
 
 [project.scripts]