mirror of
https://github.com/gumyr/build123d.git
synced 2026-04-27 07:11:05 -07:00
Add build modules and update repo configs
This commit is contained in:
parent
a0a24c3eba
commit
5c0046bf37
4 changed files with 436 additions and 2 deletions
7
.gitignore
vendored
7
.gitignore
vendored
|
|
@ -16,12 +16,15 @@ docs/_build/
|
|||
# User generated debris
|
||||
*.swp
|
||||
*.log
|
||||
*.3mf
|
||||
*.glb
|
||||
*.step
|
||||
*.STEP
|
||||
*.stl
|
||||
*.svg
|
||||
*.dxf
|
||||
*.3mf
|
||||
*.png
|
||||
*.jpg
|
||||
*.svg
|
||||
|
||||
#mypy cache
|
||||
.mypy_cache
|
||||
|
|
|
|||
265
docs/build_artifacts.py
Normal file
265
docs/build_artifacts.py
Normal file
|
|
@ -0,0 +1,265 @@
|
|||
import argparse
|
||||
import contextlib
|
||||
import importlib
|
||||
import hashlib
|
||||
import json
|
||||
import sys
|
||||
import shutil
|
||||
|
||||
from pathlib import Path
|
||||
from collections.abc import Iterable
|
||||
|
||||
from tcv_screenshots import get_saved_models
|
||||
from process_image import batch_screenshots, batch_thumbnails
|
||||
|
||||
|
||||
# Root of the docs tree (the directory containing this script)
DOCS_ROOT = Path(__file__).parent
# Destination (relative to DOCS_ROOT) where generated assets are collected
ARTIFACT_FOLDER = "_build/assets"
# Base name of the per-folder asset config file (read as "<name>.json")
ASSET_CONFIG_NAME = "asset_config"
# Defaults merged under each saved model's own config before rendering
DEFAULT_MODEL_CONFIG = {
    "cadWidth": 1000,
    "height": 1000,
}
|
||||
|
||||
|
||||
@contextlib.contextmanager
def add_to_syspath(paths: Iterable[Path]):
    """Context manager that appends *paths* to ``sys.path``, restoring the
    original search path on exit (even if the body raises)."""
    snapshot = sys.path.copy()
    sys.path.extend(str(entry) for entry in paths)
    try:
        yield
    finally:
        # In-place slice assignment keeps any aliases of sys.path valid
        sys.path[:] = snapshot
|
||||
|
||||
|
||||
def hash_folders(folders: Iterable[Path]) -> str:
    """Compute a SHA-256 digest over the contents of *folders*.

    Both each file's folder-relative path and its bytes are folded into the
    digest, so renaming or moving a file is detected as a change (previously
    only file contents were hashed, making renames invisible). Directories and
    ``.asset-stamp`` files are skipped.

    Args:
        folders: folders whose contents should be hashed

    Returns:
        Hex digest string; stable for identical trees, different otherwise.
    """
    h = hashlib.sha256()
    for folder in sorted(Path(f).resolve() for f in folders):
        for p in sorted(folder.rglob("*")):
            if p.is_dir() or p.name == ".asset-stamp":
                continue
            # as_posix() keeps the digest platform-independent
            h.update(p.relative_to(folder).as_posix().encode("utf-8"))
            h.update(p.read_bytes())

    return h.hexdigest()
|
||||
|
||||
|
||||
def build_artifacts(folder: Path, *, force=False):
    """Generate and copy build artifacts as defined by a folder's asset config

    The config is imported if it exists and sources are added. Sources are checked for changes by
    hash and skipped if no changes.
    The artifact destination is set to the cwd as a destination for in process artifact generation
    and sources are temporarily added to path.

    If the config has `save_models`, that method is run to add screenshot models to global list.
    Likewise, all `to_generate` items are imported to add any screenshot models to global list and
    generate any assets to artifact destination. These imports are expected to run all required asset
    creation outside of methods and class definitions.

    Args:
        folder: documentation source folder to process
        force: regenerate artifacts even when the source hash is unchanged
    """
    sources = {folder}
    destination = DOCS_ROOT / ARTIFACT_FOLDER / folder.name
    config_path = folder / (ASSET_CONFIG_NAME + ".json")
    empty_config = {
        "sources": [],
        "build": [],
        "thumbnails": [],
        "exceptions": [],
    }

    if not config_path.exists():
        # NOTE(review): folders without a config currently produce nothing;
        # copying their raw assets over is a possible future behavior.
        return

    # parents/exist_ok so direct calls work before the assets root exists
    # (previously a bare mkdir() that failed without the parent tree)
    destination.mkdir(parents=True, exist_ok=True)

    # Generation scripts write into the cwd, so run from the destination
    with contextlib.chdir(destination):
        # Import asset config and fill in any missing keys with empty defaults
        with open(config_path, "r", encoding="utf-8") as f:
            config = json.load(f)
        config = {**empty_config, **config}

        for source in config["sources"]:
            sources.add((DOCS_ROOT / source).resolve())

        # Check for changes to sources; a corrupt stamp now forces a rebuild
        # instead of crashing the whole run
        new_hash = hash_folders(sources)
        stamp = destination / ".asset-stamp"
        if stamp.exists() and not force:
            try:
                old_hash = json.loads(stamp.read_text())["input_hash"]
            except (json.JSONDecodeError, KeyError):
                old_hash = None
            if old_hash == new_hash:
                return

        # Copy assets not found in static
        copy_assets(sources - {folder}, destination)

        with add_to_syspath(sources):
            # Importing each build module runs its asset generation as a side
            # effect and may register screenshot models globally
            for module in config["build"]:
                importlib.import_module(module)

            if saved_models := get_saved_models():
                # Merge rendering defaults under each model's own config
                saved_models = [
                    (obj, label, {**DEFAULT_MODEL_CONFIG, **model_config})
                    for obj, label, model_config in saved_models
                ]
                generate_screenshots(saved_models, destination, config["exceptions"], config["thumbnails"])

        # Only write a stamp when something was actually produced
        if any(destination.iterdir()):
            stamp.write_text(json.dumps({"input_hash": new_hash}))
|
||||
|
||||
|
||||
def iter_assets(sources: Iterable[Path], exts: set[str]):
    """Yield every file under *sources* whose extension is in *exts*.

    Extensions are compared case-insensitively and with any leading dot
    stripped, so {"PNG", ".svg"} matches both "a.png" and "b.SVG".
    """
    wanted = {ext.lower().lstrip(".") for ext in exts}

    for root in map(Path, sources):
        yield from (
            candidate
            for candidate in root.rglob("*")
            if candidate.is_file() and candidate.suffix.lower().lstrip(".") in wanted
        )
|
||||
|
||||
|
||||
def copy_assets(sources: Iterable[Path], destination: Path):
    """Copy every recognized asset file found under *sources* into
    *destination* (created if needed), flattening the directory structure."""
    extensions = {"3mf", "brep", "dxf", "glb", "jpg", "png", "step", "stl", "svg"}

    destination = Path(destination)
    destination.mkdir(parents=True, exist_ok=True)

    for source in sources:
        for candidate in Path(source).rglob("*"):
            if not candidate.is_file():
                continue
            if candidate.suffix.lower().lstrip(".") not in extensions:
                continue
            # copy2 preserves metadata (timestamps) along with contents
            shutil.copy2(candidate, destination / candidate.name)
|
||||
|
||||
|
||||
def generate_screenshots(models: list[tuple], destination: Path, exceptions: dict | None, thumbnails: list | None):
    """Render *models* as screenshots into *destination*, then post-process.

    Args:
        models: saved model tuples (object, label, config)
        destination: directory receiving the rendered images
        exceptions: per-image overrides forwarded to batch_screenshots
        thumbnails: thumbnail specs forwarded to batch_thumbnails, if any
    """
    prepared = screenshots_process_examples(models)
    screenshots_run(models=prepared, screenshots_dir=destination)

    # Resize/crop the rendered PNGs, honoring per-image exception overrides
    batch_screenshots(destination, exceptions)
    if thumbnails:
        batch_thumbnails(destination, thumbnails)
|
||||
|
||||
|
||||
def screenshots_process_examples(
    models_to_process: list[tuple],
) -> list[tuple[str, dict]]:
    """Slimmed version of process_examples

    Args:
        models_to_process: Saved model tuples

    Returns:
        List of (name, data) tuples where data is {model, config}
    """
    # Heavy third-party imports deferred until screenshots are actually needed
    from ocp_tessellate.convert import export_three_cad_viewer_js
    from tcv_screenshots.render import DEFAULT_CONFIG as TCV_DEFAULT_CONFIG

    results = []

    for cad_object, output_name, example_config in models_to_process:
        # Per-example overrides win over the renderer defaults
        merged_config = {**TCV_DEFAULT_CONFIG, **(example_config or {})}

        # Serialize the CAD object to the viewer's JSON representation
        model_data = json.loads(export_three_cad_viewer_js(None, cad_object))

        results.append((output_name, {"model": model_data, "config": merged_config}))

    return results
|
||||
|
||||
|
||||
def screenshots_run(
    models: list[tuple[str, dict]],
    screenshots_dir: Path,
    headless: bool = True,
    pause: bool = False,
):
    """Render *models* into *screenshots_dir*; exit non-zero on any failure.

    Args:
        models: (name, {model, config}) tuples from screenshots_process_examples
        screenshots_dir: directory receiving the rendered PNGs
        headless: run the renderer without a visible window
        pause: pause between renders (debugging aid)
    """
    import asyncio
    from tcv_screenshots.render import render_models_to_screenshots

    if not models:
        print("No models to render")
        return

    # Render models to screenshots
    print("\n=== Rendering models to screenshots ===")
    fail_count = asyncio.run(
        render_models_to_screenshots(
            models,
            screenshots_dir,
            headless=headless,
            pause=pause,
            # Removed dead `debug_models_dir = None` local: it was never set,
            # so debug rendering was effectively always off
            debug=False,
        )
    )
    if fail_count > 0:
        sys.exit(1)
|
||||
|
||||
|
||||
def batch_build_artifacts(root: str | Path, *, force: bool = False):
    """Run build_artifacts for *root* and every subdirectory under it.

    Args:
        root: directory tree to traverse
        force: regenerate artifacts even when sources are unchanged
    """
    root = Path(root).resolve()
    destination = DOCS_ROOT / ARTIFACT_FOLDER
    destination.mkdir(parents=True, exist_ok=True)

    ignore = {"__pycache__"}
    folders = [p for p in root.rglob("*") if p.is_dir() and p.name not in ignore]

    # Include the root itself when it carries its own asset config.
    # Fixed: build_artifacts reads "<ASSET_CONFIG_NAME>.json", but this check
    # previously looked for a ".py" file and therefore always skipped the root.
    if (root / (ASSET_CONFIG_NAME + ".json")).exists():
        folders.append(root)

    for folder in folders:
        print("===== Processing " + folder.name + " =====")
        build_artifacts(folder, force=force)
|
||||
|
||||
|
||||
if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        prog="build_artifacts",
        description="Build screenshots, svgs, and other documentation assets from a directory and "
        "its subdirectories."
    )

    parser.add_argument(
        "-d", "--directory",
        type=Path,
        help="Directory to traverse for generating artifacts.",
    )

    parser.add_argument(
        "--clean",
        action="store_true",
        help="Clean (erase) artifact folder",
    )

    parser.add_argument(
        "--force",
        action="store_true",
        help="Force (re)generation of artifacts despite source status",
    )

    args = parser.parse_args()
    if args.clean:
        print(f"Removing everything under '{ARTIFACT_FOLDER}'...")
        artifact_folder = DOCS_ROOT / ARTIFACT_FOLDER
        # artifact_folder is already a Path; no re-wrapping needed
        if artifact_folder.exists():
            shutil.rmtree(artifact_folder)

    else:
        # Fixed: a missing -d/--directory previously crashed with a TypeError
        # inside Path(None); report a proper usage error instead.
        if args.directory is None:
            parser.error("-d/--directory is required unless --clean is given")
        batch_build_artifacts(args.directory, force=args.force)
|
||||
165
docs/process_image.py
Normal file
165
docs/process_image.py
Normal file
|
|
@ -0,0 +1,165 @@
|
|||
from pathlib import Path
|
||||
from PIL import Image
|
||||
|
||||
|
||||
def crop_to_content(image: Image):
    """Crop image to non-background content, assuming transparent background"""
    # getbbox() returns None when every pixel is fully transparent
    bbox = image.getbbox()
    if not bbox:
        raise RuntimeError("Image is entirely transparent.")
    return image.crop(bbox)
|
||||
|
||||
|
||||
def resize_to_height(image: Image, new_height: int):
    """Resize *image* to *new_height*, preserving aspect ratio.

    Images already at or below the target height keep their original
    dimensions (no upscaling); resize() still returns a fresh copy.
    """
    width, height = image.size
    if height > new_height:
        scale = new_height / height
        target = (int(width * scale), new_height)
    else:
        target = (width, height)

    return image.resize(target, Image.LANCZOS)
|
||||
|
||||
|
||||
def process_screenshot(
    filepath: str | Path,
    height: int = 300,
    margin: int = 0,
    background: float | tuple[float, ...] | str | None = (0, 0, 0, 0)
):
    """Crop screenshot to non-transparent objects, resize non-transparent objects,
    apply margin, update background. Saves to png (overwriting the source file).

    Args:
        filepath (str | Path): path to image to process
        height (int): final image height (content plus both margins)
        margin (int): image margin around objects
        background (float, tuple, str, None): RGBA color representation

    Raises:
        ValueError: if the margins leave no room for content
        RuntimeError: if the image is entirely transparent
    """

    filepath = Path(filepath)
    content_height = height - 2 * margin
    # Guard: previously a too-large margin produced a negative resize height
    # and failed deep inside PIL with a confusing error
    if content_height <= 0:
        raise ValueError("margin is too large for the requested height")

    with Image.open(filepath) as image:
        if image.mode != "RGBA":
            image = image.convert("RGBA")

        cropped = crop_to_content(image)
        resized = resize_to_height(cropped, content_height)

        # Apply margin and background change
        resize_width, resize_height = resized.size
        canvas_width = resize_width + margin * 2
        canvas_height = resize_height + margin * 2

        # Center the content within the margin frame
        x_offset = (canvas_width - resize_width) // 2
        y_offset = (canvas_height - resize_height) // 2

        expanded_image = Image.new("RGBA", (canvas_width, canvas_height), background)
        expanded_image.paste(resized, (x_offset, y_offset))

        expanded_image.save(filepath)
|
||||
|
||||
|
||||
def make_thumbnail(
    filepath: str | Path,
    label: str | None = None,
    size: int = 250,
    crop: bool = False,
    push: str | None = None,
    shift: tuple[float] = (0, 0)
):
    """Make square thumbnail with given name and size. File saved as "thumb_{label}.png".

    Args:
        filepath (str | Path): source image for thumbnail
        label (str): name to give thumbnail, no need to include "thumb" or file extension
        size (int): final image height and width
        crop (bool): crop width to fill height. False shrinks foreground height to fit width
        push (str): push foreground to edge ("top", "bottom", "left", "right")
        shift (tuple[float]): amount to shift image along x and y in pixels
    """

    filepath = Path(filepath)
    folder = filepath.parent

    if not label:
        label = filepath.name

    # Strip any existing "thumb_" so re-thumbnailing doesn't stack prefixes
    thumb_name = (
        "thumb_"
        + Path(label).stem.replace("thumb_", "")
        + ".png"
    )
    thumb_path = folder / thumb_name

    with Image.open(filepath) as image:
        if image.mode != "RGBA":
            image = image.convert("RGBA")

        cropped = crop_to_content(image)
        width, height = cropped.size
        # Wide images shrink to fit the square unless crop=True fills its height
        resize_height = size * height // width if not crop and width > height else size
        resized = resize_to_height(cropped, resize_height)

        # Center the content in the square thumbnail
        width, height = resized.size
        x_offset = (size - width) // 2
        y_offset = (size - height) // 2

        if push:
            if "left" in push:
                x_offset = 0
            elif "right" in push:
                x_offset = size - width
            if "top" in push:
                y_offset = 0
            elif "bottom" in push:
                y_offset = size - height

        # Bug fix: shift was previously folded into the initial centering
        # offsets AND added again after the push block, doubling it whenever
        # push was unused. Apply it exactly once, and cast to int because
        # Image.paste requires integer coordinates (shift may be float).
        shift_x, shift_y = shift
        x_offset = int(x_offset + shift_x)
        y_offset = int(y_offset + shift_y)

        thumb = Image.new("RGBA", (size, size))
        thumb.paste(resized, (x_offset, y_offset))

        thumb.save(thumb_path)
|
||||
|
||||
|
||||
def batch_screenshots(folder: str | Path, exceptions: dict | None = None, height: int = 300, margin=0, background: float | tuple[float, ...] | str | None = None):
    """Batch process screenshots in folder.

    Args:
        folder: directory whose *.png files are processed in place
        exceptions: dict mapping image paths or bare filenames to dicts of
            process_screenshot keyword overrides (e.g. "height", "margin")
        height: default final image height
        margin: default margin around objects
        background: RGBA color forwarded to process_screenshot when given
    """

    folder = Path(folder)
    exceptions = exceptions or {}

    # Fixed: `background` was accepted but never forwarded. Only pass it when
    # explicitly set so process_screenshot's own default still applies.
    defaults = {"height": height, "margin": margin}
    if background is not None:
        defaults["background"] = background

    for path in folder.glob("*.png"):
        # Fixed: exception keys typically come from JSON configs and are
        # strings, which never compare equal to a Path — try the Path itself
        # (direct Python callers), then the bare filename, then the full path.
        override = None
        for key in (path, path.name, str(path)):
            if key in exceptions:
                override = exceptions[key]
                break

        if override is not None:
            process_screenshot(path, **override)
        else:
            process_screenshot(path, **defaults)
|
||||
|
||||
|
||||
def batch_thumbnails(folder: str | Path, to_thumbnail: list[dict], size: int = 150):
    """Batch create thumbnails from list.

    Args:
        folder: directory containing the source images
        to_thumbnail: list of dicts with required keys "source", "label" and
            optional "size" and "shift"
        size: default thumbnail size for entries without their own
    """
    folder = Path(folder)

    for spec in to_thumbnail:
        # Fixed: previously mutated the caller's dicts in place (setdefault +
        # pop of "source"), corrupting configs reused across runs. Work on a
        # shallow copy instead.
        kwargs = dict(spec)
        kwargs.setdefault("size", size)
        kwargs["filepath"] = folder / kwargs.pop("source")
        make_thumbnail(**kwargs)
|
||||
|
|
@ -72,6 +72,7 @@ development = [
|
|||
"pytest-benchmark",
|
||||
"pytest-cov",
|
||||
"pytest-xdist",
|
||||
"tcv-screenshots==0.3.0",
|
||||
"wheel",
|
||||
]
|
||||
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue