[gdsii] add some profiling helpers

This commit is contained in:
Jan Petykiewicz 2026-04-02 13:22:27 -07:00
commit 9df42000b7
4 changed files with 736 additions and 0 deletions

View file

@ -0,0 +1,5 @@
# Executable shim: delegates to the profiling CLI in masque.file.gdsii_perf
# and propagates its integer exit status to the shell.
from masque.file.gdsii_perf import main
if __name__ == '__main__':
    raise SystemExit(main())

View file

@ -0,0 +1,74 @@
from __future__ import annotations
import argparse
import importlib
import json
import time
from pathlib import Path
from masque import LibraryError
# Registry of profiling targets: CLI name -> (module path, reader attribute).
# Modules are imported lazily (via importlib in main()) so profiling one
# reader does not pay the import cost of the others.
READERS = {
    'gdsii': ('masque.file.gdsii', 'readfile'),
    'gdsii_arrow': ('masque.file.gdsii_arrow', 'readfile'),
}
def _summarize(path: Path, elapsed_s: float, info: dict[str, object], lib: object) -> dict[str, object]:
assert hasattr(lib, '__len__')
assert hasattr(lib, 'tops')
tops = lib.tops() # type: ignore[no-any-return, attr-defined]
try:
unique_top = lib.top() # type: ignore[no-any-return, attr-defined]
except LibraryError:
unique_top = None
return {
'path': str(path),
'elapsed_s': elapsed_s,
'library_name': info['name'],
'cell_count': len(lib), # type: ignore[arg-type]
'topcells': tops,
'topcell': unique_top,
}
def build_arg_parser() -> argparse.ArgumentParser:
    """Construct the CLI parser for the reader-profiling harness."""
    description = 'Profile GDS readers with a stable end-to-end workload.'
    parser = argparse.ArgumentParser(description=description)
    # Which registered reader implementation to exercise.
    parser.add_argument('--reader', choices=sorted(READERS), required=True)
    # GDS file to read repeatedly.
    parser.add_argument('--path', type=Path, required=True)
    # Untimed warmup reads, then timed repeats.
    parser.add_argument('--warmup', type=int, default=1)
    parser.add_argument('--repeat', type=int, default=1)
    return parser
def main(argv: list[str] | None = None) -> int:
    """Time repeated reads of a GDS file and print a JSON report.

    Performs `--warmup` untimed reads first, then `--repeat` timed reads,
    summarizing each timed run via `_summarize`. Returns 0 on success.
    """
    args = build_arg_parser().parse_args(argv)
    module_name, attr_name = READERS[args.reader]
    # Import the chosen reader lazily so only the profiled module is loaded.
    reader_module = importlib.import_module(module_name)
    readfile = getattr(reader_module, attr_name)
    target = args.path.expanduser().resolve()
    # Warmup reads: results discarded, just priming caches/imports.
    for _ in range(args.warmup):
        readfile(target)
    runs = []
    for _ in range(args.repeat):
        begin = time.perf_counter()
        lib, info = readfile(target)
        duration = time.perf_counter() - begin
        runs.append(_summarize(target, duration, info, lib))
    report = {
        'reader': args.reader,
        'warmup': args.warmup,
        'repeat': args.repeat,
        'runs': runs,
    }
    print(json.dumps(report, indent=2, sort_keys=True))
    return 0
if __name__ == '__main__':
    raise SystemExit(main())

633
masque/file/gdsii_perf.py Normal file
View file

@ -0,0 +1,633 @@
"""
Synthetic GDS fixture generation for reader/writer performance testing.
The presets here are intentionally hierarchical and deterministic. They aim to
approximate a pair of real-world layout families discussed during GDS reader and
writer work:
* `many_cells`: tens of thousands of cells, moderate reference count, very heavy
box usage after flattening, and moderate polygon density.
* `many_instances`: a much smaller cell library with very high reference count,
similar box density, and far fewer polygons.
Fixtures are written by streaming structures through `klamath` directly so large
benchmark files can be produced without first materializing an equally large
`masque.Library` in Python.
"""
from __future__ import annotations
from dataclasses import asdict, dataclass
from pathlib import Path
from typing import Any
import argparse
import json
import math
import numpy
import klamath
from klamath import elements
# Shared empty PROPATTR map, reused so elements without annotations do not
# each allocate a fresh dict.
EMPTY_PROPERTIES: dict[int, bytes] = {}
# GDS file-header units: 1 database unit = 1 nm, shown as 1e-3 user units.
METERS_PER_DB_UNIT = 1e-9
USER_UNITS_PER_DB_UNIT = 1e-3
# Layer count reported in preset/manifest metadata; presets draw on a subset.
TOTAL_LAYERS = 200
@dataclass(frozen=True)
class FixturePreset:
    """Knob set describing one synthetic fixture family.

    The generator functions below consume these counts directly; see each
    `_write_*` / `_make_*` helper for exactly how a knob is used.
    """
    name: str                    # preset identifier, also used in the library name
    total_layers: int            # layer count reported in the manifest
    box_layers: int              # layers receiving boxes in each box cell
    heavy_box_layers: int        # first N box layers get heavy_boxes_per_cell each
    polygon_layers: int          # layers cycled through for polygons/paths/texts
    box_cells: int               # number of box leaf cells
    poly_cells: int              # number of polygon leaf cells
    box_wrappers: int            # single-SREF wrapper cells around box cells
    poly_wrappers: int           # single-SREF wrapper cells around poly cells
    box_clusters: int            # cluster cells holding many box refs
    poly_clusters: int           # cluster cells holding many poly refs
    box_cluster_refs: int        # refs per box cluster cell
    poly_cluster_refs: int       # refs per poly cluster cell
    top_direct_box_refs: int     # refs from TOP straight to box leaf cells
    top_direct_poly_refs: int    # refs from TOP straight to poly leaf cells
    heavy_boxes_per_cell: int    # boxes per heavy layer in each box cell
    regular_boxes_per_cell: int  # boxes per non-heavy box layer in each box cell
    polygons_per_cell: int       # polygons per poly cell
    path_stride: int             # every Nth poly cell also gets a Path element
    text_stride: int             # every Nth poly cell also gets a Text element
    box_cluster_array: tuple[int, int]   # (cols, rows) for cluster box AREFs
    top_box_array: tuple[int, int]       # (cols, rows) for TOP box AREFs; (1, 1) means plain SREFs
    poly_cluster_array: tuple[int, int]  # (cols, rows) for cluster poly AREFs
    top_poly_array: tuple[int, int]      # (cols, rows) for TOP poly AREFs; (1, 1) means plain SREFs
    rare_annotation_stride: int  # every Nth cell gets a PROPATTR annotation
# Baseline (scale=1.0) preset definitions. See the module docstring for the
# real-world layout families these approximate.
PRESETS: dict[str, FixturePreset] = {
    # Tens of thousands of cells, moderate reference count, heavy box usage.
    'many_cells': FixturePreset(
        name='many_cells',
        total_layers=TOTAL_LAYERS,
        box_layers=20,
        heavy_box_layers=3,
        polygon_layers=20,
        box_cells=17_000,
        poly_cells=6_000,
        box_wrappers=18_000,
        poly_wrappers=6_000,
        box_clusters=2_000,
        poly_clusters=999,
        box_cluster_refs=24,
        poly_cluster_refs=16,
        top_direct_box_refs=21_000,
        top_direct_poly_refs=7_000,
        heavy_boxes_per_cell=6,
        regular_boxes_per_cell=2,
        polygons_per_cell=50,
        path_stride=2,
        text_stride=3,
        box_cluster_array=(24, 16),
        top_box_array=(8, 8),
        poly_cluster_array=(4, 2),
        top_poly_array=(3, 2),
        rare_annotation_stride=1_250,
    ),
    # Much smaller cell library with a very high reference count.
    'many_instances': FixturePreset(
        name='many_instances',
        total_layers=TOTAL_LAYERS,
        box_layers=25,
        heavy_box_layers=3,
        polygon_layers=10,
        box_cells=2_500,
        poly_cells=500,
        box_wrappers=1_000,
        poly_wrappers=500,
        box_clusters=1_000,
        poly_clusters=499,
        box_cluster_refs=1_200,
        poly_cluster_refs=400,
        top_direct_box_refs=102_001,
        top_direct_poly_refs=0,
        heavy_boxes_per_cell=40,
        regular_boxes_per_cell=16,
        polygons_per_cell=60,
        path_stride=1,
        text_stride=2,
        box_cluster_array=(1, 1),
        top_box_array=(1, 1),
        poly_cluster_array=(1, 1),
        top_poly_array=(1, 1),
        rare_annotation_stride=250,
    ),
}
@dataclass(frozen=True)
class FixtureManifest:
    """Summary of a generated fixture, also written as a JSON sidecar.

    `hierarchical_*` counts describe the file as stored; `flattened_*` and
    `estimated_*` describe geometry after resolving all references (AREF
    placements multiplied out).
    """
    preset: str
    scale: float
    gds_path: str
    library_name: str
    cells: int   # total structure count, including TOP
    refs: int    # total reference records (each AREF counted once)
    layers: int
    box_layers: int
    heavy_box_layers: list[list[int]]  # [layer, datatype] pairs
    polygon_layers: list[list[int]]    # [layer, datatype] pairs
    hierarchical_boxes_per_heavy_layer: int
    hierarchical_boxes_per_regular_layer: int
    hierarchical_polygons_total: int
    hierarchical_paths_total: int
    hierarchical_texts_total: int
    flattened_box_placements: int
    flattened_poly_placements: int
    estimated_flat_boxes_per_heavy_layer: int
    estimated_flat_polygons_per_active_polygon_layer: int
def _scaled_count(value: int, scale: float, minimum: int = 0) -> int:
if value == 0:
return 0
scaled = int(math.ceil(value * scale))
return max(minimum, scaled)
def _scaled_preset(preset: FixturePreset, scale: float) -> FixturePreset:
    """Return a copy of `preset` with population counts scaled by `scale`.

    Layer splits are clamped (not scaled) so they never exceed their parent
    counts, and per-cell shape counts / strides are kept at least 1, so the
    shape mix stays the same while the overall fixture size changes.

    Raises:
        ValueError: if `scale` is not positive.
    """
    if scale <= 0:
        raise ValueError(f'scale must be positive, got {scale!r}')
    return FixturePreset(
        name=preset.name,
        total_layers=preset.total_layers,
        # Clamp layer splits to their parent layer counts.
        box_layers=min(preset.box_layers, preset.total_layers),
        heavy_box_layers=min(preset.heavy_box_layers, preset.box_layers),
        polygon_layers=min(preset.polygon_layers, preset.total_layers),
        # Leaf-cell populations scale but never drop below one cell.
        box_cells=_scaled_count(preset.box_cells, scale, minimum=1),
        poly_cells=_scaled_count(preset.poly_cells, scale, minimum=1),
        # Wrappers may scale to zero; clusters keep at least one.
        box_wrappers=_scaled_count(preset.box_wrappers, scale),
        poly_wrappers=_scaled_count(preset.poly_wrappers, scale),
        box_clusters=_scaled_count(preset.box_clusters, scale, minimum=1),
        poly_clusters=_scaled_count(preset.poly_clusters, scale, minimum=1),
        box_cluster_refs=_scaled_count(preset.box_cluster_refs, scale, minimum=1),
        poly_cluster_refs=_scaled_count(preset.poly_cluster_refs, scale, minimum=1),
        top_direct_box_refs=_scaled_count(preset.top_direct_box_refs, scale),
        top_direct_poly_refs=_scaled_count(preset.top_direct_poly_refs, scale),
        # Per-cell shape counts and strides are not scaled, only floored at 1.
        heavy_boxes_per_cell=max(1, preset.heavy_boxes_per_cell),
        regular_boxes_per_cell=max(1, preset.regular_boxes_per_cell),
        polygons_per_cell=max(1, preset.polygons_per_cell),
        path_stride=max(1, preset.path_stride),
        text_stride=max(1, preset.text_stride),
        # Array shapes pass through unchanged.
        box_cluster_array=preset.box_cluster_array,
        top_box_array=preset.top_box_array,
        poly_cluster_array=preset.poly_cluster_array,
        top_poly_array=preset.top_poly_array,
        rare_annotation_stride=max(1, _scaled_count(preset.rare_annotation_stride, scale, minimum=1)),
    )
def _rect_xy(xmin: int, ymin: int, xmax: int, ymax: int) -> numpy.ndarray[Any, numpy.dtype[numpy.int32]]:
return numpy.array(
[[xmin, ymin], [xmin, ymax], [xmax, ymax], [xmax, ymin], [xmin, ymin]],
dtype=numpy.int32,
)
def _poly_xy(points: list[tuple[int, int]]) -> numpy.ndarray[Any, numpy.dtype[numpy.int32]]:
closed = points + [points[0]]
return numpy.array(closed, dtype=numpy.int32)
def _sref(
        target: str,
        xy: tuple[int, int],
        properties: dict[int, bytes] | None = None,
) -> elements.Reference:
    """Single placement (SREF-style) of structure `target` at `xy`.

    `properties` defaults to the shared empty PROPATTR map when None.
    """
    props = EMPTY_PROPERTIES if properties is None else properties
    position = numpy.array([xy], dtype=numpy.int32)
    return klamath.library.Reference(
        struct_name=target.encode('ASCII'),
        invert_y=False,
        mag=1.0,
        angle_deg=0.0,
        xy=position,
        colrow=None,
        properties=props,
    )
def _aref(
        target: str,
        origin: tuple[int, int],
        counts: tuple[int, int],
        step: tuple[int, int],
        properties: dict[int, bytes] | None = None,
) -> elements.Reference:
    """Array placement (AREF-style) of `target`: `counts` = (cols, rows).

    The three-point xy encodes origin plus the column- and row-extent
    corners, per the GDS AREF convention; `step` is the per-instance pitch.
    """
    cols, rows = counts
    dx, dy = step
    col_corner = (origin[0] + cols * dx, origin[1])
    row_corner = (origin[0], origin[1] + rows * dy)
    placement = numpy.array([origin, col_corner, row_corner], dtype=numpy.int32)
    props = EMPTY_PROPERTIES if properties is None else properties
    return klamath.library.Reference(
        struct_name=target.encode('ASCII'),
        invert_y=False,
        mag=1.0,
        angle_deg=0.0,
        xy=placement,
        colrow=(cols, rows),
        properties=props,
    )
def _annotation(index: int) -> dict[int, bytes]:
return {1: f'perf-{index}'.encode('ASCII')}
def _make_box_cell(name: str, index: int, cfg: FixturePreset) -> list[elements.Element]:
    """Generate the rectangle Boundary elements for one box leaf cell.

    Heavy layers get `heavy_boxes_per_cell` boxes each; the remaining box
    layers get `regular_boxes_per_cell`. One box per `rare_annotation_stride`
    cells carries a PROPATTR annotation. `name` is unused here (kept for
    signature parity with `_make_poly_cell`).
    """
    result: list[elements.Element] = []
    origin_x = (index % 17) * 600
    origin_y = (index // 17) * 180
    annotate = index % cfg.rare_annotation_stride == 0
    for lyr in range(cfg.heavy_box_layers):
        for b in range(cfg.heavy_boxes_per_cell):
            left = origin_x + b * 22
            bottom = origin_y + lyr * 40
            w = 10 + ((index + b + lyr) % 7) * 6
            h = 10 + ((index * 3 + b + lyr) % 5) * 8
            # Only the very first box of an annotated cell gets the property.
            props = _annotation(index) if annotate and b == 0 and lyr == 0 else EMPTY_PROPERTIES
            result.append(elements.Boundary(
                layer=(lyr, 0),
                xy=_rect_xy(left, bottom, left + w, bottom + h),
                properties=props,
            ))
    for lyr in range(cfg.heavy_box_layers, cfg.box_layers):
        for b in range(cfg.regular_boxes_per_cell):
            left = origin_x + b * 38
            bottom = origin_y + (lyr - cfg.heavy_box_layers) * 28 + 400
            w = 18 + ((index + lyr + b) % 9) * 4
            h = 12 + ((index + 2 * lyr + b) % 6) * 5
            result.append(elements.Boundary(
                layer=(lyr, 0),
                xy=_rect_xy(left, bottom, left + w, bottom + h),
                properties=EMPTY_PROPERTIES,
            ))
    return result
def _make_poly_cell(name: str, index: int, cfg: FixturePreset) -> list[elements.Element]:
    """Generate the elements for one polygon leaf cell.

    Emits `polygons_per_cell` pentagon-ish boundaries cycled across the
    polygon layers, plus (per the cell index and the preset strides) at most
    one Path and one Text element. One polygon per `rare_annotation_stride`
    cells carries a PROPATTR annotation. `name` is unused here.
    """
    cell_elements: list[elements.Element] = []
    xbase = (index % 19) * 900
    ybase = (index // 19) * 260
    for poly_idx in range(cfg.polygons_per_cell):
        # Cycle polygons round-robin across the active polygon layers.
        layer = poly_idx % cfg.polygon_layers
        dx = xbase + (poly_idx % 5) * 120
        dy = ybase + (poly_idx // 5) * 80
        size = 18 + ((index + poly_idx + layer) % 11) * 7
        # Irregular five-vertex outline, deterministic in (index, poly_idx).
        points = [
            (dx, dy),
            (dx + size, dy + size // 5),
            (dx + size + size // 3, dy + size),
            (dx + size // 2, dy + size + size // 2),
            (dx - size // 4, dy + size // 2),
        ]
        # Only the first polygon of an annotated cell gets the property.
        properties = _annotation(index) if poly_idx == 0 and index % cfg.rare_annotation_stride == 0 else EMPTY_PROPERTIES
        cell_elements.append(elements.Boundary(
            layer=(layer, 0),
            xy=_poly_xy(points),
            properties=properties,
        ))
    if index % cfg.path_stride == 0:
        # Three-point wire on datatype 1, emitted every path_stride cells.
        layer = index % cfg.polygon_layers
        cell_elements.append(elements.Path(
            layer=(layer, 1),
            path_type=2,
            width=12 + (index % 5) * 4,
            extension=(0, 0),
            xy=numpy.array(
                [
                    [xbase, ybase + 900],
                    [xbase + 240, ybase + 930],
                    [xbase + 420, ybase + 960],
                ],
                dtype=numpy.int32,
            ),
            properties=EMPTY_PROPERTIES,
        ))
    if index % cfg.text_stride == 0:
        # Label on datatype 2, emitted every text_stride cells.
        layer = index % cfg.polygon_layers
        properties = _annotation(index) if index % cfg.rare_annotation_stride == 0 else EMPTY_PROPERTIES
        cell_elements.append(elements.Text(
            layer=(layer, 2),
            presentation=0,
            path_type=0,
            width=0,
            invert_y=False,
            mag=1.0,
            angle_deg=0.0,
            xy=numpy.array([[xbase + 64, ybase + 1536]], dtype=numpy.int32),
            string=f'T{index:05d}'.encode('ASCII'),
            properties=properties,
        ))
    return cell_elements
def _write_struct(stream: Any, name: str, cell_elements: list[elements.Element]) -> None:
    """Write one GDS structure named `name` to `stream` via klamath."""
    klamath.library.write_struct(stream, name=name.encode('ASCII'), elements=cell_elements)
def _box_name(index: int) -> str:
return f'box_{index:05d}'
def _poly_name(index: int) -> str:
return f'poly_{index:05d}'
def _box_wrapper_name(index: int) -> str:
return f'box_wrap_{index:05d}'
def _poly_wrapper_name(index: int) -> str:
return f'poly_wrap_{index:05d}'
def _box_cluster_name(index: int) -> str:
return f'box_cluster_{index:05d}'
def _poly_cluster_name(index: int) -> str:
return f'poly_cluster_{index:05d}'
def _write_box_cells(stream: Any, cfg: FixturePreset) -> None:
    """Stream every box leaf cell to `stream` as its own structure."""
    for cell_idx in range(cfg.box_cells):
        cell_name = _box_name(cell_idx)
        _write_struct(stream, cell_name, _make_box_cell(cell_name, cell_idx, cfg))


def _write_poly_cells(stream: Any, cfg: FixturePreset) -> None:
    """Stream every polygon leaf cell to `stream` as its own structure."""
    for cell_idx in range(cfg.poly_cells):
        cell_name = _poly_name(cell_idx)
        _write_struct(stream, cell_name, _make_poly_cell(cell_name, cell_idx, cfg))
def _write_wrappers(stream: Any, cfg: FixturePreset) -> None:
    """Stream wrapper cells, each containing a single SREF to a leaf cell.

    Wrapper indices cycle over the leaf cells, so wrappers may outnumber
    leaves; placements walk a grid (97- and 61-wide rows respectively).
    """
    for wrap_idx in range(cfg.box_wrappers):
        leaf = _box_name(wrap_idx % cfg.box_cells)
        placement = ((wrap_idx % 97) * 2_000, (wrap_idx // 97) * 2_000)
        _write_struct(stream, _box_wrapper_name(wrap_idx), [_sref(leaf, placement)])
    for wrap_idx in range(cfg.poly_wrappers):
        leaf = _poly_name(wrap_idx % cfg.poly_cells)
        placement = ((wrap_idx % 61) * 3_200, (wrap_idx // 61) * 3_200)
        _write_struct(stream, _poly_wrapper_name(wrap_idx), [_sref(leaf, placement)])
def _write_box_clusters(stream: Any, cfg: FixturePreset) -> None:
    """Stream box cluster cells: first ~3/4 of refs as AREFs, rest as SREFs.

    The AREF-count formula must stay in sync with
    `_ref_instances_per_box_cluster`, which the manifest math relies on.
    """
    aref_count = min(cfg.box_cluster_refs, max(1, (3 * cfg.box_cluster_refs) // 4))
    for cluster_idx in range(cfg.box_clusters):
        members: list[elements.Element] = []
        for r in range(cfg.box_cluster_refs):
            leaf = _box_name((cluster_idx * cfg.box_cluster_refs + r) % cfg.box_cells)
            at = ((r % 6) * 48_000, (r // 6) * 48_000)
            if r < aref_count:
                members.append(_aref(leaf, at, cfg.box_cluster_array, (720, 900)))
            else:
                members.append(_sref(leaf, at))
        _write_struct(stream, _box_cluster_name(cluster_idx), members)
def _write_poly_clusters(stream: Any, cfg: FixturePreset) -> None:
    """Stream poly cluster cells: first half of refs as AREFs, rest as SREFs.

    The AREF-count formula must stay in sync with
    `_ref_instances_per_poly_cluster`, which the manifest math relies on.
    """
    aref_count = min(cfg.poly_cluster_refs, cfg.poly_cluster_refs // 2)
    for cluster_idx in range(cfg.poly_clusters):
        members: list[elements.Element] = []
        for r in range(cfg.poly_cluster_refs):
            leaf = _poly_name((cluster_idx * cfg.poly_cluster_refs + r) % cfg.poly_cells)
            at = ((r % 10) * 96_000, (r // 10) * 96_000)
            if r < aref_count:
                members.append(_aref(leaf, at, cfg.poly_cluster_array, (12_000, 8_500)))
            else:
                members.append(_sref(leaf, at))
        _write_struct(stream, _poly_cluster_name(cluster_idx), members)
def _top_box_refs(cfg: FixturePreset) -> list[elements.Reference]:
    """References placed directly in TOP for the box half of the fixture.

    Order: one SREF per wrapper, then one per cluster, then the direct
    leaf-cell refs (AREFs unless `top_box_array` is (1, 1)).
    """
    placed: list[elements.Reference] = []
    for i in range(cfg.box_wrappers):
        spot = ((i % 240) * 240_000, (i // 240) * 240_000)
        placed.append(_sref(_box_wrapper_name(i), spot))
    for i in range(cfg.box_clusters):
        spot = ((i % 100) * 800_000, (i // 100) * 800_000 + 14_000_000)
        placed.append(_sref(_box_cluster_name(i), spot))
    use_aref = cfg.top_box_array != (1, 1)
    for i in range(cfg.top_direct_box_refs):
        leaf = _box_name(i % cfg.box_cells)
        spot = ((i % 150) * 160_000, (i // 150) * 160_000 + 26_000_000)
        if use_aref:
            placed.append(_aref(leaf, spot, cfg.top_box_array, (1_100, 1_350)))
        else:
            placed.append(_sref(leaf, spot))
    return placed
def _top_poly_refs(cfg: FixturePreset) -> list[elements.Reference]:
    """References placed directly in TOP for the polygon half of the fixture.

    Order: one SREF per wrapper, then one per cluster, then the direct
    leaf-cell refs (AREFs unless `top_poly_array` is (1, 1)). Y-offsets keep
    the poly half above the box half.
    """
    placed: list[elements.Reference] = []
    for i in range(cfg.poly_wrappers):
        spot = ((i % 180) * 360_000, (i // 180) * 360_000 + 44_000_000)
        placed.append(_sref(_poly_wrapper_name(i), spot))
    for i in range(cfg.poly_clusters):
        spot = ((i % 70) * 1_100_000, (i // 70) * 1_100_000 + 58_000_000)
        placed.append(_sref(_poly_cluster_name(i), spot))
    use_aref = cfg.top_poly_array != (1, 1)
    for i in range(cfg.top_direct_poly_refs):
        leaf = _poly_name(i % cfg.poly_cells)
        spot = ((i % 110) * 420_000, (i // 110) * 420_000 + 72_000_000)
        if use_aref:
            placed.append(_aref(leaf, spot, cfg.top_poly_array, (16_000, 14_000)))
        else:
            placed.append(_sref(leaf, spot))
    return placed
def _write_top(stream: Any, cfg: FixturePreset) -> None:
    """Stream the single TOP structure: all box refs, then all poly refs."""
    _write_struct(stream, 'TOP', [*_top_box_refs(cfg), *_top_poly_refs(cfg)])
def _poly_paths_total(cfg: FixturePreset) -> int:
return (cfg.poly_cells - 1) // cfg.path_stride + 1
def _poly_texts_total(cfg: FixturePreset) -> int:
return (cfg.poly_cells - 1) // cfg.text_stride + 1
def _ref_instances_per_box_cluster(cfg: FixturePreset) -> int:
array_refs = min(cfg.box_cluster_refs, max(1, (3 * cfg.box_cluster_refs) // 4))
array_mult = cfg.box_cluster_array[0] * cfg.box_cluster_array[1]
return array_refs * array_mult + (cfg.box_cluster_refs - array_refs)
def _ref_instances_per_poly_cluster(cfg: FixturePreset) -> int:
array_refs = min(cfg.poly_cluster_refs, cfg.poly_cluster_refs // 2)
array_mult = cfg.poly_cluster_array[0] * cfg.poly_cluster_array[1]
return array_refs * array_mult + (cfg.poly_cluster_refs - array_refs)
def fixture_manifest(path: str | Path, preset: str, scale: float = 1.0) -> FixtureManifest:
    """Compute the manifest for a fixture without writing any GDS data.

    Pure function of (preset, scale); `write_fixture` produces a file whose
    contents match this summary.

    Raises:
        KeyError: if `preset` is not a known preset name.
    """
    base = PRESETS[preset]
    cfg = _scaled_preset(base, scale)
    # Each wrapper holds one leaf SREF; clusters multiply out AREF arrays;
    # TOP direct refs multiply by the TOP array shape.
    flattened_box_placements = (
        cfg.box_wrappers
        + cfg.box_clusters * _ref_instances_per_box_cluster(cfg)
        + cfg.top_direct_box_refs * cfg.top_box_array[0] * cfg.top_box_array[1]
    )
    flattened_poly_placements = (
        cfg.poly_wrappers
        + cfg.poly_clusters * _ref_instances_per_poly_cluster(cfg)
        + cfg.top_direct_poly_refs * cfg.top_poly_array[0] * cfg.top_poly_array[1]
    )
    polygon_layers = max(1, cfg.polygon_layers)
    # Average polygons per active layer across all poly leaf cells.
    polys_per_layer = (cfg.poly_cells * cfg.polygons_per_cell) // polygon_layers
    return FixtureManifest(
        preset=cfg.name,
        scale=scale,
        gds_path=str(Path(path)),
        library_name=f'masque-perf-{cfg.name}',
        # All leaf, wrapper, and cluster structures plus TOP itself.
        cells=cfg.box_cells + cfg.poly_cells + cfg.box_wrappers + cfg.poly_wrappers + cfg.box_clusters + cfg.poly_clusters + 1,
        # Reference records: one per wrapper-internal SREF, per cluster member,
        # per TOP ref to wrappers/clusters, and per TOP direct leaf ref.
        refs=(
            cfg.box_wrappers
            + cfg.poly_wrappers
            + cfg.box_clusters * cfg.box_cluster_refs
            + cfg.poly_clusters * cfg.poly_cluster_refs
            + cfg.box_wrappers + cfg.poly_wrappers + cfg.box_clusters + cfg.poly_clusters
            + cfg.top_direct_box_refs + cfg.top_direct_poly_refs
        ),
        layers=cfg.total_layers,
        box_layers=cfg.box_layers,
        heavy_box_layers=[[layer, 0] for layer in range(cfg.heavy_box_layers)],
        polygon_layers=[[layer, 0] for layer in range(cfg.polygon_layers)],
        hierarchical_boxes_per_heavy_layer=cfg.box_cells * cfg.heavy_boxes_per_cell,
        hierarchical_boxes_per_regular_layer=cfg.box_cells * cfg.regular_boxes_per_cell,
        hierarchical_polygons_total=cfg.poly_cells * cfg.polygons_per_cell,
        hierarchical_paths_total=_poly_paths_total(cfg),
        hierarchical_texts_total=_poly_texts_total(cfg),
        flattened_box_placements=flattened_box_placements,
        flattened_poly_placements=flattened_poly_placements,
        estimated_flat_boxes_per_heavy_layer=flattened_box_placements * cfg.heavy_boxes_per_cell,
        estimated_flat_polygons_per_active_polygon_layer=flattened_poly_placements * polys_per_layer // cfg.poly_cells if cfg.poly_cells else 0,
    )
def write_fixture(
        path: str | Path,
        *,
        preset: str,
        scale: float = 1.0,
        write_manifest: bool = True,
) -> FixtureManifest:
    """Stream a synthetic GDS fixture to `path` and return its manifest.

    Structures are written leaf-first (boxes, polys, wrappers, clusters, TOP)
    directly through klamath, so large fixtures never exist fully in memory.
    When `write_manifest` is true, a JSON sidecar is written next to the GDS
    file (suffix `.gds.json` for a `.gds` path).

    Raises:
        KeyError: if `preset` is not a known preset name.
    """
    if preset not in PRESETS:
        known = ', '.join(sorted(PRESETS))
        raise KeyError(f'unknown preset {preset!r}; expected one of: {known}')
    manifest = fixture_manifest(path, preset, scale)
    # cfg is recomputed here (fixture_manifest also scales it); cheap, and it
    # keeps fixture_manifest's signature free of preset internals.
    cfg = _scaled_preset(PRESETS[preset], scale)
    output = Path(path)
    output.parent.mkdir(parents=True, exist_ok=True)
    with output.open('wb') as stream:
        header = klamath.library.FileHeader(
            name=manifest.library_name.encode('ASCII'),
            user_units_per_db_unit=USER_UNITS_PER_DB_UNIT,
            meters_per_db_unit=METERS_PER_DB_UNIT,
        )
        header.write(stream)
        _write_box_cells(stream, cfg)
        _write_poly_cells(stream, cfg)
        _write_wrappers(stream, cfg)
        _write_box_clusters(stream, cfg)
        _write_poly_clusters(stream, cfg)
        _write_top(stream, cfg)
        # Terminate the library record stream.
        klamath.records.ENDLIB.write(stream, None)
    if write_manifest:
        manifest_path = output.with_suffix(output.suffix + '.json')
        manifest_path.write_text(json.dumps(asdict(manifest), indent=2, sort_keys=True) + '\n')
    return manifest
def build_arg_parser() -> argparse.ArgumentParser:
    """Construct the CLI parser for the fixture generator."""
    parser = argparse.ArgumentParser(
        description='Generate synthetic GDS fixtures for GDS reader/writer performance work.',
    )
    parser.add_argument(
        'preset', nargs='?', default='many_cells', choices=sorted(PRESETS),
        help='Fixture family to generate.',
    )
    parser.add_argument(
        'output', nargs='?',
        help='Output .gds path. Defaults to build/gds_perf/<preset>.gds',
    )
    parser.add_argument(
        '--scale', type=float, default=1.0,
        help='Scale the preset counts down or up while keeping the same shape mix. Default: 1.0',
    )
    parser.add_argument(
        '--no-manifest', action='store_true',
        help='Do not write the sidecar JSON manifest.',
    )
    return parser
def main(argv: list[str] | None = None) -> int:
    """Generate the requested fixture and print its manifest as JSON."""
    args = build_arg_parser().parse_args(argv)
    if args.output is None:
        destination = Path('build/gds_perf') / f'{args.preset}.gds'
    else:
        destination = Path(args.output)
    manifest = write_fixture(
        destination,
        preset=args.preset,
        scale=args.scale,
        write_manifest=not args.no_manifest,
    )
    print(json.dumps(asdict(manifest), indent=2, sort_keys=True))
    return 0


if __name__ == '__main__':
    raise SystemExit(main())

View file

@ -0,0 +1,24 @@
from dataclasses import asdict
import json
from pathlib import Path
from ..file import gdsii
from ..file.gdsii_perf import fixture_manifest, write_fixture
def test_gdsii_perf_fixture_smoke(tmp_path: Path) -> None:
    """End-to-end smoke test for the fixture generator.

    Writes a tiny scaled-down 'many_cells' fixture, checks that the returned
    manifest matches the pure computation and the JSON sidecar, then reads
    the file back with the gdsii reader and verifies the cell counts.
    """
    output = tmp_path / 'many_cells.gds'
    # scale=0.002 keeps the fixture small enough for a fast unit test.
    manifest = write_fixture(output, preset='many_cells', scale=0.002)
    expected = fixture_manifest(output, preset='many_cells', scale=0.002)
    assert output.exists()
    # Writer's manifest must agree with the side-effect-free computation.
    assert manifest == expected
    # Sidecar lands next to the .gds with a double '.gds.json' suffix.
    sidecar = json.loads(output.with_suffix('.gds.json').read_text())
    assert sidecar == asdict(manifest)
    read_lib, info = gdsii.readfile(output)
    assert info['name'] == manifest.library_name
    assert len(read_lib) == manifest.cells
    assert 'TOP' in read_lib
    assert len(read_lib['TOP'].refs) > 0