Compare commits: master...polycollec (23 commits)

Commits (SHA1):
029a0f681f, bc36e92aab, 681f8e6354, 1992cf572a, 3a73fb1d60, 25cde0abb5,
2ef7a6e9e3, f78ba3655e, b4d287f384, 037118883b, 5368fd4e16, d0c1b00d7e,
6fba14ae21, 82fafdc61b, 7336545f07, 4e40e3f829, 79f2088180, e89d912ce8,
76511b95e6, 88bd5e897e, dc89491694, de9714041f, 35e28acb89
@@ -21,6 +21,7 @@ Notes:
 """
 from typing import IO, cast, Any
 from collections.abc import Iterable, Mapping, Callable
+from types import MappingProxyType
 import io
 import mmap
 import logging

@@ -52,6 +53,8 @@ path_cap_map = {
     4: Path.Cap.SquareCustom,
     }

+RO_EMPTY_DICT: Mapping[int, bytes] = MappingProxyType({})
+

 def rint_cast(val: ArrayLike) -> NDArray[numpy.int32]:
     return numpy.rint(val).astype(numpy.int32)

@@ -399,11 +402,15 @@ def _mrefs_to_grefs(refs: dict[str | None, list[Ref]]) -> list[klamath.library.R
     return grefs


-def _properties_to_annotations(properties: dict[int, bytes]) -> annotations_t:
+def _properties_to_annotations(properties: Mapping[int, bytes]) -> annotations_t:
+    if not properties:
+        return None
     return {str(k): [v.decode()] for k, v in properties.items()}


-def _annotations_to_properties(annotations: annotations_t, max_len: int = 126) -> dict[int, bytes]:
+def _annotations_to_properties(annotations: annotations_t, max_len: int = 126) -> Mapping[int, bytes]:
+    if annotations is None:
+        return RO_EMPTY_DICT
     cum_len = 0
     props = {}
     for key, vals in annotations.items():
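A minimal sketch (not part of this diff) of the new empty-value handling in the two helpers above, assuming they are called from within the same module:

    # An element with no GDS properties now yields annotations == None,
    # and annotations == None maps back to the shared read-only empty dict.
    assert _properties_to_annotations({}) is None
    assert _properties_to_annotations({1: b'foo'}) == {'1': ['foo']}
    assert _annotations_to_properties(None) is RO_EMPTY_DICT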
masque/file/gdsii_arrow.py (new file, 452 lines)
@@ -0,0 +1,452 @@
"""
GDSII file format readers and writers using the `TODO` library.

Note that GDSII references follow the same convention as `masque`,
with this order of operations:
   1. Mirroring
   2. Rotation
   3. Scaling
   4. Offset and array expansion (no mirroring/rotation/scaling applied to offsets)

Scaling, rotation, and mirroring apply to individual instances, not grid
vectors or offsets.

Notes:
 * absolute positioning is not supported
 * PLEX is not supported
 * ELFLAGS are not supported
 * GDS does not support library- or structure-level annotations
 * GDS creation/modification/access times are set to 1900-01-01 for reproducibility.
 * Gzip modification time is set to 0 (start of current epoch, usually 1970-01-01)

TODO writing
TODO warn on boxes, nodes
"""
from typing import IO, cast, Any
from collections.abc import Iterable, Mapping, Callable
import io
import mmap
import logging
import pathlib
import gzip
import string
from pprint import pformat

import numpy
from numpy.typing import ArrayLike, NDArray
from numpy.testing import assert_equal
import pyarrow
from pyarrow.cffi import ffi

from .utils import is_gzipped, tmpfile
from .. import Pattern, Ref, PatternError, LibraryError, Label, Shape
from ..shapes import Polygon, Path, PolyCollection
from ..repetition import Grid
from ..utils import layer_t, annotations_t
from ..library import LazyLibrary, Library, ILibrary, ILibraryView


logger = logging.getLogger(__name__)


clib = ffi.dlopen('/home/jan/projects/klamath-rs/target/release/libklamath_rs_ext.so')
ffi.cdef('void read_path(char* path, struct ArrowArray* array, struct ArrowSchema* schema);')


path_cap_map = {
    0: Path.Cap.Flush,
    1: Path.Cap.Circle,
    2: Path.Cap.Square,
    4: Path.Cap.SquareCustom,
    }


def rint_cast(val: ArrayLike) -> NDArray[numpy.int32]:
    return numpy.rint(val).astype(numpy.int32)


def _read_to_arrow(
        filename: str | pathlib.Path,
        *args,
        **kwargs,
        ) -> pyarrow.Array:
    path = pathlib.Path(filename)
    path.resolve()
    ptr_array = ffi.new('struct ArrowArray[]', 1)
    ptr_schema = ffi.new('struct ArrowSchema[]', 1)
    clib.read_path(str(path).encode(), ptr_array, ptr_schema)

    iptr_schema = int(ffi.cast('uintptr_t', ptr_schema))
    iptr_array = int(ffi.cast('uintptr_t', ptr_array))
    arrow_arr = pyarrow.Array._import_from_c(iptr_array, iptr_schema)

    return arrow_arr


def readfile(
        filename: str | pathlib.Path,
        *args,
        **kwargs,
        ) -> tuple[Library, dict[str, Any]]:
    """
    Wrapper for `read()` that takes a filename or path instead of a stream.

    Will automatically decompress gzipped files.

    Args:
        filename: Filename or path to read from.
        *args: passed to `read()`
        **kwargs: passed to `read()`
    """
    arrow_arr = _read_to_arrow(filename)
    assert len(arrow_arr) == 1

    results = read_arrow(arrow_arr[0])

    return results


def read_arrow(
        libarr: pyarrow.Array,
        raw_mode: bool = True,
        ) -> tuple[Library, dict[str, Any]]:
    """
    # TODO check GDSII file for cycles!
    Read a gdsii file and translate it into a dict of Pattern objects. GDSII structures are
    translated into Pattern objects; boundaries are translated into polygons, and srefs and arefs
    are translated into Ref objects.

    Additional library info is returned in a dict, containing:
      'name': name of the library
      'meters_per_unit': number of meters per database unit (all values are in database units)
      'logical_units_per_unit': number of "logical" units displayed by layout tools (typically microns)
                                per database unit

    Args:
        libarr: Arrow array to read from.
        raw_mode: If True, constructs shapes in raw mode, bypassing most data validation. Default True.

    Returns:
        - dict of pattern_name:Patterns generated from GDSII structures
        - dict of GDSII library info
    """
    library_info = _read_header(libarr)

    layer_names_np = libarr['layers'].values.to_numpy().view('i2').reshape((-1, 2))
    layer_tups = [tuple(pair) for pair in layer_names_np]

    cell_ids = libarr['cells'].values.field('id').to_numpy()
    cell_names = libarr['cell_names'].as_py()

    def get_geom(libarr: pyarrow.Array, geom_type: str) -> dict[str, Any]:
        el = libarr['cells'].values.field(geom_type)
        elem = dict(
            offsets = el.offsets.to_numpy(),
            xy_arr = el.values.field('xy').values.to_numpy().reshape((-1, 2)),
            xy_off = el.values.field('xy').offsets.to_numpy() // 2,
            layer_inds = el.values.field('layer').to_numpy(),
            prop_off = el.values.field('properties').offsets.to_numpy(),
            prop_key = el.values.field('properties').values.field('key').to_numpy(),
            prop_val = el.values.field('properties').values.field('value').to_pylist(),
            )
        return elem

    rf = libarr['cells'].values.field('refs')
    refs = dict(
        offsets = rf.offsets.to_numpy(),
        targets = rf.values.field('target').to_numpy(),
        xy = rf.values.field('xy').to_numpy().view('i4').reshape((-1, 2)),
        invert_y = rf.values.field('invert_y').fill_null(False).to_numpy(zero_copy_only=False),
        angle_rad = numpy.rad2deg(rf.values.field('angle_deg').fill_null(0).to_numpy()),
        scale = rf.values.field('mag').fill_null(1).to_numpy(),
        rep_valid = rf.values.field('repetition').is_valid().to_numpy(zero_copy_only=False),
        rep_xy0 = rf.values.field('repetition').field('xy0').fill_null(0).to_numpy().view('i4').reshape((-1, 2)),
        rep_xy1 = rf.values.field('repetition').field('xy1').fill_null(0).to_numpy().view('i4').reshape((-1, 2)),
        rep_counts = rf.values.field('repetition').field('counts').fill_null(0).to_numpy().view('i2').reshape((-1, 2)),
        prop_off = rf.values.field('properties').offsets.to_numpy(),
        prop_key = rf.values.field('properties').values.field('key').to_numpy(),
        prop_val = rf.values.field('properties').values.field('value').to_pylist(),
        )

    txt = libarr['cells'].values.field('texts')
    texts = dict(
        offsets = txt.offsets.to_numpy(),
        layer_inds = txt.values.field('layer').to_numpy(),
        xy = txt.values.field('xy').to_numpy().view('i4').reshape((-1, 2)),
        string = txt.values.field('string').to_pylist(),
        prop_off = txt.values.field('properties').offsets.to_numpy(),
        prop_key = txt.values.field('properties').values.field('key').to_numpy(),
        prop_val = txt.values.field('properties').values.field('value').to_pylist(),
        )

    elements = dict(
        boundaries = get_geom(libarr, 'boundaries'),
        paths = get_geom(libarr, 'paths'),
        boxes = get_geom(libarr, 'boxes'),
        nodes = get_geom(libarr, 'nodes'),
        texts = texts,
        refs = refs,
        )

    paths = libarr['cells'].values.field('paths')
    elements['paths'].update(dict(
        width = paths.values.field('width').fill_null(0).to_numpy(),
        path_type = paths.values.field('path_type').fill_null(0).to_numpy(),
        extensions = numpy.stack((
            paths.values.field('extension_start').fill_null(0).to_numpy(),
            paths.values.field('extension_end').fill_null(0).to_numpy(),
            ), axis=-1),
        ))

    global_args = dict(
        cell_names = cell_names,
        layer_tups = layer_tups,
        raw_mode = raw_mode,
        )

    mlib = Library()
    for cc in range(len(libarr['cells'])):
        name = cell_names[cell_ids[cc]]
        pat = Pattern()
        _boundaries_to_polygons(pat, global_args, elements['boundaries'], cc)
        _gpaths_to_mpaths(pat, global_args, elements['paths'], cc)
        _grefs_to_mrefs(pat, global_args, elements['refs'], cc)
        _texts_to_labels(pat, global_args, elements['texts'], cc)
        mlib[name] = pat

    return mlib, library_info


def _read_header(libarr: pyarrow.Array) -> dict[str, Any]:
    """
    Read the file header and create the library_info dict.
    """
    library_info = dict(
        name = libarr['lib_name'],
        meters_per_unit = libarr['meters_per_db_unit'],
        logical_units_per_unit = libarr['user_units_per_db_unit'],
        )
    return library_info


def _grefs_to_mrefs(
        pat: Pattern,
        global_args: dict[str, Any],
        elem: dict[str, Any],
        cc: int,
        ) -> None:
    cell_names = global_args['cell_names']
    elem_off = elem['offsets']      # which elements belong to each cell
    xy = elem['xy']
    prop_key = elem['prop_key']
    prop_val = elem['prop_val']
    targets = elem['targets']

    elem_count = elem_off[cc + 1] - elem_off[cc]
    elem_slc = slice(elem_off[cc], elem_off[cc] + elem_count + 1)   # +1 to capture ending location for last elem
    prop_offs = elem['prop_off'][elem_slc]      # which props belong to each element
    elem_invert_y = elem['invert_y'][elem_slc][:elem_count]
    elem_angle_rad = elem['angle_rad'][elem_slc][:elem_count]
    elem_scale = elem['scale'][elem_slc][:elem_count]
    elem_rep_xy0 = elem['rep_xy0'][elem_slc][:elem_count]
    elem_rep_xy1 = elem['rep_xy1'][elem_slc][:elem_count]
    elem_rep_counts = elem['rep_counts'][elem_slc][:elem_count]
    rep_valid = elem['rep_valid'][elem_slc][:elem_count]

    for ee in range(elem_count):
        target = cell_names[targets[ee]]
        offset = xy[ee]
        mirr = elem_invert_y[ee]
        rot = elem_angle_rad[ee]
        mag = elem_scale[ee]

        rep: None | Grid = None
        if rep_valid[ee]:
            a_vector = elem_rep_xy0[ee]
            b_vector = elem_rep_xy1[ee]
            a_count, b_count = elem_rep_counts[ee]
            rep = Grid(a_vector=a_vector, b_vector=b_vector, a_count=a_count, b_count=b_count)

        annotations: None | dict[str, list[int | float | str]] = None
        prop_ii, prop_ff = prop_offs[ee], prop_offs[ee + 1]
        if prop_ii < prop_ff:
            annotations = {str(prop_key[off]): [prop_val[off]] for off in range(prop_ii, prop_ff)}

        ref = Ref(offset=offset, mirrored=mirr, rotation=rot, scale=mag, repetition=rep, annotations=annotations)
        pat.refs[target].append(ref)


def _texts_to_labels(
        pat: Pattern,
        global_args: dict[str, Any],
        elem: dict[str, Any],
        cc: int,
        ) -> None:
    elem_off = elem['offsets']      # which elements belong to each cell
    xy = elem['xy']
    layer_tups = global_args['layer_tups']
    layer_inds = elem['layer_inds']
    prop_key = elem['prop_key']
    prop_val = elem['prop_val']

    elem_count = elem_off[cc + 1] - elem_off[cc]
    elem_slc = slice(elem_off[cc], elem_off[cc] + elem_count + 1)   # +1 to capture ending location for last elem
    prop_offs = elem['prop_off'][elem_slc]      # which props belong to each element
    elem_layer_inds = layer_inds[elem_slc][:elem_count]
    elem_strings = elem['string'][elem_slc][:elem_count]

    for ee in range(elem_count):
        layer = layer_tups[elem_layer_inds[ee]]
        offset = xy[ee]
        string = elem_strings[ee]

        annotations: None | dict[str, list[int | float | str]] = None
        prop_ii, prop_ff = prop_offs[ee], prop_offs[ee + 1]
        if prop_ii < prop_ff:
            annotations = {str(prop_key[off]): [prop_val[off]] for off in range(prop_ii, prop_ff)}

        mlabel = Label(string=string, offset=offset, annotations=annotations)
        pat.labels[layer].append(mlabel)


def _gpaths_to_mpaths(
        pat: Pattern,
        global_args: dict[str, Any],
        elem: dict[str, Any],
        cc: int,
        ) -> None:
    elem_off = elem['offsets']      # which elements belong to each cell
    xy_val = elem['xy_arr']
    layer_tups = global_args['layer_tups']
    layer_inds = elem['layer_inds']
    prop_key = elem['prop_key']
    prop_val = elem['prop_val']

    elem_count = elem_off[cc + 1] - elem_off[cc]
    elem_slc = slice(elem_off[cc], elem_off[cc] + elem_count + 1)   # +1 to capture ending location for last elem
    xy_offs = elem['xy_off'][elem_slc]          # which xy coords belong to each element
    prop_offs = elem['prop_off'][elem_slc]      # which props belong to each element
    elem_layer_inds = layer_inds[elem_slc][:elem_count]
    elem_widths = elem['width'][elem_slc][:elem_count]
    elem_path_types = elem['path_type'][elem_slc][:elem_count]
    elem_extensions = elem['extensions'][elem_slc][:elem_count]

    zeros = numpy.zeros((elem_count, 2))
    raw_mode = global_args['raw_mode']
    for ee in range(elem_count):
        layer = layer_tups[elem_layer_inds[ee]]
        vertices = xy_val[xy_offs[ee]:xy_offs[ee + 1]]
        width = elem_widths[ee]
        cap_int = elem_path_types[ee]
        cap = path_cap_map[cap_int]
        if cap_int == 4:
            cap_extensions = elem_extensions[ee]
        else:
            cap_extensions = None

        annotations: None | dict[str, list[int | float | str]] = None
        prop_ii, prop_ff = prop_offs[ee], prop_offs[ee + 1]
        if prop_ii < prop_ff:
            annotations = {str(prop_key[off]): [prop_val[off]] for off in range(prop_ii, prop_ff)}

        path = Path(vertices=vertices, offset=zeros[ee], annotations=annotations, raw=raw_mode,
                    width=width, cap=cap, cap_extensions=cap_extensions)
        pat.shapes[layer].append(path)


def _boundaries_to_polygons(
        pat: Pattern,
        global_args: dict[str, Any],
        elem: dict[str, Any],
        cc: int,
        ) -> None:
    elem_off = elem['offsets']      # which elements belong to each cell
    xy_val = elem['xy_arr']
    layer_inds = elem['layer_inds']
    layer_tups = global_args['layer_tups']
    prop_key = elem['prop_key']
    prop_val = elem['prop_val']

    elem_count = elem_off[cc + 1] - elem_off[cc]
    elem_slc = slice(elem_off[cc], elem_off[cc] + elem_count + 1)   # +1 to capture ending location for last elem
    xy_offs = elem['xy_off'][elem_slc]          # which xy coords belong to each element
    xy_counts = xy_offs[1:] - xy_offs[:-1]
    prop_offs = elem['prop_off'][elem_slc]      # which props belong to each element
    prop_counts = prop_offs[1:] - prop_offs[:-1]
    elem_layer_inds = layer_inds[elem_slc][:elem_count]

    order = numpy.argsort(elem_layer_inds, stable=True)
    unilayer_inds, unilayer_first, unilayer_count = numpy.unique(elem_layer_inds, return_index=True, return_counts=True)

    zeros = numpy.zeros((elem_count, 2))
    raw_mode = global_args['raw_mode']
    for layer_ind, ff, cc in zip(unilayer_inds, unilayer_first, unilayer_count, strict=True):
        ee_inds = order[ff:ff + cc]
        layer = layer_tups[layer_ind]
        propless_mask = prop_counts[ee_inds] == 0

        poly_count_on_layer = propless_mask.sum()
        if poly_count_on_layer == 1:
            propless_mask[:] = 0        # Never make a 1-element collection
        elif poly_count_on_layer > 1:
            propless_vert_counts = xy_counts[ee_inds[propless_mask]] - 1    # -1 to drop closing point
            vertex_lists = numpy.empty((propless_vert_counts.sum(), 2), dtype=numpy.float64)
            vertex_offsets = numpy.cumsum(numpy.concatenate([[0], propless_vert_counts]))

            for ii, ee in enumerate(ee_inds[propless_mask]):
                vo = vertex_offsets[ii]
                vertex_lists[vo:vo + propless_vert_counts[ii]] = xy_val[xy_offs[ee]:xy_offs[ee + 1] - 1]

            polys = PolyCollection(vertex_lists=vertex_lists, vertex_offsets=vertex_offsets, offset=zeros[ee])
            pat.shapes[layer].append(polys)

        # Handle single polygons
        for ee in ee_inds[~propless_mask]:
            layer = layer_tups[elem_layer_inds[ee]]
            vertices = xy_val[xy_offs[ee]:xy_offs[ee + 1] - 1]      # -1 to drop closing point

            annotations: None | dict[str, list[int | float | str]] = None
            prop_ii, prop_ff = prop_offs[ee], prop_offs[ee + 1]
            if prop_ii < prop_ff:
                annotations = {str(prop_key[off]): prop_val[off] for off in range(prop_ii, prop_ff)}

            poly = Polygon(vertices=vertices, offset=zeros[ee], annotations=annotations, raw=raw_mode)
            pat.shapes[layer].append(poly)


#def _properties_to_annotations(properties: pyarrow.Array) -> annotations_t:
#    return {prop['key'].as_py(): prop['value'].as_py() for prop in properties}


def check_valid_names(
        names: Iterable[str],
        max_length: int = 32,
        ) -> None:
    """
    Check all provided names to see if they're valid GDSII cell names.

    Args:
        names: Collection of names to check
        max_length: Max allowed length

    """
    allowed_chars = set(string.ascii_letters + string.digits + '_?$')

    bad_chars = [
        name for name in names
        if not set(name).issubset(allowed_chars)
        ]

    bad_lengths = [
        name for name in names
        if len(name) > max_length
        ]

    if bad_chars:
        logger.error('Names contain invalid characters:\n' + pformat(bad_chars))

    if bad_lengths:
        logger.error(f'Names too long (>{max_length}):\n' + pformat(bad_lengths))

    if bad_chars or bad_lengths:
        raise LibraryError('Library contains invalid names, see log above')
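A usage sketch for the new module (not part of the diff); the filename and cell name are hypothetical:

    lib, info = readfile('example.gds')       # hypothetical file
    print(info['name'], info['meters_per_unit'])
    pat = lib['TOP']                          # hypothetical cell name
    print(sum(len(ss) for ss in pat.shapes.values()), 'shapes in TOP')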
@@ -671,6 +671,8 @@ def repetition_masq2fata(

 def annotations_to_properties(annotations: annotations_t) -> list[fatrec.Property]:
     #TODO determine is_standard based on key?
+    if annotations is None:
+        return []
     properties = []
     for key, values in annotations.items():
         vals = [AString(v) if isinstance(v, str) else v
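A sketch (not part of the diff) of the added early return, with a hypothetical annotation dict:

    assert annotations_to_properties(None) == []
    props = annotations_to_properties({'purpose': ['test']})   # builds fatrec.Property entries as before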
@@ -332,7 +332,7 @@ class Pattern(PortList, AnnotatableImpl, Mirrorable):
             ))

         self.ports = dict(sorted(self.ports.items()))
-        self.annotations = dict(sorted(self.annotations.items()))
+        self.annotations = dict(sorted(self.annotations.items())) if self.annotations is not None else None

         return self
@@ -354,10 +354,13 @@ class Pattern(PortList, AnnotatableImpl, Mirrorable):
         for layer, lseq in other_pattern.labels.items():
             self.labels[layer].extend(lseq)

-        annotation_conflicts = set(self.annotations.keys()) & set(other_pattern.annotations.keys())
-        if annotation_conflicts:
-            raise PatternError(f'Annotation keys overlap: {annotation_conflicts}')
-        self.annotations.update(other_pattern.annotations)
+        if other_pattern.annotations is not None:
+            if self.annotations is None:
+                self.annotations = {}
+            annotation_conflicts = set(self.annotations.keys()) & set(other_pattern.annotations.keys())
+            if annotation_conflicts:
+                raise PatternError(f'Annotation keys overlap: {annotation_conflicts}')
+            self.annotations.update(other_pattern.annotations)

         port_conflicts = set(self.ports.keys()) & set(other_pattern.ports.keys())
         if port_conflicts:
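A sketch (not part of the diff) of the merge behavior this enables, assuming the surrounding method is `Pattern.append`:

    a = Pattern()
    a.annotations = None                  # annotations may now be None
    b = Pattern()
    b.annotations = {'purpose': ['test']}
    a.append(b)                           # a None target no longer breaks the merge
    assert a.annotations == {'purpose': ['test']}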
@@ -415,7 +418,7 @@ class Pattern(PortList, AnnotatableImpl, Mirrorable):
         elif default_keep:
             pat.refs = copy.copy(self.refs)

-        if annotations is not None:
+        if annotations is not None and self.annotations is not None:
             pat.annotations = {k: v for k, v in self.annotations.items() if annotations(k, v)}
         elif default_keep:
             pat.annotations = copy.copy(self.annotations)
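The `annotations` argument in this hunk is used as a filter callable over (key, values) pairs; a sketch of one such callable (not part of the diff; the enclosing method's name is not visible in this view):

    def keep_docs(key: str, vals: list) -> bool:
        return key.startswith('doc_')     # keep only 'doc_*' annotation keys when filtering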
@@ -10,6 +10,7 @@ from .shape import (
     )

 from .polygon import Polygon as Polygon
+from .poly_collection import PolyCollection as PolyCollection
 from .circle import Circle as Circle
 from .ellipse import Ellipse as Ellipse
 from .arc import Arc as Arc
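With this re-export in place, the new class should be importable from the package (a small sketch, not part of the diff):

    from masque.shapes import PolyCollection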
@@ -157,7 +157,7 @@ class Arc(Shape):
             offset: ArrayLike = (0.0, 0.0),
             rotation: float = 0,
             repetition: Repetition | None = None,
-            annotations: annotations_t | None = None,
+            annotations: annotations_t = None,
             raw: bool = False,
             ) -> None:
         if raw:
@@ -170,7 +170,7 @@ class Arc(Shape):
             self._offset = offset
             self._rotation = rotation
             self._repetition = repetition
-            self._annotations = annotations if annotations is not None else {}
+            self._annotations = annotations
         else:
             self.radii = radii
             self.angles = angles
@@ -178,7 +178,7 @@ class Arc(Shape):
             self.offset = offset
             self.rotation = rotation
             self.repetition = repetition
-            self.annotations = annotations if annotations is not None else {}
+            self.annotations = annotations

     def __deepcopy__(self, memo: dict | None = None) -> 'Arc':
         memo = {} if memo is None else memo
@@ -48,7 +48,7 @@ class Circle(Shape):
             *,
             offset: ArrayLike = (0.0, 0.0),
             repetition: Repetition | None = None,
-            annotations: annotations_t | None = None,
+            annotations: annotations_t = None,
             raw: bool = False,
             ) -> None:
         if raw:
@@ -56,12 +56,12 @@ class Circle(Shape):
             self._radius = radius
             self._offset = offset
             self._repetition = repetition
-            self._annotations = annotations if annotations is not None else {}
+            self._annotations = annotations
         else:
             self.radius = radius
             self.offset = offset
             self.repetition = repetition
-            self.annotations = annotations if annotations is not None else {}
+            self.annotations = annotations

     def __deepcopy__(self, memo: dict | None = None) -> 'Circle':
         memo = {} if memo is None else memo
@@ -93,7 +93,7 @@ class Ellipse(Shape):
             offset: ArrayLike = (0.0, 0.0),
             rotation: float = 0,
             repetition: Repetition | None = None,
-            annotations: annotations_t | None = None,
+            annotations: annotations_t = None,
             raw: bool = False,
             ) -> None:
         if raw:
@@ -103,13 +103,13 @@ class Ellipse(Shape):
             self._offset = offset
             self._rotation = rotation
             self._repetition = repetition
-            self._annotations = annotations if annotations is not None else {}
+            self._annotations = annotations
         else:
             self.radii = radii
             self.offset = offset
             self.rotation = rotation
             self.repetition = repetition
-            self.annotations = annotations if annotations is not None else {}
+            self.annotations = annotations

     def __deepcopy__(self, memo: dict | None = None) -> Self:
         memo = {} if memo is None else memo
@@ -170,7 +170,7 @@ class Path(Shape):
             offset: ArrayLike = (0.0, 0.0),
             rotation: float = 0,
             repetition: Repetition | None = None,
-            annotations: annotations_t | None = None,
+            annotations: annotations_t = None,
             raw: bool = False,
             ) -> None:
         self._cap_extensions = None     # Since .cap setter might access it
@@ -182,7 +182,7 @@ class Path(Shape):
             self._vertices = vertices
             self._offset = offset
             self._repetition = repetition
-            self._annotations = annotations if annotations is not None else {}
+            self._annotations = annotations
             self._width = width
             self._cap = cap
             self._cap_extensions = cap_extensions
@@ -190,7 +190,7 @@ class Path(Shape):
             self.vertices = vertices
             self.offset = offset
             self.repetition = repetition
-            self.annotations = annotations if annotations is not None else {}
+            self.annotations = annotations
             self.width = width
             self.cap = cap
             self.cap_extensions = cap_extensions
masque/shapes/poly_collection.py (new file, 207 lines)
@@ -0,0 +1,207 @@
from typing import Any, cast, Self
from collections.abc import Iterator
import copy
import functools
from itertools import chain

import numpy
from numpy import pi
from numpy.typing import NDArray, ArrayLike

from . import Shape, normalized_shape_tuple
from .polygon import Polygon
from ..repetition import Repetition
from ..utils import rotation_matrix_2d, annotations_lt, annotations_eq, rep2key, annotations_t


@functools.total_ordering
class PolyCollection(Shape):
    """
    A collection of polygons, consisting of concatenated vertex arrays (N_m x 2 ndarray) which specify
    implicitly-closed boundaries, and an array of offsets specifying the first vertex of each
    successive polygon.

    A `normalized_form(...)` is available, but is untested and probably fairly slow.
    """
    __slots__ = (
        '_vertex_lists',
        '_vertex_offsets',
        # Inherited
        '_offset', '_repetition', '_annotations',
        )

    _vertex_lists: NDArray[numpy.float64]
    """ 2D NDArray ((N+M+...) x 2) of vertices `[[xa0, ya0], [xa1, ya1], ..., [xb0, yb0], [xb1, yb1], ... ]` """

    _vertex_offsets: NDArray[numpy.intp]
    """ 1D NDArray specifying the starting offset for each polygon """

    @property
    def vertex_lists(self) -> Any:      # mypy#3004  NDArray[numpy.float64]:
        """
        Vertices of the polygons, ((N+M+...) x 2). Use with `vertex_offsets`.
        """
        return self._vertex_lists

    @property
    def vertex_offsets(self) -> Any:    # mypy#3004  NDArray[numpy.intp]:
        """
        Starting offset (in `vertex_lists`) for each polygon
        """
        return self._vertex_offsets

    @property
    def vertex_slices(self) -> Iterator[slice]:
        """
        Iterator which provides slices which index vertex_lists
        """
        for ii, ff in zip(
                self._vertex_offsets,
                chain(self._vertex_offsets[1:], (self._vertex_lists.shape[0],)),    # pair each start with the next start (or the array end)
                strict=True,
                ):
            yield slice(ii, ff)

    @property
    def polygon_vertices(self) -> Iterator[NDArray[numpy.float64]]:
        for slc in self.vertex_slices:
            yield self._vertex_lists[slc]

    def __init__(
            self,
            vertex_lists: ArrayLike,
            vertex_offsets: ArrayLike,
            *,
            offset: ArrayLike = (0.0, 0.0),
            rotation: float = 0.0,
            repetition: Repetition | None = None,
            annotations: annotations_t = None,
            raw: bool = False,
            ) -> None:
        if raw:
            assert isinstance(vertex_lists, numpy.ndarray)
            assert isinstance(vertex_offsets, numpy.ndarray)
            assert isinstance(offset, numpy.ndarray)
            self._vertex_lists = vertex_lists
            self._vertex_offsets = vertex_offsets
            self._offset = offset
            self._repetition = repetition
            self._annotations = annotations
        else:
            self._vertex_lists = numpy.asarray(vertex_lists, dtype=float)
            self._vertex_offsets = numpy.asarray(vertex_offsets, dtype=numpy.intp)
            self.offset = offset
            self.repetition = repetition
            self.annotations = annotations
            if rotation:
                self.rotate(rotation)

    def __deepcopy__(self, memo: dict | None = None) -> Self:
        memo = {} if memo is None else memo
        new = copy.copy(self)
        new._offset = self._offset.copy()
        new._vertex_lists = self._vertex_lists.copy()
        new._vertex_offsets = self._vertex_offsets.copy()
        new._annotations = copy.deepcopy(self._annotations)
        return new

    def __eq__(self, other: Any) -> bool:
        return (
            type(self) is type(other)
            and numpy.array_equal(self.offset, other.offset)
            and numpy.array_equal(self._vertex_lists, other._vertex_lists)
            and numpy.array_equal(self._vertex_offsets, other._vertex_offsets)
            and self.repetition == other.repetition
            and annotations_eq(self.annotations, other.annotations)
            )

    def __lt__(self, other: Shape) -> bool:
        if type(self) is not type(other):
            if repr(type(self)) != repr(type(other)):
                return repr(type(self)) < repr(type(other))
            return id(type(self)) < id(type(other))

        other = cast('PolyCollection', other)

        for vv, oo in zip(self.polygon_vertices, other.polygon_vertices, strict=False):
            if not numpy.array_equal(vv, oo):
                min_len = min(vv.shape[0], oo.shape[0])
                eq_mask = vv[:min_len] != oo[:min_len]
                eq_lt = vv[:min_len] < oo[:min_len]
                eq_lt_masked = eq_lt[eq_mask]
                if eq_lt_masked.size > 0:
                    return eq_lt_masked.flat[0]
                return vv.shape[0] < oo.shape[0]
        if len(self.vertex_lists) != len(other.vertex_lists):
            return len(self.vertex_lists) < len(other.vertex_lists)
        if not numpy.array_equal(self.offset, other.offset):
            return tuple(self.offset) < tuple(other.offset)
        if self.repetition != other.repetition:
            return rep2key(self.repetition) < rep2key(other.repetition)
        return annotations_lt(self.annotations, other.annotations)

    def to_polygons(
            self,
            num_vertices: int | None = None,        # unused  # noqa: ARG002
            max_arclen: float | None = None,        # unused  # noqa: ARG002
            ) -> list['Polygon']:
        return [Polygon(
            vertices = vv,
            offset = self.offset,
            repetition = copy.deepcopy(self.repetition),
            annotations = copy.deepcopy(self.annotations),
            ) for vv in self.polygon_vertices]

    def get_bounds_single(self) -> NDArray[numpy.float64]:         # TODO note shape get_bounds doesn't include repetition
        return numpy.vstack((self.offset + numpy.min(self._vertex_lists, axis=0),
                             self.offset + numpy.max(self._vertex_lists, axis=0)))

    def rotate(self, theta: float) -> Self:
        if theta != 0:
            rot = rotation_matrix_2d(theta)
            self._vertex_lists = numpy.einsum('ij,kj->ki', rot, self._vertex_lists)
        return self

    def mirror(self, axis: int = 0) -> Self:
        self._vertex_lists[:, axis - 1] *= -1
        return self

    def scale_by(self, c: float) -> Self:
        self._vertex_lists *= c
        return self

    def normalized_form(self, norm_value: float) -> normalized_shape_tuple:
        # Note: this function is going to be pretty slow for many-vertexed polygons, relative to
        #   other shapes
        meanv = self._vertex_lists.mean(axis=0)
        zeroed_vertices = self._vertex_lists - [meanv]
        offset = meanv + self.offset

        scale = zeroed_vertices.std()
        normed_vertices = zeroed_vertices / scale

        _, _, vertex_axis = numpy.linalg.svd(zeroed_vertices)
        rotation = numpy.arctan2(vertex_axis[0][1], vertex_axis[0][0]) % (2 * pi)
        rotated_vertices = numpy.einsum('ij,kj->ki', rotation_matrix_2d(-rotation), normed_vertices)

        # TODO consider how to reorder vertices for polycollection
        ## Reorder the vertices so that the one with lowest x, then y, comes first.
        #x_min = rotated_vertices[:, 0].argmin()
        #if not is_scalar(x_min):
        #    y_min = rotated_vertices[x_min, 1].argmin()
        #    x_min = cast('Sequence', x_min)[y_min]
        #reordered_vertices = numpy.roll(rotated_vertices, -x_min, axis=0)

        # TODO: normalize mirroring?

        return ((type(self), rotated_vertices.data.tobytes() + self._vertex_offsets.tobytes()),
                (offset, scale / norm_value, rotation, False),
                lambda: PolyCollection(
                    vertex_lists=rotated_vertices * norm_value,
                    vertex_offsets=self._vertex_offsets,
                    ),
                )

    def __repr__(self) -> str:
        centroid = self.offset + self.vertex_lists.mean(axis=0)
        return f'<PolyCollection centroid {centroid} p{len(self.vertex_offsets)}>'
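A small sketch (not part of the diff) exercising the new class:

    pc = PolyCollection(
        vertex_lists=[[0, 0], [4, 0], [0, 4],                  # triangle
                      [10, 0], [14, 0], [14, 4], [10, 4]],     # quad
        vertex_offsets=[0, 3],
        offset=(100, 100),
        )
    assert len(list(pc.polygon_vertices)) == 2
    polys = pc.to_polygons()          # split back into independent Polygon shapes
    assert all(numpy.array_equal(p.offset, [100., 100.]) for p in polys)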
@@ -92,7 +92,7 @@ class Polygon(Shape):
             offset: ArrayLike = (0.0, 0.0),
             rotation: float = 0.0,
             repetition: Repetition | None = None,
-            annotations: annotations_t | None = None,
+            annotations: annotations_t = None,
             raw: bool = False,
             ) -> None:
         if raw:
@@ -101,13 +101,14 @@ class Polygon(Shape):
             self._vertices = vertices
             self._offset = offset
             self._repetition = repetition
-            self._annotations = annotations if annotations is not None else {}
+            self._annotations = annotations
         else:
             self.vertices = vertices
             self.offset = offset
             self.repetition = repetition
-            self.annotations = annotations if annotations is not None else {}
-            self.rotate(rotation)
+            self.annotations = annotations
+            if rotation:
+                self.rotate(rotation)

     def __deepcopy__(self, memo: dict | None = None) -> 'Polygon':
         memo = {} if memo is None else memo
@@ -71,7 +71,7 @@ class Text(RotatableImpl, Shape):
             offset: ArrayLike = (0.0, 0.0),
             rotation: float = 0.0,
             repetition: Repetition | None = None,
-            annotations: annotations_t | None = None,
+            annotations: annotations_t = None,
             raw: bool = False,
             ) -> None:
         if raw:
@@ -81,14 +81,14 @@ class Text(RotatableImpl, Shape):
             self._height = height
             self._rotation = rotation
             self._repetition = repetition
-            self._annotations = annotations if annotations is not None else {}
+            self._annotations = annotations
         else:
             self.offset = offset
             self.string = string
             self.height = height
             self.rotation = rotation
             self.repetition = repetition
-            self.annotations = annotations if annotations is not None else {}
+            self.annotations = annotations
             self.font_path = font_path

     def __deepcopy__(self, memo: dict | None = None) -> Self:
@@ -45,6 +45,6 @@ class AnnotatableImpl(Annotatable, metaclass=ABCMeta):

     @annotations.setter
     def annotations(self, annotations: annotations_t) -> None:
-        if not isinstance(annotations, dict):
-            raise MasqueError(f'annotations expected dict, got {type(annotations)}')
+        if not isinstance(annotations, dict) and annotations is not None:
+            raise MasqueError(f'annotations expected dict or None, got {type(annotations)}')
         self._annotations = annotations
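A sketch (not part of the diff) of what the relaxed check now accepts; `shape` is any hypothetical annotatable object:

    shape.annotations = {'purpose': ['fill']}   # still fine
    shape.annotations = None                    # now allowed instead of requiring {}
    try:
        shape.annotations = 'oops'              # non-dict, non-None still raises
    except MasqueError:
        pass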
@@ -5,7 +5,7 @@ from numpy import pi
 try:
     from numpy import trapezoid
 except ImportError:
-    from numpy import trapz as trapezoid
+    from numpy import trapz as trapezoid  # type:ignore


 def bezier(
@@ -5,7 +5,7 @@ from typing import Protocol


 layer_t = int | tuple[int, int] | str
-annotations_t = dict[str, list[int | float | str]]
+annotations_t = dict[str, list[int | float | str]] | None


 class SupportsBool(Protocol):
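A sketch (not part of the diff) of values that satisfy the widened alias:

    notes: annotations_t = {'owner': ['fabA'], 'rev': [3]}
    empty: annotations_t = None                 # None now replaces the old "empty dict" convention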