Compare commits
20 Commits (1fdfcbd85d...f78ba3655e)

SHA1:
f78ba3655e
b4d287f384
037118883b
5368fd4e16
d0c1b00d7e
6fba14ae21
82fafdc61b
7336545f07
4e40e3f829
79f2088180
e89d912ce8
76511b95e6
88bd5e897e
dc89491694
de9714041f
35e28acb89
c1bfee1ddd
560c165f2e
284c7e4fd0
1eac3baf6a
@@ -169,11 +169,11 @@ def ell(
                  'emax', 'max_extension',
                  'min_past_furthest',):
         if numpy.size(bound) == 2:
-            bound = cast(Sequence[float], bound)
+            bound = cast('Sequence[float]', bound)
             rot_bound = (rot_matrix @ ((bound[0], 0),
                                        (0, bound[1])))[0, :]
         else:
-            bound = cast(float, bound)
+            bound = cast('float', bound)
             rot_bound = numpy.array(bound)

         if rot_bound < 0:
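Note: the change repeated throughout this compare is `cast(T, x)` -> `cast('T', x)`. A minimal illustrative sketch (not part of the diff; the `first()` helper is hypothetical) of why the quoted form is useful — `typing.cast` accepts the type as a string, so it still returns the value unchanged at runtime while letting the named type be imported only for type checking:

    from typing import TYPE_CHECKING, cast

    if TYPE_CHECKING:
        # Only needed by the type checker, not at runtime.
        from collections.abc import Sequence

    def first(bound) -> float:
        seq = cast('Sequence[float]', bound)   # no runtime evaluation of Sequence[float]
        return seq[0]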
@@ -185,10 +185,10 @@ def ell(
             offsets += rot_bound.min() - offsets.max()
     else:
         if numpy.size(bound) == 2:
-            bound = cast(Sequence[float], bound)
+            bound = cast('Sequence[float]', bound)
             rot_bound = (rot_matrix @ bound)[0]
         else:
-            bound = cast(float, bound)
+            bound = cast('float', bound)
             neg = (direction + pi / 4) % (2 * pi) > pi
             rot_bound = -bound if neg else bound

@@ -132,7 +132,7 @@ def writefile(
     with tmpfile(path) as base_stream:
         streams: tuple[Any, ...] = (base_stream,)
         if path.suffix == '.gz':
-            gz_stream = cast(IO[bytes], gzip.GzipFile(filename='', mtime=0, fileobj=base_stream, mode='wb'))
+            gz_stream = cast('IO[bytes]', gzip.GzipFile(filename='', mtime=0, fileobj=base_stream, mode='wb'))
             streams = (gz_stream,) + streams
         else:
             gz_stream = base_stream
@@ -21,6 +21,7 @@ Notes:
 """
 from typing import IO, cast, Any
 from collections.abc import Iterable, Mapping, Callable
+from types import MappingProxyType
 import io
 import mmap
 import logging
@@ -52,6 +53,8 @@ path_cap_map = {
     4: Path.Cap.SquareCustom,
     }

+RO_EMPTY_DICT: Mapping[int, bytes] = MappingProxyType({})
+

 def rint_cast(val: ArrayLike) -> NDArray[numpy.int32]:
     return numpy.rint(val).astype(numpy.int32)
@@ -145,7 +148,7 @@ def writefile(
     with tmpfile(path) as base_stream:
         streams: tuple[Any, ...] = (base_stream,)
         if path.suffix == '.gz':
-            stream = cast(IO[bytes], gzip.GzipFile(filename='', mtime=0, fileobj=base_stream, mode='wb', compresslevel=6))
+            stream = cast('IO[bytes]', gzip.GzipFile(filename='', mtime=0, fileobj=base_stream, mode='wb', compresslevel=6))
             streams = (stream,) + streams
         else:
             stream = base_stream
@@ -399,11 +402,15 @@ def _mrefs_to_grefs(refs: dict[str | None, list[Ref]]) -> list[klamath.library.R
     return grefs


-def _properties_to_annotations(properties: dict[int, bytes]) -> annotations_t:
+def _properties_to_annotations(properties: Mapping[int, bytes]) -> annotations_t:
+    if not properties:
+        return None
     return {str(k): [v.decode()] for k, v in properties.items()}


-def _annotations_to_properties(annotations: annotations_t, max_len: int = 126) -> dict[int, bytes]:
+def _annotations_to_properties(annotations: annotations_t, max_len: int = 126) -> Mapping[int, bytes]:
+    if annotations is None:
+        return RO_EMPTY_DICT
     cum_len = 0
     props = {}
     for key, vals in annotations.items():
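Note: `_annotations_to_properties()` now returns the shared `RO_EMPTY_DICT` (a `MappingProxyType`) when there are no annotations. A minimal sketch (not part of the diff) of the read-only behavior this relies on:

    from types import MappingProxyType

    RO_EMPTY = MappingProxyType({})        # one shared, immutable empty mapping

    print(len(RO_EMPTY), dict(RO_EMPTY))   # 0 {}  -- reads behave like a normal dict
    try:
        RO_EMPTY[0] = b'x'                 # writes are rejected, so the shared
    except TypeError as err:               # instance cannot be mutated by callers
        print('read-only:', err)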
masque/file/gdsii_arrow.py  (new file, 426 added lines)
@@ -0,0 +1,426 @@
"""
GDSII file format readers and writers using the `TODO` library.

Note that GDSII references follow the same convention as `masque`,
with this order of operations:
   1. Mirroring
   2. Rotation
   3. Scaling
   4. Offset and array expansion (no mirroring/rotation/scaling applied to offsets)

Scaling, rotation, and mirroring apply to individual instances, not grid
vectors or offsets.

Notes:
 * absolute positioning is not supported
 * PLEX is not supported
 * ELFLAGS are not supported
 * GDS does not support library- or structure-level annotations
 * GDS creation/modification/access times are set to 1900-01-01 for reproducibility.
 * Gzip modification time is set to 0 (start of current epoch, usually 1970-01-01)

TODO writing
TODO warn on boxes, nodes
"""
from typing import IO, cast, Any
from collections.abc import Iterable, Mapping, Callable
import io
import mmap
import logging
import pathlib
import gzip
import string
from pprint import pformat

import numpy
from numpy.typing import ArrayLike, NDArray
from numpy.testing import assert_equal
import pyarrow
from pyarrow.cffi import ffi

from .utils import is_gzipped, tmpfile
from .. import Pattern, Ref, PatternError, LibraryError, Label, Shape
from ..shapes import Polygon, Path
from ..repetition import Grid
from ..utils import layer_t, annotations_t
from ..library import LazyLibrary, Library, ILibrary, ILibraryView


logger = logging.getLogger(__name__)


clib = ffi.dlopen('/home/jan/projects/klamath-rs/target/release/libklamath_rs_ext.so')
ffi.cdef('void read_path(char* path, struct ArrowArray* array, struct ArrowSchema* schema);')


path_cap_map = {
    0: Path.Cap.Flush,
    1: Path.Cap.Circle,
    2: Path.Cap.Square,
    4: Path.Cap.SquareCustom,
    }


def rint_cast(val: ArrayLike) -> NDArray[numpy.int32]:
    return numpy.rint(val).astype(numpy.int32)

def _read_to_arrow(
        filename: str | pathlib.Path,
        *args,
        **kwargs,
        ) -> pyarrow.Array:
    path = pathlib.Path(filename)
    path.resolve()
    ptr_array = ffi.new('struct ArrowArray[]', 1)
    ptr_schema = ffi.new('struct ArrowSchema[]', 1)
    clib.read_path(str(path).encode(), ptr_array, ptr_schema)

    iptr_schema = int(ffi.cast('uintptr_t', ptr_schema))
    iptr_array = int(ffi.cast('uintptr_t', ptr_array))
    arrow_arr = pyarrow.Array._import_from_c(iptr_array, iptr_schema)

    return arrow_arr

def readfile(
        filename: str | pathlib.Path,
        *args,
        **kwargs,
        ) -> tuple[Library, dict[str, Any]]:
    """
    Wrapper for `read()` that takes a filename or path instead of a stream.

    Will automatically decompress gzipped files.

    Args:
        filename: Filename to save to.
        *args: passed to `read()`
        **kwargs: passed to `read()`
    """
    arrow_arr = _read_to_arrow(filename)
    assert len(arrow_arr) == 1

    results = read_arrow(arrow_arr[0])

    return results

def read_arrow(
        libarr: pyarrow.Array,
        raw_mode: bool = True,
        ) -> tuple[Library, dict[str, Any]]:
    """
    # TODO check GDSII file for cycles!
    Read a gdsii file and translate it into a dict of Pattern objects. GDSII structures are
      translated into Pattern objects; boundaries are translated into polygons, and srefs and arefs
      are translated into Ref objects.

    Additional library info is returned in a dict, containing:
      'name': name of the library
      'meters_per_unit': number of meters per database unit (all values are in database units)
      'logical_units_per_unit': number of "logical" units displayed by layout tools (typically microns)
                                per database unit

    Args:
        stream: Stream to read from.
        raw_mode: If True, constructs shapes in raw mode, bypassing most data validation, Default True.

    Returns:
        - dict of pattern_name:Patterns generated from GDSII structures
        - dict of GDSII library info
    """
    library_info = _read_header(libarr)

    layer_names_np = libarr['layers'].values.to_numpy().view('i2').reshape((-1, 2))
    layer_tups = [tuple(pair) for pair in layer_names_np]

    cell_ids = libarr['cells'].values.field('id').to_numpy()
    cell_names = libarr['cell_names'].as_py()

    def get_geom(libarr: pyarrow.Array, geom_type: str) -> dict[str, Any]:
        el = libarr['cells'].values.field(geom_type)
        elem = dict(
            offsets = el.offsets.to_numpy(),
            xy_arr = el.values.field('xy').values.to_numpy().reshape((-1, 2)),
            xy_off = el.values.field('xy').offsets.to_numpy() // 2,
            layer_inds = el.values.field('layer').to_numpy(),
            prop_off = el.values.field('properties').offsets.to_numpy(),
            prop_key = el.values.field('properties').values.field('key').to_numpy(),
            prop_val = el.values.field('properties').values.field('value').to_pylist(),
            )
        return elem

    rf = libarr['cells'].values.field('refs')
    refs = dict(
        offsets = rf.offsets.to_numpy(),
        targets = rf.values.field('target').to_numpy(),
        xy = rf.values.field('xy').to_numpy().view('i4').reshape((-1, 2)),
        invert_y = rf.values.field('invert_y').fill_null(False).to_numpy(zero_copy_only=False),
        angle_rad = numpy.rad2deg(rf.values.field('angle_deg').fill_null(0).to_numpy()),
        scale = rf.values.field('mag').fill_null(1).to_numpy(),
        rep_valid = rf.values.field('repetition').is_valid().to_numpy(zero_copy_only=False),
        rep_xy0 = rf.values.field('repetition').field('xy0').fill_null(0).to_numpy().view('i4').reshape((-1, 2)),
        rep_xy1 = rf.values.field('repetition').field('xy1').fill_null(0).to_numpy().view('i4').reshape((-1, 2)),
        rep_counts = rf.values.field('repetition').field('counts').fill_null(0).to_numpy().view('i2').reshape((-1, 2)),
        prop_off = rf.values.field('properties').offsets.to_numpy(),
        prop_key = rf.values.field('properties').values.field('key').to_numpy(),
        prop_val = rf.values.field('properties').values.field('value').to_pylist(),
        )

    txt = libarr['cells'].values.field('texts')
    texts = dict(
        offsets = txt.offsets.to_numpy(),
        layer_inds = txt.values.field('layer').to_numpy(),
        xy = txt.values.field('xy').to_numpy().view('i4').reshape((-1, 2)),
        string = txt.values.field('string').to_pylist(),
        prop_off = txt.values.field('properties').offsets.to_numpy(),
        prop_key = txt.values.field('properties').values.field('key').to_numpy(),
        prop_val = txt.values.field('properties').values.field('value').to_pylist(),
        )

    elements = dict(
        boundaries = get_geom(libarr, 'boundaries'),
        paths = get_geom(libarr, 'paths'),
        boxes = get_geom(libarr, 'boxes'),
        nodes = get_geom(libarr, 'nodes'),
        texts = texts,
        refs = refs,
        )

    paths = libarr['cells'].values.field('paths')
    elements['paths'].update(dict(
        width = paths.values.field('width').to_numpy(),
        path_type = paths.values.field('path_type').to_numpy(),
        extensions = numpy.stack((
            paths.values.field('extension_start').to_numpy(zero_copy_only=False),
            paths.values.field('extension_end').to_numpy(zero_copy_only=False),
            ), axis=-1),
        ))

    global_args = dict(
        cell_names = cell_names,
        layer_tups = layer_tups,
        raw_mode = raw_mode,
        )

    mlib = Library()
    for cc in range(len(libarr['cells'])):
        name = cell_names[cell_ids[cc]]
        pat = Pattern()
        _boundaries_to_polygons(pat, global_args, elements['boundaries'], cc)
        _gpaths_to_mpaths(pat, global_args, elements['paths'], cc)
        _grefs_to_mrefs(pat, global_args, elements['refs'], cc)
        _texts_to_labels(pat, global_args, elements['texts'], cc)
        mlib[name] = pat

    return mlib, library_info

def _read_header(libarr: pyarrow.Array) -> dict[str, Any]:
    """
    Read the file header and create the library_info dict.
    """
    library_info = dict(
        name = libarr['lib_name'],
        meters_per_unit = libarr['meters_per_db_unit'],
        logical_units_per_unit = libarr['user_units_per_db_unit'],
        )
    return library_info

def _grefs_to_mrefs(
        pat: Pattern,
        global_args: dict[str, Any],
        elem: dict[str, Any],
        cc: int,
        ) -> None:
    cell_names = global_args['cell_names']
    elem_off = elem['offsets']      # which elements belong to each cell
    xy = elem['xy']
    prop_key = elem['prop_key']
    prop_val = elem['prop_val']
    targets = elem['targets']

    rep_valid = elem['rep_valid']

    elem_count = elem_off[cc + 1] - elem_off[cc]
    elem_slc = slice(elem_off[cc], elem_off[cc] + elem_count + 1)   # +1 to capture ending location for last elem
    prop_offs = elem['prop_off'][elem_slc]      # which props belong to each element
    elem_invert_y = elem['invert_y'][elem_slc]
    elem_angle_rad = elem['angle_rad'][elem_slc]
    elem_scale = elem['scale'][elem_slc]
    elem_rep_xy0 = elem['rep_xy0'][elem_slc]
    elem_rep_xy1 = elem['rep_xy1'][elem_slc]
    elem_rep_counts = elem['rep_counts'][elem_slc]

    for ee in range(elem_count):
        target = cell_names[targets[ee]]
        offset = xy[ee]
        mirr = elem_invert_y[ee]
        rot = elem_angle_rad[ee]
        mag = elem_scale[ee]

        rep: None | Grid = None
        if rep_valid[ee]:
            a_vector = elem_rep_xy0[ee]
            b_vector = elem_rep_xy1[ee]
            a_count, b_count = elem_rep_counts[ee]
            rep = Grid(a_vector=a_vector, b_vector=b_vector, a_count=a_count, b_count=b_count)

        annotations: None | dict[int, str] = None
        prop_ii, prop_ff = prop_offs[ee], prop_offs[ee + 1]
        if prop_ii < prop_ff:
            annotations = {prop_key[off]: prop_val[off] for off in range(prop_ii, prop_ff)}

        ref = Ref(offset=offset, mirrored=mirr, rotation=rot, scale=mag, repetition=rep, annotations=annotations)
        pat.refs[target].append(ref)

def _texts_to_labels(
        pat: Pattern,
        global_args: dict[str, Any],
        elem: dict[str, Any],
        cc: int,
        ) -> None:
    elem_off = elem['offsets']      # which elements belong to each cell
    xy = elem['xy']
    layer_tups = global_args['layer_tups']
    layer_inds = elem['layer_inds']
    prop_key = elem['prop_key']
    prop_val = elem['prop_val']

    elem_count = elem_off[cc + 1] - elem_off[cc]
    elem_slc = slice(elem_off[cc], elem_off[cc] + elem_count + 1)   # +1 to capture ending location for last elem
    prop_offs = elem['prop_off'][elem_slc]      # which props belong to each element
    elem_layer_inds = layer_inds[elem_slc]
    elem_strings = elem['string'][elem_slc]

    for ee in range(elem_count):
        layer = layer_tups[elem_layer_inds[ee]]
        offset = xy[ee]
        string = elem_strings[ee]

        annotations: None | dict[int, str] = None
        prop_ii, prop_ff = prop_offs[ee], prop_offs[ee + 1]
        if prop_ii < prop_ff:
            annotations = {prop_key[off]: prop_val[off] for off in range(prop_ii, prop_ff)}

        mlabel = Label(string=string, offset=offset, annotations=annotations)
        pat.labels[layer].append(mlabel)

def _gpaths_to_mpaths(
        pat: Pattern,
        global_args: dict[str, Any],
        elem: dict[str, Any],
        cc: int,
        ) -> None:
    elem_off = elem['offsets']      # which elements belong to each cell
    xy_val = elem['xy_arr']
    layer_tups = global_args['layer_tups']
    layer_inds = elem['layer_inds']
    prop_key = elem['prop_key']
    prop_val = elem['prop_val']

    elem_count = elem_off[cc + 1] - elem_off[cc]
    elem_slc = slice(elem_off[cc], elem_off[cc] + elem_count + 1)   # +1 to capture ending location for last elem
    xy_offs = elem['xy_off'][elem_slc]          # which xy coords belong to each element
    prop_offs = elem['prop_off'][elem_slc]      # which props belong to each element
    elem_layer_inds = layer_inds[elem_slc]
    elem_widths = elem['width'][elem_slc]
    elem_path_types = elem['path_type'][elem_slc]
    elem_extensions = elem['extensions'][elem_slc]

    zeros = numpy.zeros((elem_count, 2))
    raw_mode = global_args['raw_mode']
    for ee in range(elem_count):
        layer = layer_tups[elem_layer_inds[ee]]
        vertices = xy_val[xy_offs[ee]:xy_offs[ee + 1]]
        width = elem_widths[ee]
        cap_int = elem_path_types[ee]
        cap = path_cap_map[cap_int]
        if cap_int == 4:
            cap_extensions = elem_extensions[ee]
        else:
            cap_extensions = None

        annotations: None | dict[int, str] = None
        prop_ii, prop_ff = prop_offs[ee], prop_offs[ee + 1]
        if prop_ii < prop_ff:
            annotations = {prop_key[off]: prop_val[off] for off in range(prop_ii, prop_ff)}

        path = Path(vertices=vertices, offset=zeros[ee], annotations=annotations, raw=raw_mode,
                    width=width, cap=cap, cap_extensions=cap_extensions)
        pat.shapes[layer].append(path)

def _boundaries_to_polygons(
        pat: Pattern,
        global_args: dict[str, Any],
        elem: dict[str, Any],
        cc: int,
        ) -> None:
    elem_off = elem['offsets']      # which elements belong to each cell
    xy_val = elem['xy_arr']
    layer_inds = elem['layer_inds']
    layer_tups = global_args['layer_tups']
    prop_key = elem['prop_key']
    prop_val = elem['prop_val']

    elem_count = elem_off[cc + 1] - elem_off[cc]
    elem_slc = slice(elem_off[cc], elem_off[cc] + elem_count + 1)   # +1 to capture ending location for last elem
    xy_offs = elem['xy_off'][elem_slc]          # which xy coords belong to each element
    prop_offs = elem['prop_off'][elem_slc]      # which props belong to each element
    elem_layer_inds = layer_inds[elem_slc]

    zeros = numpy.zeros((elem_count, 2))
    raw_mode = global_args['raw_mode']
    for ee in range(elem_count):
        layer = layer_tups[elem_layer_inds[ee]]
        vertices = xy_val[xy_offs[ee]:xy_offs[ee + 1] - 1]   # -1 to drop closing point

        annotations: None | dict[int, str] = None
        prop_ii, prop_ff = prop_offs[ee], prop_offs[ee + 1]
        if prop_ii < prop_ff:
            annotations = {prop_key[off]: prop_val[off] for off in range(prop_ii, prop_ff)}

        poly = Polygon(vertices=vertices, offset=zeros[ee], annotations=annotations, raw=raw_mode)
        pat.shapes[layer].append(poly)


#def _properties_to_annotations(properties: pyarrow.Array) -> annotations_t:
#    return {prop['key'].as_py(): prop['value'].as_py() for prop in properties}

def check_valid_names(
        names: Iterable[str],
        max_length: int = 32,
        ) -> None:
    """
    Check all provided names to see if they're valid GDSII cell names.

    Args:
        names: Collection of names to check
        max_length: Max allowed length

    """
    allowed_chars = set(string.ascii_letters + string.digits + '_?$')

    bad_chars = [
        name for name in names
        if not set(name).issubset(allowed_chars)
        ]

    bad_lengths = [
        name for name in names
        if len(name) > max_length
        ]

    if bad_chars:
        logger.error('Names contain invalid characters:\n' + pformat(bad_chars))

    if bad_lengths:
        logger.error(f'Names too long (>{max_length}:\n' + pformat(bad_chars))

    if bad_chars or bad_lengths:
        raise LibraryError('Library contains invalid names, see log above')
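Note: a hedged usage sketch for the new reader above (the input filename is hypothetical; writing is still TODO per the module docstring, and the Rust shared-library path is currently hard-coded in the module):

    from masque.file import gdsii_arrow

    # readfile() resolves the path, calls the Rust reader through the Arrow C Data
    # Interface, and converts the result into a masque Library plus a header dict.
    mlib, info = gdsii_arrow.readfile('example.gds')
    print(info['meters_per_unit'])
    for name, pat in mlib.items():
        print(name, sum(len(shapes) for shapes in pat.shapes.values()))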
@@ -190,7 +190,7 @@ def writefile(
     with tmpfile(path) as base_stream:
         streams: tuple[Any, ...] = (base_stream,)
         if path.suffix == '.gz':
-            stream = cast(IO[bytes], gzip.GzipFile(filename='', mtime=0, fileobj=base_stream, mode='wb'))
+            stream = cast('IO[bytes]', gzip.GzipFile(filename='', mtime=0, fileobj=base_stream, mode='wb'))
             streams += (stream,)
         else:
             stream = base_stream
@@ -551,7 +551,7 @@ def _shapes_to_elements(
                 circle = fatrec.Circle(
                     layer=layer,
                     datatype=datatype,
-                    radius=cast(int, radius),
+                    radius=cast('int', radius),
                     x=offset[0],
                     y=offset[1],
                     properties=properties,
@@ -568,8 +568,8 @@ def _shapes_to_elements(
                 path = fatrec.Path(
                     layer=layer,
                     datatype=datatype,
-                    point_list=cast(Sequence[Sequence[int]], deltas),
-                    half_width=cast(int, half_width),
+                    point_list=cast('Sequence[Sequence[int]]', deltas),
+                    half_width=cast('int', half_width),
                     x=xy[0],
                     y=xy[1],
                     extension_start=extension_start,       # TODO implement multiple cap types?
@@ -587,7 +587,7 @@ def _shapes_to_elements(
                     datatype=datatype,
                     x=xy[0],
                     y=xy[1],
-                    point_list=cast(list[list[int]], points),
+                    point_list=cast('list[list[int]]', points),
                     properties=properties,
                     repetition=repetition,
                     ))
@@ -651,10 +651,10 @@ def repetition_masq2fata(
         a_count = rint_cast(rep.a_count)
         b_count = rint_cast(rep.b_count) if rep.b_count is not None else None
         frep = fatamorgana.GridRepetition(
-            a_vector=cast(list[int], a_vector),
-            b_vector=cast(list[int] | None, b_vector),
-            a_count=cast(int, a_count),
-            b_count=cast(int | None, b_count),
+            a_vector=cast('list[int]', a_vector),
+            b_vector=cast('list[int] | None', b_vector),
+            a_count=cast('int', a_count),
+            b_count=cast('int | None', b_count),
             )
         offset = (0, 0)
     elif isinstance(rep, Arbitrary):
@@ -671,6 +671,8 @@ def repetition_masq2fata(

 def annotations_to_properties(annotations: annotations_t) -> list[fatrec.Property]:
     #TODO determine is_standard based on key?
+    if annotations is None:
+        return []
     properties = []
     for key, values in annotations.items():
         vals = [AString(v) if isinstance(v, str) else v
@@ -211,7 +211,7 @@ class ILibraryView(Mapping[str, 'Pattern'], metaclass=ABCMeta):
         if isinstance(tops, str):
             tops = (tops,)

-        keep = cast(set[str], self.referenced_patterns(tops) - {None})
+        keep = cast('set[str]', self.referenced_patterns(tops) - {None})
         keep |= set(tops)

         filtered = {kk: vv for kk, vv in self.items() if kk in keep}
@@ -314,7 +314,7 @@ class ILibraryView(Mapping[str, 'Pattern'], metaclass=ABCMeta):
             flatten_single(top)

         assert None not in flattened.values()
-        return cast(dict[str, 'Pattern'], flattened)
+        return cast('dict[str, Pattern]', flattened)

     def get_name(
             self,
@@ -504,7 +504,7 @@ class ILibraryView(Mapping[str, 'Pattern'], metaclass=ABCMeta):
                 raise LibraryError('visit_* functions returned a new `Pattern` object'
                                    ' but no top-level name was provided in `hierarchy`')

-            cast(ILibrary, self)[name] = pattern
+            cast('ILibrary', self)[name] = pattern

         return self

@@ -542,7 +542,7 @@ class ILibraryView(Mapping[str, 'Pattern'], metaclass=ABCMeta):
         Return:
             Topologically sorted list of pattern names.
         """
-        return cast(list[str], list(TopologicalSorter(self.child_graph()).static_order()))
+        return cast('list[str]', list(TopologicalSorter(self.child_graph()).static_order()))

     def find_refs_local(
             self,
@@ -827,7 +827,7 @@ class ILibrary(ILibraryView, MutableMapping[str, 'Pattern'], metaclass=ABCMeta):
         for old_name in temp:
             new_name = rename_map.get(old_name, old_name)
             pat = self[new_name]
-            pat.refs = map_targets(pat.refs, lambda tt: cast(dict[str | None, str | None], rename_map).get(tt, tt))
+            pat.refs = map_targets(pat.refs, lambda tt: cast('dict[str | None, str | None]', rename_map).get(tt, tt))

         return rename_map

@@ -944,8 +944,8 @@ class ILibrary(ILibraryView, MutableMapping[str, 'Pattern'], metaclass=ABCMeta):

             shape_table: dict[tuple, list] = defaultdict(list)
             for layer, sseq in pat.shapes.items():
-                for i, shape in enumerate(sseq):
-                    if any(isinstance(shape, t) for t in exclude_types):
+                for ii, shape in enumerate(sseq):
+                    if any(isinstance(shape, tt) for tt in exclude_types):
                         continue

                     base_label, values, _func = shape.normalized_form(norm_value)
@@ -954,16 +954,16 @@ class ILibrary(ILibraryView, MutableMapping[str, 'Pattern'], metaclass=ABCMeta):
                     if label not in shape_pats:
                         continue

-                    shape_table[label].append((i, values))
+                    shape_table[label].append((ii, values))

             # For repeated shapes, create a `Pattern` holding a normalized shape object,
             # and add `pat.refs` entries for each occurrence in pat. Also, note down that
             # we should delete the `pat.shapes` entries for which we made `Ref`s.
             shapes_to_remove = []
-            for label in shape_table:
+            for label, shape_entries in shape_table.items():
                 layer = label[-1]
                 target = label2name(label)
-                for ii, values in shape_table[label]:
+                for ii, values in shape_entries:
                     offset, scale, rotation, mirror_x = values
                     pat.ref(target=target, offset=offset, scale=scale,
                             rotation=rotation, mirrored=(mirror_x, False))
@@ -1047,7 +1047,7 @@ class ILibrary(ILibraryView, MutableMapping[str, 'Pattern'], metaclass=ABCMeta):
         if isinstance(tops, str):
             tops = (tops,)

-        keep = cast(set[str], self.referenced_patterns(tops) - {None})
+        keep = cast('set[str]', self.referenced_patterns(tops) - {None})
         keep |= set(tops)

         new = type(self)()
@@ -332,7 +332,7 @@ class Pattern(PortList, AnnotatableImpl, Mirrorable):
             ))

         self.ports = dict(sorted(self.ports.items()))
-        self.annotations = dict(sorted(self.annotations.items()))
+        self.annotations = dict(sorted(self.annotations.items())) if self.annotations is not None else None

         return self

@@ -354,6 +354,9 @@ class Pattern(PortList, AnnotatableImpl, Mirrorable):
         for layer, lseq in other_pattern.labels.items():
             self.labels[layer].extend(lseq)

+        if other_pattern.annotations is not None:
+            if self.annotations is None:
+                self.annotations = {}
         annotation_conflicts = set(self.annotations.keys()) & set(other_pattern.annotations.keys())
         if annotation_conflicts:
             raise PatternError(f'Annotation keys overlap: {annotation_conflicts}')
@@ -415,7 +418,7 @@ class Pattern(PortList, AnnotatableImpl, Mirrorable):
         elif default_keep:
             pat.refs = copy.copy(self.refs)

-        if annotations is not None:
+        if annotations is not None and self.annotations is not None:
             pat.annotations = {k: v for k, v in self.annotations.items() if annotations(k, v)}
         elif default_keep:
             pat.annotations = copy.copy(self.annotations)
@@ -491,7 +494,7 @@ class Pattern(PortList, AnnotatableImpl, Mirrorable):
         """
         pat = self.deepcopy().polygonize().flatten(library=library)
         polys = [
-            cast(Polygon, shape).vertices + cast(Polygon, shape).offset
+            cast('Polygon', shape).vertices + cast('Polygon', shape).offset
             for shape in chain_elements(pat.shapes)
             ]
         return polys
@@ -533,7 +536,7 @@ class Pattern(PortList, AnnotatableImpl, Mirrorable):
         n_elems = sum(1 for _ in chain_elements(self.shapes, self.labels))
         ebounds = numpy.full((n_elems, 2, 2), nan)
         for ee, entry in enumerate(chain_elements(self.shapes, self.labels)):
-            maybe_ebounds = cast(Bounded, entry).get_bounds()
+            maybe_ebounds = cast('Bounded', entry).get_bounds()
             if maybe_ebounds is not None:
                 ebounds[ee] = maybe_ebounds
         mask = ~numpy.isnan(ebounds[:, 0, 0])
@@ -631,7 +634,7 @@ class Pattern(PortList, AnnotatableImpl, Mirrorable):
             self
         """
         for entry in chain(chain_elements(self.shapes, self.labels, self.refs), self.ports.values()):
-            cast(Positionable, entry).translate(offset)
+            cast('Positionable', entry).translate(offset)
         return self

     def scale_elements(self, c: float) -> Self:
@@ -645,33 +648,37 @@ class Pattern(PortList, AnnotatableImpl, Mirrorable):
             self
         """
         for entry in chain_elements(self.shapes, self.refs):
-            cast(Scalable, entry).scale_by(c)
+            cast('Scalable', entry).scale_by(c)
         return self

-    def scale_by(self, c: float) -> Self:
+    def scale_by(self, c: float, scale_refs: bool = True) -> Self:
         """
         Scale this Pattern by the given value
-         (all shapes and refs and their offsets are scaled,
-          as are all label and port offsets)
+        All shapes and (optionally) refs and their offsets are scaled,
+        as are all label and port offsets.

         Args:
             c: factor to scale by
+            scale_refs: Whether to scale refs. Ref offsets are always scaled,
+                but it may be desirable to not scale the ref itself (e.g. if
+                the target cell was also scaled).

         Returns:
             self
         """
         for entry in chain_elements(self.shapes, self.refs):
-            cast(Positionable, entry).offset *= c
-            cast(Scalable, entry).scale_by(c)
+            cast('Positionable', entry).offset *= c
+            if scale_refs or not isinstance(entry, Ref):
+                cast('Scalable', entry).scale_by(c)

-            rep = cast(Repeatable, entry).repetition
+            rep = cast('Repeatable', entry).repetition
             if rep:
                 rep.scale_by(c)

         for label in chain_elements(self.labels):
-            cast(Positionable, label).offset *= c
+            cast('Positionable', label).offset *= c

-            rep = cast(Repeatable, label).repetition
+            rep = cast('Repeatable', label).repetition
             if rep:
                 rep.scale_by(c)

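Note: a hedged usage sketch for the new `scale_refs` flag (the `lib` library object is hypothetical): when every cell in a library is being scaled, ref offsets should follow the parent geometry, but the refs themselves should not be re-scaled, since their target cells were already scaled.

    for name, pat in lib.items():              # lib: any ILibrary mapping names to Patterns
        pat.scale_by(2.0, scale_refs=False)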
@@ -708,8 +715,8 @@ class Pattern(PortList, AnnotatableImpl, Mirrorable):
             self
         """
         for entry in chain(chain_elements(self.shapes, self.refs, self.labels), self.ports.values()):
-            old_offset = cast(Positionable, entry).offset
-            cast(Positionable, entry).offset = numpy.dot(rotation_matrix_2d(rotation), old_offset)
+            old_offset = cast('Positionable', entry).offset
+            cast('Positionable', entry).offset = numpy.dot(rotation_matrix_2d(rotation), old_offset)
         return self

     def rotate_elements(self, rotation: float) -> Self:
@@ -723,7 +730,7 @@ class Pattern(PortList, AnnotatableImpl, Mirrorable):
             self
         """
         for entry in chain(chain_elements(self.shapes, self.refs), self.ports.values()):
-            cast(Rotatable, entry).rotate(rotation)
+            cast('Rotatable', entry).rotate(rotation)
         return self

     def mirror_element_centers(self, across_axis: int = 0) -> Self:
@@ -738,7 +745,7 @@ class Pattern(PortList, AnnotatableImpl, Mirrorable):
             self
         """
         for entry in chain(chain_elements(self.shapes, self.refs, self.labels), self.ports.values()):
-            cast(Positionable, entry).offset[across_axis - 1] *= -1
+            cast('Positionable', entry).offset[across_axis - 1] *= -1
         return self

     def mirror_elements(self, across_axis: int = 0) -> Self:
@@ -754,7 +761,7 @@ class Pattern(PortList, AnnotatableImpl, Mirrorable):
             self
         """
         for entry in chain(chain_elements(self.shapes, self.refs), self.ports.values()):
-            cast(Mirrorable, entry).mirror(across_axis)
+            cast('Mirrorable', entry).mirror(across_axis)
         return self

     def mirror(self, across_axis: int = 0) -> Self:
@@ -294,7 +294,7 @@ class Grid(Repetition):
     def __le__(self, other: Repetition) -> bool:
         if type(self) is not type(other):
             return repr(type(self)) < repr(type(other))
-        other = cast(Grid, other)
+        other = cast('Grid', other)
         if self.a_count != other.a_count:
             return self.a_count < other.a_count
         if self.b_count != other.b_count:
@@ -357,7 +357,7 @@ class Arbitrary(Repetition):
     def __le__(self, other: Repetition) -> bool:
         if type(self) is not type(other):
             return repr(type(self)) < repr(type(other))
-        other = cast(Arbitrary, other)
+        other = cast('Arbitrary', other)
         if self.displacements.size != other.displacements.size:
             return self.displacements.size < other.displacements.size

@@ -157,7 +157,7 @@ class Arc(Shape):
             offset: ArrayLike = (0.0, 0.0),
             rotation: float = 0,
             repetition: Repetition | None = None,
-            annotations: annotations_t | None = None,
+            annotations: annotations_t = None,
             raw: bool = False,
             ) -> None:
         if raw:
@@ -170,7 +170,7 @@ class Arc(Shape):
             self._offset = offset
             self._rotation = rotation
             self._repetition = repetition
-            self._annotations = annotations if annotations is not None else {}
+            self._annotations = annotations
         else:
             self.radii = radii
             self.angles = angles
@@ -178,7 +178,7 @@ class Arc(Shape):
             self.offset = offset
             self.rotation = rotation
             self.repetition = repetition
-            self.annotations = annotations if annotations is not None else {}
+            self.annotations = annotations

     def __deepcopy__(self, memo: dict | None = None) -> 'Arc':
         memo = {} if memo is None else memo
@@ -206,7 +206,7 @@ class Arc(Shape):
             if repr(type(self)) != repr(type(other)):
                 return repr(type(self)) < repr(type(other))
             return id(type(self)) < id(type(other))
-        other = cast(Arc, other)
+        other = cast('Arc', other)
         if self.width != other.width:
             return self.width < other.width
         if not numpy.array_equal(self.radii, other.radii):
@@ -233,7 +233,7 @@ class Arc(Shape):
         r0, r1 = self.radii

         # Convert from polar angle to ellipse parameter (for [rx*cos(t), ry*sin(t)] representation)
-        a_ranges = cast(_array2x2_t, self._angles_to_parameters())
+        a_ranges = cast('_array2x2_t', self._angles_to_parameters())

         # Approximate perimeter via numerical integration

@@ -321,7 +321,7 @@ class Arc(Shape):

        If the extrema are innaccessible due to arc constraints, check the arc endpoints instead.
        """
-        a_ranges = cast(_array2x2_t, self._angles_to_parameters())
+        a_ranges = cast('_array2x2_t', self._angles_to_parameters())

         mins = []
         maxs = []
@@ -432,7 +432,7 @@ class Arc(Shape):
          [[x2, y2], [x3, y3]]], would create this arc from its corresponding ellipse.
        ```
        """
-        a_ranges = cast(_array2x2_t, self._angles_to_parameters())
+        a_ranges = cast('_array2x2_t', self._angles_to_parameters())

         mins = []
         maxs = []
@@ -48,7 +48,7 @@ class Circle(Shape):
             *,
             offset: ArrayLike = (0.0, 0.0),
             repetition: Repetition | None = None,
-            annotations: annotations_t | None = None,
+            annotations: annotations_t = None,
             raw: bool = False,
             ) -> None:
         if raw:
@@ -56,12 +56,12 @@ class Circle(Shape):
             self._radius = radius
             self._offset = offset
             self._repetition = repetition
-            self._annotations = annotations if annotations is not None else {}
+            self._annotations = annotations
         else:
             self.radius = radius
             self.offset = offset
             self.repetition = repetition
-            self.annotations = annotations if annotations is not None else {}
+            self.annotations = annotations

     def __deepcopy__(self, memo: dict | None = None) -> 'Circle':
         memo = {} if memo is None else memo
||||||
@ -84,7 +84,7 @@ class Circle(Shape):
|
|||||||
if repr(type(self)) != repr(type(other)):
|
if repr(type(self)) != repr(type(other)):
|
||||||
return repr(type(self)) < repr(type(other))
|
return repr(type(self)) < repr(type(other))
|
||||||
return id(type(self)) < id(type(other))
|
return id(type(self)) < id(type(other))
|
||||||
other = cast(Circle, other)
|
other = cast('Circle', other)
|
||||||
if not self.radius == other.radius:
|
if not self.radius == other.radius:
|
||||||
return self.radius < other.radius
|
return self.radius < other.radius
|
||||||
if not numpy.array_equal(self.offset, other.offset):
|
if not numpy.array_equal(self.offset, other.offset):
|
||||||
|
@ -93,7 +93,7 @@ class Ellipse(Shape):
|
|||||||
offset: ArrayLike = (0.0, 0.0),
|
offset: ArrayLike = (0.0, 0.0),
|
||||||
rotation: float = 0,
|
rotation: float = 0,
|
||||||
repetition: Repetition | None = None,
|
repetition: Repetition | None = None,
|
||||||
annotations: annotations_t | None = None,
|
annotations: annotations_t = None,
|
||||||
raw: bool = False,
|
raw: bool = False,
|
||||||
) -> None:
|
) -> None:
|
||||||
if raw:
|
if raw:
|
||||||
@@ -103,13 +103,13 @@ class Ellipse(Shape):
             self._offset = offset
             self._rotation = rotation
             self._repetition = repetition
-            self._annotations = annotations if annotations is not None else {}
+            self._annotations = annotations
         else:
             self.radii = radii
             self.offset = offset
             self.rotation = rotation
             self.repetition = repetition
-            self.annotations = annotations if annotations is not None else {}
+            self.annotations = annotations

     def __deepcopy__(self, memo: dict | None = None) -> Self:
         memo = {} if memo is None else memo
||||||
@ -134,7 +134,7 @@ class Ellipse(Shape):
|
|||||||
if repr(type(self)) != repr(type(other)):
|
if repr(type(self)) != repr(type(other)):
|
||||||
return repr(type(self)) < repr(type(other))
|
return repr(type(self)) < repr(type(other))
|
||||||
return id(type(self)) < id(type(other))
|
return id(type(self)) < id(type(other))
|
||||||
other = cast(Ellipse, other)
|
other = cast('Ellipse', other)
|
||||||
if not numpy.array_equal(self.radii, other.radii):
|
if not numpy.array_equal(self.radii, other.radii):
|
||||||
return tuple(self.radii) < tuple(other.radii)
|
return tuple(self.radii) < tuple(other.radii)
|
||||||
if not numpy.array_equal(self.offset, other.offset):
|
if not numpy.array_equal(self.offset, other.offset):
|
||||||
|
@ -170,7 +170,7 @@ class Path(Shape):
|
|||||||
offset: ArrayLike = (0.0, 0.0),
|
offset: ArrayLike = (0.0, 0.0),
|
||||||
rotation: float = 0,
|
rotation: float = 0,
|
||||||
repetition: Repetition | None = None,
|
repetition: Repetition | None = None,
|
||||||
annotations: annotations_t | None = None,
|
annotations: annotations_t = None,
|
||||||
raw: bool = False,
|
raw: bool = False,
|
||||||
) -> None:
|
) -> None:
|
||||||
self._cap_extensions = None # Since .cap setter might access it
|
self._cap_extensions = None # Since .cap setter might access it
|
||||||
@@ -182,7 +182,7 @@ class Path(Shape):
             self._vertices = vertices
             self._offset = offset
             self._repetition = repetition
-            self._annotations = annotations if annotations is not None else {}
+            self._annotations = annotations
             self._width = width
             self._cap = cap
             self._cap_extensions = cap_extensions
@@ -190,7 +190,7 @@ class Path(Shape):
             self.vertices = vertices
             self.offset = offset
             self.repetition = repetition
-            self.annotations = annotations if annotations is not None else {}
+            self.annotations = annotations
             self.width = width
             self.cap = cap
             self.cap_extensions = cap_extensions
@@ -223,7 +223,7 @@ class Path(Shape):
             if repr(type(self)) != repr(type(other)):
                 return repr(type(self)) < repr(type(other))
             return id(type(self)) < id(type(other))
-        other = cast(Path, other)
+        other = cast('Path', other)
         if self.width != other.width:
             return self.width < other.width
         if self.cap != other.cap:
@@ -405,7 +405,7 @@ class Path(Shape):
         x_min = rotated_vertices[:, 0].argmin()
         if not is_scalar(x_min):
             y_min = rotated_vertices[x_min, 1].argmin()
-            x_min = cast(Sequence, x_min)[y_min]
+            x_min = cast('Sequence', x_min)[y_min]
         reordered_vertices = numpy.roll(rotated_vertices, -x_min, axis=0)

         width0 = self.width / norm_value
@ -1,97 +1,103 @@
|
|||||||
from typing import Any, cast, Iterable
|
from typing import Any, cast, Self
|
||||||
from collections.abc import Sequence
|
from collections.abc import Iterator
|
||||||
import copy
|
import copy
|
||||||
import functools
|
import functools
|
||||||
|
from itertools import chain
|
||||||
|
|
||||||
import numpy
|
import numpy
|
||||||
from numpy import pi
|
from numpy import pi
|
||||||
from numpy.typing import NDArray, ArrayLike
|
from numpy.typing import NDArray, ArrayLike
|
||||||
|
|
||||||
from . import Shape, normalized_shape_tuple
|
from . import Shape, normalized_shape_tuple
|
||||||
from ..error import PatternError
|
from .polygon import Polygon
|
||||||
from ..repetition import Repetition
|
from ..repetition import Repetition
|
||||||
from ..utils import is_scalar, rotation_matrix_2d, annotations_lt, annotations_eq, rep2key
|
from ..utils import rotation_matrix_2d, annotations_lt, annotations_eq, rep2key, annotations_t
|
||||||
from ..utils import remove_colinear_vertices, remove_duplicate_vertices, annotations_t
|
|
||||||
|
|
||||||
|
|
||||||
@functools.total_ordering
|
@functools.total_ordering
|
||||||
class PolyCollection(Shape):
|
class PolyCollection(Shape):
|
||||||
"""
|
"""
|
||||||
A collection of polygons, consisting of list of vertex arrays (N_m x 2 ndarrays) which specify
|
A collection of polygons, consisting of concatenated vertex arrays (N_m x 2 ndarray) which specify
|
||||||
implicitly-closed boundaries, and an offset.
|
implicitly-closed boundaries, and an array of offets specifying the first vertex of each
|
||||||
|
successive polygon.
|
||||||
|
|
||||||
Note that the setter for `PolyCollection.vertex_list` creates a copy of the
|
A `normalized_form(...)` is available, but is untested and probably fairly slow.
|
||||||
passed vertex coordinates.
|
|
||||||
|
|
||||||
A `normalized_form(...)` is available, but can be quite slow with lots of vertices.
|
|
||||||
"""
|
"""
|
||||||
__slots__ = (
|
__slots__ = (
|
||||||
'_vertex_lists',
|
'_vertex_lists',
|
||||||
|
'_vertex_offsets',
|
||||||
# Inherited
|
# Inherited
|
||||||
'_offset', '_repetition', '_annotations',
|
'_offset', '_repetition', '_annotations',
|
||||||
)
|
)
|
||||||
|
|
||||||
_vertex_lists: list[NDArray[numpy.float64]]
|
_vertex_lists: NDArray[numpy.float64]
|
||||||
""" List of ndarrays (N_m x 2) of vertices `[ [[x0, y0], [x1, y1], ...] ]` """
|
""" 2D NDArray ((N+M+...) x 2) of vertices `[[xa0, ya0], [xa1, ya1], ..., [xb0, yb0], [xb1, yb1], ... ]` """
|
||||||
|
|
||||||
|
_vertex_offsets: NDArray[numpy.intp]
|
||||||
|
""" 1D NDArray specifying the starting offset for each polygon """
|
||||||
|
|
||||||
# vertex_lists property
|
|
||||||
@property
|
@property
|
||||||
def vertex_lists(self) -> Any: # mypy#3004 NDArray[numpy.float64]:
|
def vertex_lists(self) -> Any: # mypy#3004 NDArray[numpy.float64]:
|
||||||
"""
|
"""
|
||||||
Vertices of the polygons (ist of ndarrays (N_m x 2) `[ [[x0, y0], [x1, y1], ...] ]`
|
Vertices of the polygons, ((N+M+...) x 2). Use with `vertex_offsets`.
|
||||||
|
|
||||||
When setting, note that a copy will be made,
|
|
||||||
"""
|
"""
|
||||||
return self._vertex_lists
|
return self._vertex_lists
|
||||||
|
|
||||||
@vertex_lists.setter
|
|
||||||
def vertex_lists(self, val: ArrayLike) -> None:
|
|
||||||
val = [numpy.array(vv, dtype=float) for vv in val]
|
|
||||||
for ii, vv in enumerate(val):
|
|
||||||
if len(vv.shape) < 2 or vv.shape[1] != 2:
|
|
||||||
raise PatternError(f'vertex_lists contents must be an Nx2 arrays (polygon #{ii} fails)')
|
|
||||||
if vv.shape[0] < 3:
|
|
||||||
raise PatternError(f'vertex_lists contents must have at least 3 vertices (Nx2 where N>2) (polygon ${ii} has shape {vv.shape})')
|
|
||||||
self._vertices = val
|
|
||||||
|
|
||||||
# xs property
|
|
||||||
@property
|
@property
|
||||||
def xs(self) -> NDArray[numpy.float64]:
|
def vertex_offsets(self) -> Any: # mypy#3004 NDArray[numpy.intp]:
|
||||||
"""
|
"""
|
||||||
All vertex x coords as a 1D ndarray
|
Starting offset (in `vertex_lists`) for each polygon
|
||||||
"""
|
"""
|
||||||
return self.vertices[:, 0]
|
return self._vertex_offsets
|
||||||
|
|
||||||
|
@property
|
||||||
|
def vertex_slices(self) -> Iterator[slice]:
|
||||||
|
"""
|
||||||
|
Iterator which provides slices which index vertex_lists
|
||||||
|
"""
|
||||||
|
for ii, ff in chain(self._vertex_offsets, (self._vertex_lists.shape[0],)):
|
||||||
|
yield slice(ii, ff)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def polygon_vertices(self) -> Iterator[NDArray[numpy.float64]]:
|
||||||
|
for slc in self.vertex_slices:
|
||||||
|
yield self._vertex_lists[slc]
|
||||||
|
|
||||||
     def __init__(
             self,
-            vertex_lists: Iterable[ArrayLike],
+            vertex_lists: ArrayLike,
+            vertex_offsets: ArrayLike,
             *,
             offset: ArrayLike = (0.0, 0.0),
             rotation: float = 0.0,
             repetition: Repetition | None = None,
-            annotations: annotations_t | None = None,
+            annotations: annotations_t = None,
             raw: bool = False,
             ) -> None:
         if raw:
-            assert isinstance(vertex_lists, list)
-            assert all(isinstance(vv, numpy.ndarray) for vv in vertex_lists)
+            assert isinstance(vertex_lists, numpy.ndarray)
+            assert isinstance(vertex_offsets, numpy.ndarray)
             assert isinstance(offset, numpy.ndarray)
             self._vertex_lists = vertex_lists
+            self._vertex_offsets = vertex_offsets
             self._offset = offset
             self._repetition = repetition
-            self._annotations = annotations if annotations is not None else {}
+            self._annotations = annotations
         else:
-            self.vertices = vertices
+            self._vertex_lists = numpy.asarray(vertex_lists, dtype=float)
+            self._vertex_offsets = numpy.asarray(vertex_offsets, dtype=numpy.intp)
             self.offset = offset
             self.repetition = repetition
-            self.annotations = annotations if annotations is not None else {}
-        self.rotate(rotation)
+            self.annotations = annotations
+        if rotation:
+            self.rotate(rotation)
 
-    def __deepcopy__(self, memo: dict | None = None) -> 'PolyCollection':
+    def __deepcopy__(self, memo: dict | None = None) -> Self:
         memo = {} if memo is None else memo
         new = copy.copy(self)
         new._offset = self._offset.copy()
-        new._vertex_lists = [vv.copy() for vv in self._vertex_lists]
+        new._vertex_lists = self._vertex_lists.copy()
+        new._vertex_offsets = self._vertex_offsets.copy()
         new._annotations = copy.deepcopy(self._annotations)
         return new
 
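
Callers that previously passed a list of per-polygon arrays now need the concatenated array plus offsets. A hypothetical conversion, not part of this changeset, could be built with numpy.concatenate and numpy.cumsum:

    import numpy

    # Per-polygon vertex arrays, as the old list-based constructor accepted them.
    polys = [
        numpy.array([[0.0, 0.0], [1.0, 0.0], [0.0, 1.0]]),
        numpy.array([[2.0, 2.0], [3.0, 2.0], [3.0, 3.0], [2.0, 3.0]]),
        ]

    # Flatten into the form the new constructor expects.
    vertex_lists = numpy.concatenate(polys, axis=0)
    lengths = numpy.array([len(pp) for pp in polys])
    vertex_offsets = numpy.concatenate(([0], numpy.cumsum(lengths)[:-1])).astype(numpy.intp)

    assert vertex_lists.shape == (7, 2)
    assert vertex_offsets.tolist() == [0, 3]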
@@ -99,7 +105,8 @@ class PolyCollection(Shape):
         return (
             type(self) is type(other)
             and numpy.array_equal(self.offset, other.offset)
-            and all(numpy.array_equal(ss, oo) for ss, oo in zip(self.vertices, other.vertices))
+            and numpy.array_equal(self._vertex_lists, other._vertex_lists)
+            and numpy.array_equal(self._vertex_offsets, other._vertex_offsets)
             and self.repetition == other.repetition
             and annotations_eq(self.annotations, other.annotations)
             )
@@ -110,8 +117,9 @@ class PolyCollection(Shape):
                 return repr(type(self)) < repr(type(other))
             return id(type(self)) < id(type(other))
 
-        other = cast(PolyCollection, other)
-        for vv, oo in zip(self.vertices, other.vertices):
+        other = cast('PolyCollection', other)
+        for vv, oo in zip(self.polygon_vertices, other.polygon_vertices, strict=False):
             if not numpy.array_equal(vv, oo):
                 min_len = min(vv.shape[0], oo.shape[0])
                 eq_mask = vv[:min_len] != oo[:min_len]
@@ -121,67 +129,48 @@ class PolyCollection(Shape):
                     return eq_lt_masked.flat[0]
                 return vv.shape[0] < oo.shape[0]
         if len(self.vertex_lists) != len(other.vertex_lists):
-            return len(self.vertex_lists) < len(other.vertex_lists):
+            return len(self.vertex_lists) < len(other.vertex_lists)
         if not numpy.array_equal(self.offset, other.offset):
             return tuple(self.offset) < tuple(other.offset)
         if self.repetition != other.repetition:
             return rep2key(self.repetition) < rep2key(other.repetition)
         return annotations_lt(self.annotations, other.annotations)
 
-    def pop_as_polygon(self, index: int) -> 'Polygon':
-        """
-        Remove one polygon from the list, and return it as a `Polygon` object.
-
-        Args:
-            index: which polygon to pop
-        """
-        verts = self.vertex_lists.pop(index)
-        return Polygon(
-            vertices=verts,
-            offset=self.offset,
-            repetition=self.repetition.copy(),
-            annotations=copy.deepcopy(self.annotations),
-            )
-
     def to_polygons(
             self,
             num_vertices: int | None = None,      # unused  # noqa: ARG002
             max_arclen: float | None = None,      # unused  # noqa: ARG002
             ) -> list['Polygon']:
         return [Polygon(
-            vertices=vv,
-            offset=self.offset,
-            repetition=self.repetition.copy(),
-            annotations=copy.deepcopy(self.annotations),
-            ) for vv in self.vertex_lists]
+            vertices = vv,
+            offset = self.offset,
+            repetition = self.repetition.copy(),
+            annotations = copy.deepcopy(self.annotations),
+            ) for vv in self.polygon_vertices]
 
     def get_bounds_single(self) -> NDArray[numpy.float64]:         # TODO note shape get_bounds doesn't include repetition
-        mins = [numpy.min(vv, axis=0) for vv in self.vertex_lists]
-        maxs = [numpy.max(vv, axis=0) for vv in self.vertex_lists]
-        return numpy.vstack((self.offset + numpy.min(self.vertex_lists, axis=0),
-                             self.offset + numpy.max(self.vertex_lists, axis=0)))
+        return numpy.vstack((self.offset + numpy.min(self._vertex_lists, axis=0),
+                             self.offset + numpy.max(self._vertex_lists, axis=0)))
 
-    def rotate(self, theta: float) -> 'Polygon':
+    def rotate(self, theta: float) -> Self:
         if theta != 0:
-            for vv in self.vertex_lists:
-                vv[:] = numpy.dot(rotation_matrix_2d(theta), vv.T).T
+            rot = rotation_matrix_2d(theta)
+            self._vertex_lists = numpy.einsum('ij,kj->ki', rot, self._vertex_lists)
         return self
 
-    def mirror(self, axis: int = 0) -> 'Polygon':
-        for vv in self.vertex_lists:
-            vv[:, axis - 1] *= -1
+    def mirror(self, axis: int = 0) -> Self:
+        self._vertex_lists[:, axis - 1] *= -1
         return self
 
-    def scale_by(self, c: float) -> 'Polygon':
-        for vv in self.vertex_lists:
-            vv *= c
+    def scale_by(self, c: float) -> Self:
+        self.vertex_lists *= c
         return self
 
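
`rotate` now applies a single einsum over the whole concatenated array instead of looping over per-polygon arrays. A quick standalone check that `einsum('ij,kj->ki', R, V)` rotates every row exactly like `(R @ V.T).T`; the local `rotation_matrix_2d` below is the standard 2D rotation matrix and is only assumed to match masque's helper of the same name:

    import numpy

    def rotation_matrix_2d(theta: float) -> numpy.ndarray:
        # Standard counterclockwise rotation matrix; defined locally so the snippet stands alone.
        return numpy.array([[numpy.cos(theta), -numpy.sin(theta)],
                            [numpy.sin(theta),  numpy.cos(theta)]])

    rot = rotation_matrix_2d(0.3)
    verts = numpy.random.default_rng(0).random((5, 2))

    rotated_einsum = numpy.einsum('ij,kj->ki', rot, verts)   # rotate all rows at once
    rotated_matmul = (rot @ verts.T).T                       # equivalent matrix form
    assert numpy.allclose(rotated_einsum, rotated_matmul)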
     def normalized_form(self, norm_value: float) -> normalized_shape_tuple:
         # Note: this function is going to be pretty slow for many-vertexed polygons, relative to
         #   other shapes
-        meanv = numpy.concatenate(self.vertex_lists).mean(axis=0)
-        zeroed_vertices = [vv - meanv for vv in self.vertex_lists]
+        meanv = self._vertex_lists.mean(axis=0)
+        zeroed_vertices = self._vertex_lists - [meanv]
         offset = meanv + self.offset
 
         scale = zeroed_vertices.std()
@@ -189,22 +178,26 @@ class PolyCollection(Shape):
 
         _, _, vertex_axis = numpy.linalg.svd(zeroed_vertices)
         rotation = numpy.arctan2(vertex_axis[0][1], vertex_axis[0][0]) % (2 * pi)
-        rotated_vertices = numpy.vstack([numpy.dot(rotation_matrix_2d(-rotation), v)
-                                         for v in normed_vertices])
+        rotated_vertices = numpy.einsum('ij,kj->ki', rotation_matrix_2d(-rotation), normed_vertices)
 
-        # Reorder the vertices so that the one with lowest x, then y, comes first.
-        x_min = rotated_vertices[:, 0].argmin()
-        if not is_scalar(x_min):
-            y_min = rotated_vertices[x_min, 1].argmin()
-            x_min = cast(Sequence, x_min)[y_min]
-        reordered_vertices = numpy.roll(rotated_vertices, -x_min, axis=0)
+        # TODO consider how to reorder vertices for polycollection
+        ## Reorder the vertices so that the one with lowest x, then y, comes first.
+        #x_min = rotated_vertices[:, 0].argmin()
+        #if not is_scalar(x_min):
+        #    y_min = rotated_vertices[x_min, 1].argmin()
+        #    x_min = cast('Sequence', x_min)[y_min]
+        #reordered_vertices = numpy.roll(rotated_vertices, -x_min, axis=0)
 
         # TODO: normalize mirroring?
 
-        return ((type(self), reordered_vertices.data.tobytes()),
+        return ((type(self), rotated_vertices.data.tobytes() + self._vertex_offsets.tobytes()),
                 (offset, scale / norm_value, rotation, False),
-                lambda: Polygon(reordered_vertices * norm_value))
+                lambda: PolyCollection(
+                    vertex_lists=rotated_vertices * norm_value,
+                    vertex_offsets=self._vertex_offsets,
+                    ),
+                )
 
     def __repr__(self) -> str:
-        centroid = self.offset + numpy.concatenate(self.vertex_lists).mean(axis=0)
-        return f'<PolyCollection centroid {centroid} p{len(self.vertex_lists)}>'
+        centroid = self.offset + self.vertex_lists.mean(axis=0)
+        return f'<PolyCollection centroid {centroid} p{len(self.vertex_offsets)}>'

@@ -1,5 +1,4 @@
-from typing import Any, cast
-from collections.abc import Sequence
+from typing import Any, cast, TYPE_CHECKING
 import copy
 import functools
 
@@ -13,6 +12,9 @@ from ..repetition import Repetition
 from ..utils import is_scalar, rotation_matrix_2d, annotations_lt, annotations_eq, rep2key
 from ..utils import remove_colinear_vertices, remove_duplicate_vertices, annotations_t
 
+if TYPE_CHECKING:
+    from collections.abc import Sequence
+
 
 @functools.total_ordering
 class Polygon(Shape):
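
Moving `Sequence` under `TYPE_CHECKING` only works because the `cast(...)` calls now quote the type name, so nothing is looked up at runtime. A generic sketch of the pattern (not masque code):

    from typing import TYPE_CHECKING, cast

    if TYPE_CHECKING:
        # Imported only for the type checker; never executed at runtime.
        from collections.abc import Sequence

    def first_or_none(val: object) -> object:
        # The quoted name keeps this working even though Sequence is not imported at runtime.
        if isinstance(val, (list, tuple)):
            seq = cast('Sequence', val)
            return seq[0] if seq else None
        return None

    assert first_or_none([3, 4]) == 3
    assert first_or_none('not a list or tuple') is None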
@@ -90,7 +92,7 @@ class Polygon(Shape):
             offset: ArrayLike = (0.0, 0.0),
             rotation: float = 0.0,
             repetition: Repetition | None = None,
-            annotations: annotations_t | None = None,
+            annotations: annotations_t = None,
             raw: bool = False,
             ) -> None:
         if raw:
@@ -99,12 +101,13 @@ class Polygon(Shape):
             self._vertices = vertices
             self._offset = offset
             self._repetition = repetition
-            self._annotations = annotations if annotations is not None else {}
+            self._annotations = annotations
         else:
             self.vertices = vertices
             self.offset = offset
             self.repetition = repetition
-            self.annotations = annotations if annotations is not None else {}
-        self.rotate(rotation)
+            self.annotations = annotations
+        if rotation:
+            self.rotate(rotation)
 
     def __deepcopy__(self, memo: dict | None = None) -> 'Polygon':
@@ -129,7 +132,7 @@ class Polygon(Shape):
             if repr(type(self)) != repr(type(other)):
                 return repr(type(self)) < repr(type(other))
             return id(type(self)) < id(type(other))
-        other = cast(Polygon, other)
+        other = cast('Polygon', other)
         if not numpy.array_equal(self.vertices, other.vertices):
             min_len = min(self.vertices.shape[0], other.vertices.shape[0])
             eq_mask = self.vertices[:min_len] != other.vertices[:min_len]
@@ -395,7 +398,7 @@ class Polygon(Shape):
         x_min = rotated_vertices[:, 0].argmin()
         if not is_scalar(x_min):
             y_min = rotated_vertices[x_min, 1].argmin()
-            x_min = cast(Sequence, x_min)[y_min]
+            x_min = cast('Sequence', x_min)[y_min]
         reordered_vertices = numpy.roll(rotated_vertices, -x_min, axis=0)
 
         # TODO: normalize mirroring?

@@ -71,7 +71,7 @@ class Text(RotatableImpl, Shape):
             offset: ArrayLike = (0.0, 0.0),
             rotation: float = 0.0,
             repetition: Repetition | None = None,
-            annotations: annotations_t | None = None,
+            annotations: annotations_t = None,
             raw: bool = False,
             ) -> None:
         if raw:
@@ -81,14 +81,14 @@ class Text(RotatableImpl, Shape):
             self._height = height
             self._rotation = rotation
             self._repetition = repetition
-            self._annotations = annotations if annotations is not None else {}
+            self._annotations = annotations
         else:
             self.offset = offset
             self.string = string
             self.height = height
             self.rotation = rotation
             self.repetition = repetition
-            self.annotations = annotations if annotations is not None else {}
+            self.annotations = annotations
             self.font_path = font_path
 
     def __deepcopy__(self, memo: dict | None = None) -> Self:
@@ -115,7 +115,7 @@ class Text(RotatableImpl, Shape):
             if repr(type(self)) != repr(type(other)):
                 return repr(type(self)) < repr(type(other))
             return id(type(self)) < id(type(other))
-        other = cast(Text, other)
+        other = cast('Text', other)
         if not self.height == other.height:
             return self.height < other.height
         if not self.string == other.string:

@@ -1,14 +1,15 @@
-from typing import Self, cast, Any
+from typing import Self, cast, Any, TYPE_CHECKING
 from abc import ABCMeta, abstractmethod
 
 import numpy
 from numpy import pi
 from numpy.typing import ArrayLike
 
-from .positionable import Positionable
 from ..error import MasqueError
 from ..utils import rotation_matrix_2d
 
+if TYPE_CHECKING:
+    from .positionable import Positionable
 
 _empty_slots = ()     # Workaround to get mypy to ignore intentionally empty slots for superclass
 
@@ -113,9 +114,9 @@ class PivotableImpl(Pivotable, metaclass=ABCMeta):
 
     def rotate_around(self, pivot: ArrayLike, rotation: float) -> Self:
         pivot = numpy.asarray(pivot, dtype=float)
-        cast(Positionable, self).translate(-pivot)
-        cast(Rotatable, self).rotate(rotation)
+        cast('Positionable', self).translate(-pivot)
+        cast('Rotatable', self).rotate(rotation)
         self.offset = numpy.dot(rotation_matrix_2d(rotation), self.offset)   # type: ignore  # mypy#3004
-        cast(Positionable, self).translate(+pivot)
+        cast('Positionable', self).translate(+pivot)
         return self
 

@@ -5,7 +5,7 @@ from numpy import pi
 try:
     from numpy import trapezoid
 except ImportError:
-    from numpy import trapz as trapezoid
+    from numpy import trapz as trapezoid      # type:ignore
 
 
 def bezier(
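
Newer numpy releases provide `numpy.trapezoid`, while older ones only have `numpy.trapz`; the try/except keeps both working. A minimal usage check of the same fallback:

    try:
        from numpy import trapezoid            # newer numpy
    except ImportError:
        from numpy import trapz as trapezoid   # older numpy  # type: ignore
    import numpy

    xs = numpy.linspace(0.0, 1.0, 101)
    area = trapezoid(xs ** 2, xs)              # integrate x^2 over [0, 1]
    assert abs(area - 1 / 3) < 1e-3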

@@ -5,7 +5,7 @@ from typing import Protocol
 
 
 layer_t = int | tuple[int, int] | str
-annotations_t = dict[str, list[int | float | str]]
+annotations_t = dict[str, list[int | float | str]] | None
 
 
 class SupportsBool(Protocol):
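
With `None` folded into `annotations_t`, constructors can store the argument as-is instead of substituting `{}`. A toy sketch of the widened alias; the names mirror the diff but this is not a drop-in for masque:

    annotations_t = dict[str, list[int | float | str]] | None

    class Annotated:
        def __init__(self, annotations: annotations_t = None) -> None:
            self.annotations = annotations   # None is now a legal stored value

    assert Annotated().annotations is None
    assert Annotated({'key': [1, 2]}).annotations == {'key': [1, 2]}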

@@ -78,7 +78,6 @@ lint.ignore = [
   "ANN002",   # *args
   "ANN003",   # **kwargs
   "ANN401",   # Any
-  "ANN101",   # self: Self
   "SIM108",   # single-line if / else assignment
   "RET504",   # x=y+z; return x
   "PIE790",   # unnecessary pass