Compare commits: master...polycollec (1 commit)

Commit 1fdfcbd85d
@@ -169,11 +169,11 @@ def ell(
                      'emax', 'max_extension',
                      'min_past_furthest',):
         if numpy.size(bound) == 2:
-            bound = cast('Sequence[float]', bound)
+            bound = cast(Sequence[float], bound)
             rot_bound = (rot_matrix @ ((bound[0], 0),
                                        (0, bound[1])))[0, :]
         else:
-            bound = cast('float', bound)
+            bound = cast(float, bound)
             rot_bound = numpy.array(bound)

         if rot_bound < 0:
@@ -185,10 +185,10 @@ def ell(
             offsets += rot_bound.min() - offsets.max()
     else:
         if numpy.size(bound) == 2:
-            bound = cast('Sequence[float]', bound)
+            bound = cast(Sequence[float], bound)
             rot_bound = (rot_matrix @ bound)[0]
         else:
-            bound = cast('float', bound)
+            bound = cast(float, bound)
             neg = (direction + pi / 4) % (2 * pi) > pi
             rot_bound = -bound if neg else bound

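Most hunks in this comparison flip between `cast('T', x)` and `cast(T, x)`. A minimal standalone sketch (not code from this repository) of the practical difference: the quoted form never evaluates the type name at runtime, so the name can live behind `if TYPE_CHECKING:` and needs no runtime import. Either way, `typing.cast` simply returns its second argument unchanged.

```python
# Illustrative only -- not masque code.
from typing import TYPE_CHECKING, cast

if TYPE_CHECKING:
    from collections.abc import Sequence       # imported only for the type checker

def first_bound(bound: 'float | Sequence[float]') -> float:
    if isinstance(bound, (tuple, list)):
        # The string keeps this working even though Sequence is not imported at runtime.
        return float(cast('Sequence[float]', bound)[0])
    # Unquoted casts (as on the '+' side of these hunks) require the name to exist at runtime.
    return float(cast(float, bound))
```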
@@ -132,7 +132,7 @@ def writefile(
     with tmpfile(path) as base_stream:
         streams: tuple[Any, ...] = (base_stream,)
         if path.suffix == '.gz':
-            gz_stream = cast('IO[bytes]', gzip.GzipFile(filename='', mtime=0, fileobj=base_stream, mode='wb'))
+            gz_stream = cast(IO[bytes], gzip.GzipFile(filename='', mtime=0, fileobj=base_stream, mode='wb'))
             streams = (gz_stream,) + streams
         else:
             gz_stream = base_stream
@@ -145,7 +145,7 @@ def writefile(
     with tmpfile(path) as base_stream:
         streams: tuple[Any, ...] = (base_stream,)
         if path.suffix == '.gz':
-            stream = cast('IO[bytes]', gzip.GzipFile(filename='', mtime=0, fileobj=base_stream, mode='wb', compresslevel=6))
+            stream = cast(IO[bytes], gzip.GzipFile(filename='', mtime=0, fileobj=base_stream, mode='wb', compresslevel=6))
             streams = (stream,) + streams
         else:
             stream = base_stream
@@ -190,7 +190,7 @@ def writefile(
     with tmpfile(path) as base_stream:
         streams: tuple[Any, ...] = (base_stream,)
         if path.suffix == '.gz':
-            stream = cast('IO[bytes]', gzip.GzipFile(filename='', mtime=0, fileobj=base_stream, mode='wb'))
+            stream = cast(IO[bytes], gzip.GzipFile(filename='', mtime=0, fileobj=base_stream, mode='wb'))
             streams += (stream,)
         else:
             stream = base_stream
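All three `writefile` hunks wrap the temporary output stream in a `gzip.GzipFile` when the destination ends in `.gz`; `mtime=0` keeps the gzip header free of a timestamp, so repeated writes of the same data stay byte-identical. A self-contained sketch of that layering, with hypothetical names (`write_payload`, plain `open()` in place of `tmpfile`):

```python
# Sketch only -- names and error handling are simplified relative to the code above.
import gzip
from pathlib import Path
from typing import IO, cast

def write_payload(path: Path, payload: bytes) -> None:
    with open(path, 'wb') as base_stream:
        if path.suffix == '.gz':
            stream = cast(IO[bytes], gzip.GzipFile(filename='', mtime=0, fileobj=base_stream, mode='wb'))
        else:
            stream = base_stream
        try:
            stream.write(payload)
        finally:
            if stream is not base_stream:
                stream.close()      # flush the gzip trailer before the underlying file closes
```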
@@ -551,7 +551,7 @@ def _shapes_to_elements(
                 circle = fatrec.Circle(
                     layer=layer,
                     datatype=datatype,
-                    radius=cast('int', radius),
+                    radius=cast(int, radius),
                     x=offset[0],
                     y=offset[1],
                     properties=properties,
@@ -568,8 +568,8 @@ def _shapes_to_elements(
                 path = fatrec.Path(
                     layer=layer,
                     datatype=datatype,
-                    point_list=cast('Sequence[Sequence[int]]', deltas),
-                    half_width=cast('int', half_width),
+                    point_list=cast(Sequence[Sequence[int]], deltas),
+                    half_width=cast(int, half_width),
                     x=xy[0],
                     y=xy[1],
                     extension_start=extension_start,       # TODO implement multiple cap types?
@@ -587,7 +587,7 @@ def _shapes_to_elements(
                     datatype=datatype,
                     x=xy[0],
                     y=xy[1],
-                    point_list=cast('list[list[int]]', points),
+                    point_list=cast(list[list[int]], points),
                     properties=properties,
                     repetition=repetition,
                     ))
@@ -651,10 +651,10 @@ def repetition_masq2fata(
         a_count = rint_cast(rep.a_count)
         b_count = rint_cast(rep.b_count) if rep.b_count is not None else None
         frep = fatamorgana.GridRepetition(
-            a_vector=cast('list[int]', a_vector),
-            b_vector=cast('list[int] | None', b_vector),
-            a_count=cast('int', a_count),
-            b_count=cast('int | None', b_count),
+            a_vector=cast(list[int], a_vector),
+            b_vector=cast(list[int] | None, b_vector),
+            a_count=cast(int, a_count),
+            b_count=cast(int | None, b_count),
             )
         offset = (0, 0)
     elif isinstance(rep, Arbitrary):
@@ -211,7 +211,7 @@ class ILibraryView(Mapping[str, 'Pattern'], metaclass=ABCMeta):
         if isinstance(tops, str):
             tops = (tops,)

-        keep = cast('set[str]', self.referenced_patterns(tops) - {None})
+        keep = cast(set[str], self.referenced_patterns(tops) - {None})
         keep |= set(tops)

         filtered = {kk: vv for kk, vv in self.items() if kk in keep}
@@ -314,7 +314,7 @@ class ILibraryView(Mapping[str, 'Pattern'], metaclass=ABCMeta):
             flatten_single(top)

         assert None not in flattened.values()
-        return cast('dict[str, Pattern]', flattened)
+        return cast(dict[str, 'Pattern'], flattened)

     def get_name(
             self,
@@ -504,7 +504,7 @@ class ILibraryView(Mapping[str, 'Pattern'], metaclass=ABCMeta):
                     raise LibraryError('visit_* functions returned a new `Pattern` object'
                                        ' but no top-level name was provided in `hierarchy`')

-                cast('ILibrary', self)[name] = pattern
+                cast(ILibrary, self)[name] = pattern

         return self

@@ -542,7 +542,7 @@ class ILibraryView(Mapping[str, 'Pattern'], metaclass=ABCMeta):
         Return:
             Topologically sorted list of pattern names.
         """
-        return cast('list[str]', list(TopologicalSorter(self.child_graph()).static_order()))
+        return cast(list[str], list(TopologicalSorter(self.child_graph()).static_order()))

     def find_refs_local(
             self,
@@ -827,7 +827,7 @@ class ILibrary(ILibraryView, MutableMapping[str, 'Pattern'], metaclass=ABCMeta):
         for old_name in temp:
             new_name = rename_map.get(old_name, old_name)
             pat = self[new_name]
-            pat.refs = map_targets(pat.refs, lambda tt: cast('dict[str | None, str | None]', rename_map).get(tt, tt))
+            pat.refs = map_targets(pat.refs, lambda tt: cast(dict[str | None, str | None], rename_map).get(tt, tt))

         return rename_map

@@ -944,8 +944,8 @@ class ILibrary(ILibraryView, MutableMapping[str, 'Pattern'], metaclass=ABCMeta):

             shape_table: dict[tuple, list] = defaultdict(list)
             for layer, sseq in pat.shapes.items():
-                for ii, shape in enumerate(sseq):
-                    if any(isinstance(shape, tt) for tt in exclude_types):
+                for i, shape in enumerate(sseq):
+                    if any(isinstance(shape, t) for t in exclude_types):
                         continue

                     base_label, values, _func = shape.normalized_form(norm_value)
@@ -954,16 +954,16 @@ class ILibrary(ILibraryView, MutableMapping[str, 'Pattern'], metaclass=ABCMeta):
                     if label not in shape_pats:
                         continue

-                    shape_table[label].append((ii, values))
+                    shape_table[label].append((i, values))

             # For repeated shapes, create a `Pattern` holding a normalized shape object,
             # and add `pat.refs` entries for each occurrence in pat. Also, note down that
             # we should delete the `pat.shapes` entries for which we made `Ref`s.
             shapes_to_remove = []
-            for label, shape_entries in shape_table.items():
+            for label in shape_table:
                 layer = label[-1]
                 target = label2name(label)
-                for ii, values in shape_entries:
+                for ii, values in shape_table[label]:
                     offset, scale, rotation, mirror_x = values
                     pat.ref(target=target, offset=offset, scale=scale,
                             rotation=rotation, mirrored=(mirror_x, False))
@@ -1047,7 +1047,7 @@ class ILibrary(ILibraryView, MutableMapping[str, 'Pattern'], metaclass=ABCMeta):
         if isinstance(tops, str):
             tops = (tops,)

-        keep = cast('set[str]', self.referenced_patterns(tops) - {None})
+        keep = cast(set[str], self.referenced_patterns(tops) - {None})
         keep |= set(tops)

         new = type(self)()
@@ -491,7 +491,7 @@ class Pattern(PortList, AnnotatableImpl, Mirrorable):
         """
         pat = self.deepcopy().polygonize().flatten(library=library)
         polys = [
-            cast('Polygon', shape).vertices + cast('Polygon', shape).offset
+            cast(Polygon, shape).vertices + cast(Polygon, shape).offset
             for shape in chain_elements(pat.shapes)
             ]
         return polys
@@ -533,7 +533,7 @@ class Pattern(PortList, AnnotatableImpl, Mirrorable):
         n_elems = sum(1 for _ in chain_elements(self.shapes, self.labels))
         ebounds = numpy.full((n_elems, 2, 2), nan)
         for ee, entry in enumerate(chain_elements(self.shapes, self.labels)):
-            maybe_ebounds = cast('Bounded', entry).get_bounds()
+            maybe_ebounds = cast(Bounded, entry).get_bounds()
             if maybe_ebounds is not None:
                 ebounds[ee] = maybe_ebounds
         mask = ~numpy.isnan(ebounds[:, 0, 0])
@@ -631,7 +631,7 @@ class Pattern(PortList, AnnotatableImpl, Mirrorable):
             self
         """
         for entry in chain(chain_elements(self.shapes, self.labels, self.refs), self.ports.values()):
-            cast('Positionable', entry).translate(offset)
+            cast(Positionable, entry).translate(offset)
         return self

     def scale_elements(self, c: float) -> Self:
@@ -645,37 +645,33 @@ class Pattern(PortList, AnnotatableImpl, Mirrorable):
             self
         """
         for entry in chain_elements(self.shapes, self.refs):
-            cast('Scalable', entry).scale_by(c)
+            cast(Scalable, entry).scale_by(c)
         return self

-    def scale_by(self, c: float, scale_refs: bool = True) -> Self:
+    def scale_by(self, c: float) -> Self:
         """
         Scale this Pattern by the given value
-        All shapes and (optionally) refs and their offsets are scaled,
-        as are all label and port offsets.
+        (all shapes and refs and their offsets are scaled,
+        as are all label and port offsets)

         Args:
             c: factor to scale by
-            scale_refs: Whether to scale refs. Ref offsets are always scaled,
-                but it may be desirable to not scale the ref itself (e.g. if
-                the target cell was also scaled).

         Returns:
             self
         """
         for entry in chain_elements(self.shapes, self.refs):
-            cast('Positionable', entry).offset *= c
-            if scale_refs or not isinstance(entry, Ref):
-                cast('Scalable', entry).scale_by(c)
+            cast(Positionable, entry).offset *= c
+            cast(Scalable, entry).scale_by(c)

-            rep = cast('Repeatable', entry).repetition
+            rep = cast(Repeatable, entry).repetition
             if rep:
                 rep.scale_by(c)

         for label in chain_elements(self.labels):
-            cast('Positionable', label).offset *= c
+            cast(Positionable, label).offset *= c

-            rep = cast('Repeatable', label).repetition
+            rep = cast(Repeatable, label).repetition
             if rep:
                 rep.scale_by(c)

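For context on the `scale_by` signature difference above: the `scale_refs` flag (present on the '-' side) let a caller scale a reference's placement without also scaling the reference itself, which matters when the referenced cell has already been scaled separately. A toy sketch of that distinction, with made-up names (`FakeRef`, `scale_parent`), not masque code:

```python
# Toy model only -- not masque's Ref class.
from dataclasses import dataclass

@dataclass
class FakeRef:
    offset: tuple[float, float]
    magnification: float = 1.0

def scale_parent(ref: FakeRef, c: float, scale_refs: bool = True) -> FakeRef:
    ox, oy = ref.offset
    ref.offset = (ox * c, oy * c)       # the placement always scales with the parent
    if scale_refs:
        ref.magnification *= c          # skip when the target cell was already scaled on its own
    return ref

print(scale_parent(FakeRef(offset=(10.0, 0.0)), 2.0))
```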
@@ -712,8 +708,8 @@ class Pattern(PortList, AnnotatableImpl, Mirrorable):
             self
         """
         for entry in chain(chain_elements(self.shapes, self.refs, self.labels), self.ports.values()):
-            old_offset = cast('Positionable', entry).offset
-            cast('Positionable', entry).offset = numpy.dot(rotation_matrix_2d(rotation), old_offset)
+            old_offset = cast(Positionable, entry).offset
+            cast(Positionable, entry).offset = numpy.dot(rotation_matrix_2d(rotation), old_offset)
         return self

     def rotate_elements(self, rotation: float) -> Self:
@@ -727,7 +723,7 @@ class Pattern(PortList, AnnotatableImpl, Mirrorable):
             self
         """
         for entry in chain(chain_elements(self.shapes, self.refs), self.ports.values()):
-            cast('Rotatable', entry).rotate(rotation)
+            cast(Rotatable, entry).rotate(rotation)
         return self

     def mirror_element_centers(self, across_axis: int = 0) -> Self:
@@ -742,7 +738,7 @@ class Pattern(PortList, AnnotatableImpl, Mirrorable):
             self
         """
         for entry in chain(chain_elements(self.shapes, self.refs, self.labels), self.ports.values()):
-            cast('Positionable', entry).offset[across_axis - 1] *= -1
+            cast(Positionable, entry).offset[across_axis - 1] *= -1
         return self

     def mirror_elements(self, across_axis: int = 0) -> Self:
@@ -758,7 +754,7 @@ class Pattern(PortList, AnnotatableImpl, Mirrorable):
             self
         """
         for entry in chain(chain_elements(self.shapes, self.refs), self.ports.values()):
-            cast('Mirrorable', entry).mirror(across_axis)
+            cast(Mirrorable, entry).mirror(across_axis)
         return self

     def mirror(self, across_axis: int = 0) -> Self:
@@ -294,7 +294,7 @@ class Grid(Repetition):
     def __le__(self, other: Repetition) -> bool:
         if type(self) is not type(other):
             return repr(type(self)) < repr(type(other))
-        other = cast('Grid', other)
+        other = cast(Grid, other)
         if self.a_count != other.a_count:
             return self.a_count < other.a_count
         if self.b_count != other.b_count:
@@ -357,7 +357,7 @@ class Arbitrary(Repetition):
     def __le__(self, other: Repetition) -> bool:
         if type(self) is not type(other):
             return repr(type(self)) < repr(type(other))
-        other = cast('Arbitrary', other)
+        other = cast(Arbitrary, other)
         if self.displacements.size != other.displacements.size:
             return self.displacements.size < other.displacements.size

@@ -206,7 +206,7 @@ class Arc(Shape):
             if repr(type(self)) != repr(type(other)):
                 return repr(type(self)) < repr(type(other))
             return id(type(self)) < id(type(other))
-        other = cast('Arc', other)
+        other = cast(Arc, other)
         if self.width != other.width:
             return self.width < other.width
         if not numpy.array_equal(self.radii, other.radii):
@@ -233,7 +233,7 @@ class Arc(Shape):
         r0, r1 = self.radii

         # Convert from polar angle to ellipse parameter (for [rx*cos(t), ry*sin(t)] representation)
-        a_ranges = cast('_array2x2_t', self._angles_to_parameters())
+        a_ranges = cast(_array2x2_t, self._angles_to_parameters())

         # Approximate perimeter via numerical integration

@@ -321,7 +321,7 @@ class Arc(Shape):

         If the extrema are innaccessible due to arc constraints, check the arc endpoints instead.
         """
-        a_ranges = cast('_array2x2_t', self._angles_to_parameters())
+        a_ranges = cast(_array2x2_t, self._angles_to_parameters())

         mins = []
         maxs = []
@@ -432,7 +432,7 @@ class Arc(Shape):
           [[x2, y2], [x3, y3]]], would create this arc from its corresponding ellipse.
         ```
         """
-        a_ranges = cast('_array2x2_t', self._angles_to_parameters())
+        a_ranges = cast(_array2x2_t, self._angles_to_parameters())

         mins = []
         maxs = []
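The `__lt__`/`__le__` hunks in this comparison all share the same fallback when the operands are different classes: order by `repr(type(...))`, then by `id(type(...))` if two distinct classes happen to share a repr. A small standalone sketch (not masque code) of why that yields a deterministic sort for a mixed list of shapes within one process:

```python
# Illustrative only -- stand-in classes, not masque shapes.
import functools

@functools.total_ordering
class Sortable:
    def __init__(self, value: float) -> None:
        self.value = value

    def __eq__(self, other: object) -> bool:
        return type(self) is type(other) and self.value == getattr(other, 'value', None)

    def __lt__(self, other: 'Sortable') -> bool:
        if type(self) is not type(other):
            if repr(type(self)) != repr(type(other)):
                return repr(type(self)) < repr(type(other))     # distinct class names sort consistently
            return id(type(self)) < id(type(other))             # tie-break for identically-named classes
        return self.value < other.value

class Square(Sortable): pass
class Triangle(Sortable): pass

mixed = [Triangle(1.0), Square(2.0), Square(1.0)]
print([type(x).__name__ for x in sorted(mixed)])    # ['Square', 'Square', 'Triangle']
```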
@@ -84,7 +84,7 @@ class Circle(Shape):
             if repr(type(self)) != repr(type(other)):
                 return repr(type(self)) < repr(type(other))
             return id(type(self)) < id(type(other))
-        other = cast('Circle', other)
+        other = cast(Circle, other)
         if not self.radius == other.radius:
             return self.radius < other.radius
         if not numpy.array_equal(self.offset, other.offset):
@@ -134,7 +134,7 @@ class Ellipse(Shape):
             if repr(type(self)) != repr(type(other)):
                 return repr(type(self)) < repr(type(other))
             return id(type(self)) < id(type(other))
-        other = cast('Ellipse', other)
+        other = cast(Ellipse, other)
         if not numpy.array_equal(self.radii, other.radii):
             return tuple(self.radii) < tuple(other.radii)
         if not numpy.array_equal(self.offset, other.offset):
@@ -223,7 +223,7 @@ class Path(Shape):
             if repr(type(self)) != repr(type(other)):
                 return repr(type(self)) < repr(type(other))
             return id(type(self)) < id(type(other))
-        other = cast('Path', other)
+        other = cast(Path, other)
         if self.width != other.width:
             return self.width < other.width
         if self.cap != other.cap:
@@ -405,7 +405,7 @@ class Path(Shape):
         x_min = rotated_vertices[:, 0].argmin()
         if not is_scalar(x_min):
             y_min = rotated_vertices[x_min, 1].argmin()
-            x_min = cast('Sequence', x_min)[y_min]
+            x_min = cast(Sequence, x_min)[y_min]
         reordered_vertices = numpy.roll(rotated_vertices, -x_min, axis=0)

         width0 = self.width / norm_value
masque/shapes/poly_collection.py (new file, 210 lines)
@@ -0,0 +1,210 @@
from typing import Any, cast, Iterable
from collections.abc import Sequence
import copy
import functools

import numpy
from numpy import pi
from numpy.typing import NDArray, ArrayLike

from . import Shape, normalized_shape_tuple
from .polygon import Polygon        # assumed import; Polygon is referenced by pop_as_polygon() / to_polygons() below
from ..error import PatternError
from ..repetition import Repetition
from ..utils import is_scalar, rotation_matrix_2d, annotations_lt, annotations_eq, rep2key
from ..utils import remove_colinear_vertices, remove_duplicate_vertices, annotations_t


@functools.total_ordering
class PolyCollection(Shape):
    """
    A collection of polygons, consisting of a list of vertex arrays (N_m x 2 ndarrays) which specify
    implicitly-closed boundaries, and an offset.

    Note that the setter for `PolyCollection.vertex_lists` creates a copy of the
    passed vertex coordinates.

    A `normalized_form(...)` is available, but can be quite slow with lots of vertices.
    """
    __slots__ = (
        '_vertex_lists',
        # Inherited
        '_offset', '_repetition', '_annotations',
        )

    _vertex_lists: list[NDArray[numpy.float64]]
    """ List of ndarrays (N_m x 2) of vertices `[ [[x0, y0], [x1, y1], ...] ]` """

    # vertex_lists property
    @property
    def vertex_lists(self) -> Any:      # mypy#3004   NDArray[numpy.float64]:
        """
        Vertices of the polygons (list of ndarrays (N_m x 2) `[ [[x0, y0], [x1, y1], ...] ]`)

        When setting, note that a copy will be made.
        """
        return self._vertex_lists

    @vertex_lists.setter
    def vertex_lists(self, val: ArrayLike) -> None:
        val = [numpy.array(vv, dtype=float) for vv in val]
        for ii, vv in enumerate(val):
            if len(vv.shape) < 2 or vv.shape[1] != 2:
                raise PatternError(f'vertex_lists contents must be Nx2 arrays (polygon #{ii} fails)')
            if vv.shape[0] < 3:
                raise PatternError(f'vertex_lists contents must have at least 3 vertices (Nx2 where N > 2) (polygon #{ii} has shape {vv.shape})')
        self._vertex_lists = val

    # xs property
    @property
    def xs(self) -> NDArray[numpy.float64]:
        """
        All vertex x coords as a 1D ndarray
        """
        return numpy.concatenate(self.vertex_lists)[:, 0]

    def __init__(
            self,
            vertex_lists: Iterable[ArrayLike],
            *,
            offset: ArrayLike = (0.0, 0.0),
            rotation: float = 0.0,
            repetition: Repetition | None = None,
            annotations: annotations_t | None = None,
            raw: bool = False,
            ) -> None:
        if raw:
            assert isinstance(vertex_lists, list)
            assert all(isinstance(vv, numpy.ndarray) for vv in vertex_lists)
            assert isinstance(offset, numpy.ndarray)
            self._vertex_lists = vertex_lists
            self._offset = offset
            self._repetition = repetition
            self._annotations = annotations if annotations is not None else {}
        else:
            self.vertex_lists = vertex_lists
            self.offset = offset
            self.repetition = repetition
            self.annotations = annotations if annotations is not None else {}
        self.rotate(rotation)

    def __deepcopy__(self, memo: dict | None = None) -> 'PolyCollection':
        memo = {} if memo is None else memo
        new = copy.copy(self)
        new._offset = self._offset.copy()
        new._vertex_lists = [vv.copy() for vv in self._vertex_lists]
        new._annotations = copy.deepcopy(self._annotations)
        return new

    def __eq__(self, other: Any) -> bool:
        return (
            type(self) is type(other)
            and numpy.array_equal(self.offset, other.offset)
            and all(numpy.array_equal(ss, oo) for ss, oo in zip(self.vertex_lists, other.vertex_lists))
            and self.repetition == other.repetition
            and annotations_eq(self.annotations, other.annotations)
            )

    def __lt__(self, other: Shape) -> bool:
        if type(self) is not type(other):
            if repr(type(self)) != repr(type(other)):
                return repr(type(self)) < repr(type(other))
            return id(type(self)) < id(type(other))

        other = cast(PolyCollection, other)
        for vv, oo in zip(self.vertex_lists, other.vertex_lists):
            if not numpy.array_equal(vv, oo):
                min_len = min(vv.shape[0], oo.shape[0])
                eq_mask = vv[:min_len] != oo[:min_len]
                eq_lt = vv[:min_len] < oo[:min_len]
                eq_lt_masked = eq_lt[eq_mask]
                if eq_lt_masked.size > 0:
                    return eq_lt_masked.flat[0]
                return vv.shape[0] < oo.shape[0]
        if len(self.vertex_lists) != len(other.vertex_lists):
            return len(self.vertex_lists) < len(other.vertex_lists)
        if not numpy.array_equal(self.offset, other.offset):
            return tuple(self.offset) < tuple(other.offset)
        if self.repetition != other.repetition:
            return rep2key(self.repetition) < rep2key(other.repetition)
        return annotations_lt(self.annotations, other.annotations)

    def pop_as_polygon(self, index: int) -> 'Polygon':
        """
        Remove one polygon from the list, and return it as a `Polygon` object.

        Args:
            index: which polygon to pop
        """
        verts = self.vertex_lists.pop(index)
        return Polygon(
            vertices=verts,
            offset=self.offset,
            repetition=self.repetition.copy() if self.repetition is not None else None,
            annotations=copy.deepcopy(self.annotations),
            )

    def to_polygons(
            self,
            num_vertices: int | None = None,      # unused  # noqa: ARG002
            max_arclen: float | None = None,      # unused  # noqa: ARG002
            ) -> list['Polygon']:
        return [Polygon(
            vertices=vv,
            offset=self.offset,
            repetition=self.repetition.copy() if self.repetition is not None else None,
            annotations=copy.deepcopy(self.annotations),
            ) for vv in self.vertex_lists]

    def get_bounds_single(self) -> NDArray[numpy.float64]:         # TODO note shape get_bounds doesn't include repetition
        mins = [numpy.min(vv, axis=0) for vv in self.vertex_lists]
        maxs = [numpy.max(vv, axis=0) for vv in self.vertex_lists]
        return numpy.vstack((self.offset + numpy.min(mins, axis=0),
                             self.offset + numpy.max(maxs, axis=0)))

    def rotate(self, theta: float) -> 'PolyCollection':
        if theta != 0:
            for vv in self.vertex_lists:
                vv[:] = numpy.dot(rotation_matrix_2d(theta), vv.T).T
        return self

    def mirror(self, axis: int = 0) -> 'PolyCollection':
        for vv in self.vertex_lists:
            vv[:, axis - 1] *= -1
        return self

    def scale_by(self, c: float) -> 'PolyCollection':
        for vv in self.vertex_lists:
            vv *= c
        return self

    def normalized_form(self, norm_value: float) -> normalized_shape_tuple:
        # Note: this function is going to be pretty slow for many-vertexed polygons, relative to
        #   other shapes
        meanv = numpy.concatenate(self.vertex_lists).mean(axis=0)
        # Concatenate so the std / SVD below operate on a single (N, 2) array
        zeroed_vertices = numpy.concatenate([vv - meanv for vv in self.vertex_lists])
        offset = meanv + self.offset

        scale = zeroed_vertices.std()
        normed_vertices = zeroed_vertices / scale

        _, _, vertex_axis = numpy.linalg.svd(zeroed_vertices)
        rotation = numpy.arctan2(vertex_axis[0][1], vertex_axis[0][0]) % (2 * pi)
        rotated_vertices = numpy.vstack([numpy.dot(rotation_matrix_2d(-rotation), v)
                                         for v in normed_vertices])

        # Reorder the vertices so that the one with lowest x, then y, comes first.
        x_min = rotated_vertices[:, 0].argmin()
        if not is_scalar(x_min):
            y_min = rotated_vertices[x_min, 1].argmin()
            x_min = cast(Sequence, x_min)[y_min]
        reordered_vertices = numpy.roll(rotated_vertices, -x_min, axis=0)

        # TODO: normalize mirroring?

        return ((type(self), reordered_vertices.data.tobytes()),
                (offset, scale / norm_value, rotation, False),
                lambda: Polygon(reordered_vertices * norm_value))

    def __repr__(self) -> str:
        centroid = self.offset + numpy.concatenate(self.vertex_lists).mean(axis=0)
        return f'<PolyCollection centroid {centroid} p{len(self.vertex_lists)}>'
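A rough usage sketch for the new shape, under two assumptions not shown in this compare view: that `PolyCollection` ends up exported from `masque.shapes` like the existing shapes, and that the small fixes noted above (`vertex_lists` wiring, `get_bounds_single`) are in place:

```python
# Usage sketch only -- import path and exact behaviour depend on how the branch wires this up.
import numpy
from masque.shapes import PolyCollection

tri = numpy.array([[0.0, 0.0], [10.0, 0.0], [0.0, 10.0]])
square = numpy.array([[20.0, 0.0], [30.0, 0.0], [30.0, 10.0], [20.0, 10.0]])

pc = PolyCollection(vertex_lists=[tri, square], offset=(5.0, 5.0))
pc.rotate(numpy.pi / 2).scale_by(0.5)        # both mutate in place and return self
print(len(pc.vertex_lists), pc.get_bounds_single())
```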
@@ -1,4 +1,5 @@
-from typing import Any, cast, TYPE_CHECKING
+from typing import Any, cast
+from collections.abc import Sequence
 import copy
 import functools

@@ -12,9 +13,6 @@ from ..repetition import Repetition
 from ..utils import is_scalar, rotation_matrix_2d, annotations_lt, annotations_eq, rep2key
 from ..utils import remove_colinear_vertices, remove_duplicate_vertices, annotations_t

-if TYPE_CHECKING:
-    from collections.abc import Sequence
-

 @functools.total_ordering
 class Polygon(Shape):
@@ -131,7 +129,7 @@ class Polygon(Shape):
             if repr(type(self)) != repr(type(other)):
                 return repr(type(self)) < repr(type(other))
             return id(type(self)) < id(type(other))
-        other = cast('Polygon', other)
+        other = cast(Polygon, other)
         if not numpy.array_equal(self.vertices, other.vertices):
             min_len = min(self.vertices.shape[0], other.vertices.shape[0])
             eq_mask = self.vertices[:min_len] != other.vertices[:min_len]
@@ -397,7 +395,7 @@ class Polygon(Shape):
         x_min = rotated_vertices[:, 0].argmin()
         if not is_scalar(x_min):
             y_min = rotated_vertices[x_min, 1].argmin()
-            x_min = cast('Sequence', x_min)[y_min]
+            x_min = cast(Sequence, x_min)[y_min]
         reordered_vertices = numpy.roll(rotated_vertices, -x_min, axis=0)

         # TODO: normalize mirroring?
@@ -115,7 +115,7 @@ class Text(RotatableImpl, Shape):
             if repr(type(self)) != repr(type(other)):
                 return repr(type(self)) < repr(type(other))
             return id(type(self)) < id(type(other))
-        other = cast('Text', other)
+        other = cast(Text, other)
         if not self.height == other.height:
             return self.height < other.height
         if not self.string == other.string:
@@ -1,15 +1,14 @@
-from typing import Self, cast, Any, TYPE_CHECKING
+from typing import Self, cast, Any
 from abc import ABCMeta, abstractmethod

 import numpy
 from numpy import pi
 from numpy.typing import ArrayLike

+from .positionable import Positionable
 from ..error import MasqueError
 from ..utils import rotation_matrix_2d

-if TYPE_CHECKING:
-    from .positionable import Positionable

 _empty_slots = ()       # Workaround to get mypy to ignore intentionally empty slots for superclass

@@ -114,9 +113,9 @@ class PivotableImpl(Pivotable, metaclass=ABCMeta):

     def rotate_around(self, pivot: ArrayLike, rotation: float) -> Self:
         pivot = numpy.asarray(pivot, dtype=float)
-        cast('Positionable', self).translate(-pivot)
-        cast('Rotatable', self).rotate(rotation)
+        cast(Positionable, self).translate(-pivot)
+        cast(Rotatable, self).rotate(rotation)
         self.offset = numpy.dot(rotation_matrix_2d(rotation), self.offset)      # type: ignore # mypy#3004
-        cast('Positionable', self).translate(+pivot)
+        cast(Positionable, self).translate(+pivot)
         return self

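`rotate_around` above is the usual translate, rotate, translate-back construction: move the pivot to the origin, rotate, then move back, also rotating the object's own offset. A standalone numpy sketch (not masque code) of the same identity for a single point:

```python
# Illustrative only -- a single point instead of a masque object.
import numpy

def rotation_matrix_2d(theta: float) -> numpy.ndarray:
    return numpy.array([[numpy.cos(theta), -numpy.sin(theta)],
                        [numpy.sin(theta),  numpy.cos(theta)]])

def rotate_around(point: numpy.ndarray, pivot: numpy.ndarray, rotation: float) -> numpy.ndarray:
    # translate so the pivot sits at the origin, rotate, then translate back
    return pivot + rotation_matrix_2d(rotation) @ (point - pivot)

print(rotate_around(numpy.array([2.0, 0.0]), numpy.array([1.0, 0.0]), numpy.pi / 2))   # ~[1., 1.]
```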
@@ -78,6 +78,7 @@ lint.ignore = [
  "ANN002",    # *args
  "ANN003",    # **kwargs
  "ANN401",    # Any
  "ANN101",    # self: Self
  "SIM108",    # single-line if / else assignment
  "RET504",    # x=y+z; return x
  "PIE790",    # unnecessary pass