Compare commits

..

No commits in common. "db222373694a1911923c94c201cd41e39cc4cc46" and "963103b859494b60b87f2771c94c109d86c61699" have entirely different histories.

20 changed files with 151 additions and 418 deletions

View file

@ -106,7 +106,7 @@ def ell(
raise BuildError('Asked to find aggregation for ports that face in different directions:\n' raise BuildError('Asked to find aggregation for ports that face in different directions:\n'
+ pformat(port_rotations)) + pformat(port_rotations))
else: else:
if set_rotation is None: if set_rotation is not None:
raise BuildError('set_rotation must be specified if no ports have rotations!') raise BuildError('set_rotation must be specified if no ports have rotations!')
rotations = numpy.full_like(has_rotation, set_rotation, dtype=float) rotations = numpy.full_like(has_rotation, set_rotation, dtype=float)

View file

@ -16,7 +16,7 @@ import gzip
import numpy import numpy
import ezdxf import ezdxf
from ezdxf.enums import TextEntityAlignment from ezdxf.enums import TextEntityAlignment
from ezdxf.entities import LWPolyline, Polyline, Text, Insert, Solid, Trace from ezdxf.entities import LWPolyline, Polyline, Text, Insert
from .utils import is_gzipped, tmpfile from .utils import is_gzipped, tmpfile
from .. import Pattern, Ref, PatternError, Label from .. import Pattern, Ref, PatternError, Label
@ -217,54 +217,27 @@ def _read_block(block: ezdxf.layouts.BlockLayout | ezdxf.layouts.Modelspace) ->
attr = element.dxfattribs() attr = element.dxfattribs()
layer = attr.get('layer', DEFAULT_LAYER) layer = attr.get('layer', DEFAULT_LAYER)
width = 0 if points.shape[1] == 2:
if isinstance(element, LWPolyline): raise PatternError('Invalid or unimplemented polygon?')
# ezdxf 1.4+ get_points() returns (x, y, start_width, end_width, bulge)
if points.shape[1] >= 5:
if (points[:, 4] != 0).any():
raise PatternError('LWPolyline has bulge (not yet representable in masque!)')
if (points[:, 2] != points[:, 3]).any() or (points[:, 2] != points[0, 2]).any():
raise PatternError('LWPolyline has non-constant width (not yet representable in masque!)')
width = points[0, 2]
elif points.shape[1] == 3:
# width used to be in column 2
width = points[0, 2]
if width == 0: if points.shape[1] > 2:
width = attr.get('const_width', 0) if (points[0, 2] != points[:, 2]).any():
raise PatternError('PolyLine has non-constant width (not yet representable in masque!)')
if points.shape[1] == 4 and (points[:, 3] != 0).any():
raise PatternError('LWPolyLine has bulge (not yet representable in masque!)')
is_closed = element.closed width = points[0, 2]
verts = points[:, :2] if width == 0:
if is_closed and (len(verts) < 2 or not numpy.allclose(verts[0], verts[-1])): width = attr.get('const_width', 0)
verts = numpy.vstack((verts, verts[0]))
shape: Path | Polygon shape: Path | Polygon
if width == 0 and is_closed: if width == 0 and len(points) > 2 and numpy.array_equal(points[0], points[-1]):
# Use Polygon if it has at least 3 unique vertices shape = Polygon(vertices=points[:-1, :2])
shape_verts = verts[:-1] if len(verts) > 1 else verts
if len(shape_verts) >= 3:
shape = Polygon(vertices=shape_verts)
else: else:
shape = Path(width=width, vertices=verts) shape = Path(width=width, vertices=points[:, :2])
else:
shape = Path(width=width, vertices=verts)
pat.shapes[layer].append(shape) pat.shapes[layer].append(shape)
elif isinstance(element, Solid | Trace):
attr = element.dxfattribs()
layer = attr.get('layer', DEFAULT_LAYER)
points = numpy.array([element.get_dxf_attrib(f'vtx{i}') for i in range(4)
if element.has_dxf_attrib(f'vtx{i}')])
if len(points) >= 3:
# If vtx2 == vtx3, it's a triangle. ezdxf handles this.
if len(points) == 4 and numpy.allclose(points[2], points[3]):
verts = points[:3, :2]
# DXF Solid/Trace uses 0-1-3-2 vertex order for quadrilaterals!
elif len(points) == 4:
verts = points[[0, 1, 3, 2], :2]
else:
verts = points[:, :2]
pat.shapes[layer].append(Polygon(vertices=verts))
elif isinstance(element, Text): elif isinstance(element, Text):
args = dict( args = dict(
offset=numpy.asarray(element.get_placement()[1])[:2], offset=numpy.asarray(element.get_placement()[1])[:2],
@ -329,23 +302,15 @@ def _mrefs_to_drefs(
elif isinstance(rep, Grid): elif isinstance(rep, Grid):
a = rep.a_vector a = rep.a_vector
b = rep.b_vector if rep.b_vector is not None else numpy.zeros(2) b = rep.b_vector if rep.b_vector is not None else numpy.zeros(2)
# In masque, the grid basis vectors are NOT rotated by the reference's rotation. rotated_a = rotation_matrix_2d(-ref.rotation) @ a
# In DXF, the grid basis vectors are [column_spacing, 0] and [0, row_spacing], rotated_b = rotation_matrix_2d(-ref.rotation) @ b
# which ARE then rotated by the block reference's rotation. if rotated_a[1] == 0 and rotated_b[0] == 0:
# Therefore, we can only use a DXF array if ref.rotation is 0 (or a multiple of 90)
# AND the grid is already manhattan.
# Rotate basis vectors by the reference rotation to see where they end up in the DXF frame
rotated_a = rotation_matrix_2d(ref.rotation) @ a
rotated_b = rotation_matrix_2d(ref.rotation) @ b
if numpy.isclose(rotated_a[1], 0, atol=1e-8) and numpy.isclose(rotated_b[0], 0, atol=1e-8):
attribs['column_count'] = rep.a_count attribs['column_count'] = rep.a_count
attribs['row_count'] = rep.b_count attribs['row_count'] = rep.b_count
attribs['column_spacing'] = rotated_a[0] attribs['column_spacing'] = rotated_a[0]
attribs['row_spacing'] = rotated_b[1] attribs['row_spacing'] = rotated_b[1]
block.add_blockref(encoded_name, ref.offset, dxfattribs=attribs) block.add_blockref(encoded_name, ref.offset, dxfattribs=attribs)
elif numpy.isclose(rotated_a[0], 0, atol=1e-8) and numpy.isclose(rotated_b[1], 0, atol=1e-8): elif rotated_a[0] == 0 and rotated_b[1] == 0:
attribs['column_count'] = rep.b_count attribs['column_count'] = rep.b_count
attribs['row_count'] = rep.a_count attribs['row_count'] = rep.a_count
attribs['column_spacing'] = rotated_b[0] attribs['column_spacing'] = rotated_b[0]
@ -383,18 +348,10 @@ def _shapes_to_elements(
displacements = shape.repetition.displacements displacements = shape.repetition.displacements
for dd in displacements: for dd in displacements:
if isinstance(shape, Path): for polygon in shape.to_polygons():
# preserve path. xy_open = polygon.vertices + dd
# Note: DXF paths don't support endcaps well, so this is still a bit limited. xy_closed = numpy.vstack((xy_open, xy_open[0, :]))
xy = shape.vertices + dd block.add_lwpolyline(xy_closed, dxfattribs=attribs)
attribs_path = {**attribs}
if shape.width > 0:
attribs_path['const_width'] = shape.width
block.add_lwpolyline(xy, dxfattribs=attribs_path)
else:
for polygon in shape.to_polygons():
xy_open = polygon.vertices + dd
block.add_lwpolyline(xy_open, close=True, dxfattribs=attribs)
def _labels_to_texts( def _labels_to_texts(

View file

@ -453,7 +453,7 @@ def _shapes_to_elements(
extension: tuple[int, int] extension: tuple[int, int]
if shape.cap == Path.Cap.SquareCustom and shape.cap_extensions is not None: if shape.cap == Path.Cap.SquareCustom and shape.cap_extensions is not None:
extension = tuple(rint_cast(shape.cap_extensions)) extension = tuple(shape.cap_extensions) # type: ignore
else: else:
extension = (0, 0) extension = (0, 0)
@ -617,12 +617,7 @@ def load_libraryfile(
stream = mmap.mmap(base_stream.fileno(), 0, access=mmap.ACCESS_READ) # type: ignore stream = mmap.mmap(base_stream.fileno(), 0, access=mmap.ACCESS_READ) # type: ignore
else: else:
stream = path.open(mode='rb') # noqa: SIM115 stream = path.open(mode='rb') # noqa: SIM115
return load_library(stream, full_load=full_load, postprocess=postprocess)
try:
return load_library(stream, full_load=full_load, postprocess=postprocess)
finally:
if full_load:
stream.close()
def check_valid_names( def check_valid_names(
@ -653,7 +648,7 @@ def check_valid_names(
logger.error('Names contain invalid characters:\n' + pformat(bad_chars)) logger.error('Names contain invalid characters:\n' + pformat(bad_chars))
if bad_lengths: if bad_lengths:
logger.error(f'Names too long (>{max_length}):\n' + pformat(bad_lengths)) logger.error(f'Names too long (>{max_length}:\n' + pformat(bad_chars))
if bad_chars or bad_lengths: if bad_chars or bad_lengths:
raise LibraryError('Library contains invalid names, see log above') raise LibraryError('Library contains invalid names, see log above')

View file

@ -182,8 +182,8 @@ def writefile(
Args: Args:
library: A {name: Pattern} mapping of patterns to write. library: A {name: Pattern} mapping of patterns to write.
filename: Filename to save to. filename: Filename to save to.
*args: passed to `oasis.build()` *args: passed to `oasis.write`
**kwargs: passed to `oasis.build()` **kwargs: passed to `oasis.write`
""" """
path = pathlib.Path(filename) path = pathlib.Path(filename)
@ -213,9 +213,9 @@ def readfile(
Will automatically decompress gzipped files. Will automatically decompress gzipped files.
Args: Args:
filename: Filename to load from. filename: Filename to save to.
*args: passed to `oasis.read()` *args: passed to `oasis.read`
**kwargs: passed to `oasis.read()` **kwargs: passed to `oasis.read`
""" """
path = pathlib.Path(filename) path = pathlib.Path(filename)
if is_gzipped(path): if is_gzipped(path):
@ -717,6 +717,10 @@ def properties_to_annotations(
annotations[key] = values annotations[key] = values
return annotations return annotations
properties = [fatrec.Property(key, vals, is_standard=False)
for key, vals in annotations.items()]
return properties
def check_valid_names( def check_valid_names(
names: Iterable[str], names: Iterable[str],

View file

@ -186,9 +186,9 @@ class ILibraryView(Mapping[str, 'Pattern'], metaclass=ABCMeta):
# Perform recursive lookups, but only once for each name # Perform recursive lookups, but only once for each name
for target in targets - skip: for target in targets - skip:
assert target is not None assert target is not None
skip.add(target)
if target in self: if target in self:
targets |= self.referenced_patterns(target, skip=skip) targets |= self.referenced_patterns(target, skip=skip)
skip.add(target)
return targets return targets
@ -466,11 +466,9 @@ class ILibraryView(Mapping[str, 'Pattern'], metaclass=ABCMeta):
memo = {} memo = {}
if transform is None or transform is True: if transform is None or transform is True:
transform = numpy.array([0, 0, 0, 0, 1], dtype=float) transform = numpy.zeros(4)
elif transform is not False: elif transform is not False:
transform = numpy.asarray(transform, dtype=float) transform = numpy.asarray(transform, dtype=float)
if transform.size == 4:
transform = numpy.append(transform, 1.0)
original_pattern = pattern original_pattern = pattern
@ -1269,12 +1267,12 @@ class LazyLibrary(ILibrary):
""" """
mapping: dict[str, Callable[[], 'Pattern']] mapping: dict[str, Callable[[], 'Pattern']]
cache: dict[str, 'Pattern'] cache: dict[str, 'Pattern']
_lookups_in_progress: list[str] _lookups_in_progress: set[str]
def __init__(self) -> None: def __init__(self) -> None:
self.mapping = {} self.mapping = {}
self.cache = {} self.cache = {}
self._lookups_in_progress = [] self._lookups_in_progress = set()
def __setitem__( def __setitem__(
self, self,
@ -1305,20 +1303,16 @@ class LazyLibrary(ILibrary):
return self.cache[key] return self.cache[key]
if key in self._lookups_in_progress: if key in self._lookups_in_progress:
chain = ' -> '.join(self._lookups_in_progress + [key])
raise LibraryError( raise LibraryError(
f'Detected circular reference or recursive lookup of "{key}".\n' f'Detected multiple simultaneous lookups of "{key}".\n'
f'Lookup chain: {chain}\n'
'This may be caused by an invalid (cyclical) reference, or buggy code.\n' 'This may be caused by an invalid (cyclical) reference, or buggy code.\n'
'If you are lazy-loading a file, try a non-lazy load and check for reference cycles.' 'If you are lazy-loading a file, try a non-lazy load and check for reference cycles.' # TODO give advice on finding cycles
) )
self._lookups_in_progress.append(key) self._lookups_in_progress.add(key)
try: func = self.mapping[key]
func = self.mapping[key] pat = func()
pat = func() self._lookups_in_progress.remove(key)
finally:
self._lookups_in_progress.pop()
self.cache[key] = pat self.cache[key] = pat
return pat return pat

View file

@ -201,7 +201,7 @@ class Pattern(PortList, AnnotatableImpl, Mirrorable):
def __lt__(self, other: 'Pattern') -> bool: def __lt__(self, other: 'Pattern') -> bool:
self_nonempty_targets = [target for target, reflist in self.refs.items() if reflist] self_nonempty_targets = [target for target, reflist in self.refs.items() if reflist]
other_nonempty_targets = [target for target, reflist in other.refs.items() if reflist] other_nonempty_targets = [target for target, reflist in self.refs.items() if reflist]
self_tgtkeys = tuple(sorted((target is None, target) for target in self_nonempty_targets)) self_tgtkeys = tuple(sorted((target is None, target) for target in self_nonempty_targets))
other_tgtkeys = tuple(sorted((target is None, target) for target in other_nonempty_targets)) other_tgtkeys = tuple(sorted((target is None, target) for target in other_nonempty_targets))
@ -215,7 +215,7 @@ class Pattern(PortList, AnnotatableImpl, Mirrorable):
return refs_ours < refs_theirs return refs_ours < refs_theirs
self_nonempty_layers = [ll for ll, elems in self.shapes.items() if elems] self_nonempty_layers = [ll for ll, elems in self.shapes.items() if elems]
other_nonempty_layers = [ll for ll, elems in other.shapes.items() if elems] other_nonempty_layers = [ll for ll, elems in self.shapes.items() if elems]
self_layerkeys = tuple(sorted(layer2key(ll) for ll in self_nonempty_layers)) self_layerkeys = tuple(sorted(layer2key(ll) for ll in self_nonempty_layers))
other_layerkeys = tuple(sorted(layer2key(ll) for ll in other_nonempty_layers)) other_layerkeys = tuple(sorted(layer2key(ll) for ll in other_nonempty_layers))
@ -224,21 +224,21 @@ class Pattern(PortList, AnnotatableImpl, Mirrorable):
for _, _, layer in self_layerkeys: for _, _, layer in self_layerkeys:
shapes_ours = tuple(sorted(self.shapes[layer])) shapes_ours = tuple(sorted(self.shapes[layer]))
shapes_theirs = tuple(sorted(other.shapes[layer])) shapes_theirs = tuple(sorted(self.shapes[layer]))
if shapes_ours != shapes_theirs: if shapes_ours != shapes_theirs:
return shapes_ours < shapes_theirs return shapes_ours < shapes_theirs
self_nonempty_txtlayers = [ll for ll, elems in self.labels.items() if elems] self_nonempty_txtlayers = [ll for ll, elems in self.labels.items() if elems]
other_nonempty_txtlayers = [ll for ll, elems in other.labels.items() if elems] other_nonempty_txtlayers = [ll for ll, elems in self.labels.items() if elems]
self_txtlayerkeys = tuple(sorted(layer2key(ll) for ll in self_nonempty_txtlayers)) self_txtlayerkeys = tuple(sorted(layer2key(ll) for ll in self_nonempty_txtlayers))
other_txtlayerkeys = tuple(sorted(layer2key(ll) for ll in other_nonempty_txtlayers)) other_txtlayerkeys = tuple(sorted(layer2key(ll) for ll in other_nonempty_txtlayers))
if self_txtlayerkeys != other_txtlayerkeys: if self_txtlayerkeys != other_txtlayerkeys:
return self_txtlayerkeys < other_txtlayerkeys return self_txtlayerkeys < other_txtlayerkeys
for _, _, layer in self_txtlayerkeys: for _, _, layer in self_layerkeys:
labels_ours = tuple(sorted(self.labels[layer])) labels_ours = tuple(sorted(self.labels[layer]))
labels_theirs = tuple(sorted(other.labels[layer])) labels_theirs = tuple(sorted(self.labels[layer]))
if labels_ours != labels_theirs: if labels_ours != labels_theirs:
return labels_ours < labels_theirs return labels_ours < labels_theirs
@ -255,7 +255,7 @@ class Pattern(PortList, AnnotatableImpl, Mirrorable):
return False return False
self_nonempty_targets = [target for target, reflist in self.refs.items() if reflist] self_nonempty_targets = [target for target, reflist in self.refs.items() if reflist]
other_nonempty_targets = [target for target, reflist in other.refs.items() if reflist] other_nonempty_targets = [target for target, reflist in self.refs.items() if reflist]
self_tgtkeys = tuple(sorted((target is None, target) for target in self_nonempty_targets)) self_tgtkeys = tuple(sorted((target is None, target) for target in self_nonempty_targets))
other_tgtkeys = tuple(sorted((target is None, target) for target in other_nonempty_targets)) other_tgtkeys = tuple(sorted((target is None, target) for target in other_nonempty_targets))
@ -269,7 +269,7 @@ class Pattern(PortList, AnnotatableImpl, Mirrorable):
return False return False
self_nonempty_layers = [ll for ll, elems in self.shapes.items() if elems] self_nonempty_layers = [ll for ll, elems in self.shapes.items() if elems]
other_nonempty_layers = [ll for ll, elems in other.shapes.items() if elems] other_nonempty_layers = [ll for ll, elems in self.shapes.items() if elems]
self_layerkeys = tuple(sorted(layer2key(ll) for ll in self_nonempty_layers)) self_layerkeys = tuple(sorted(layer2key(ll) for ll in self_nonempty_layers))
other_layerkeys = tuple(sorted(layer2key(ll) for ll in other_nonempty_layers)) other_layerkeys = tuple(sorted(layer2key(ll) for ll in other_nonempty_layers))
@ -278,21 +278,21 @@ class Pattern(PortList, AnnotatableImpl, Mirrorable):
for _, _, layer in self_layerkeys: for _, _, layer in self_layerkeys:
shapes_ours = tuple(sorted(self.shapes[layer])) shapes_ours = tuple(sorted(self.shapes[layer]))
shapes_theirs = tuple(sorted(other.shapes[layer])) shapes_theirs = tuple(sorted(self.shapes[layer]))
if shapes_ours != shapes_theirs: if shapes_ours != shapes_theirs:
return False return False
self_nonempty_txtlayers = [ll for ll, elems in self.labels.items() if elems] self_nonempty_txtlayers = [ll for ll, elems in self.labels.items() if elems]
other_nonempty_txtlayers = [ll for ll, elems in other.labels.items() if elems] other_nonempty_txtlayers = [ll for ll, elems in self.labels.items() if elems]
self_txtlayerkeys = tuple(sorted(layer2key(ll) for ll in self_nonempty_txtlayers)) self_txtlayerkeys = tuple(sorted(layer2key(ll) for ll in self_nonempty_txtlayers))
other_txtlayerkeys = tuple(sorted(layer2key(ll) for ll in other_nonempty_txtlayers)) other_txtlayerkeys = tuple(sorted(layer2key(ll) for ll in other_nonempty_txtlayers))
if self_txtlayerkeys != other_txtlayerkeys: if self_txtlayerkeys != other_txtlayerkeys:
return False return False
for _, _, layer in self_txtlayerkeys: for _, _, layer in self_layerkeys:
labels_ours = tuple(sorted(self.labels[layer])) labels_ours = tuple(sorted(self.labels[layer]))
labels_theirs = tuple(sorted(other.labels[layer])) labels_theirs = tuple(sorted(self.labels[layer]))
if labels_ours != labels_theirs: if labels_ours != labels_theirs:
return False return False

View file

@ -630,7 +630,7 @@ class PortList(metaclass=ABCMeta):
rotations = numpy.mod(s_rotations - o_rotations - pi, 2 * pi) rotations = numpy.mod(s_rotations - o_rotations - pi, 2 * pi)
if not has_rot.any(): if not has_rot.any():
if set_rotation is None: if set_rotation is None:
raise PortError('Must provide set_rotation if rotation is indeterminate') PortError('Must provide set_rotation if rotation is indeterminate')
rotations[:] = set_rotation rotations[:] = set_rotation
else: else:
rotations[~has_rot] = rotations[has_rot][0] rotations[~has_rot] = rotations[has_rot][0]

View file

@ -92,22 +92,18 @@ class Ref(
rotation=self.rotation, rotation=self.rotation,
scale=self.scale, scale=self.scale,
mirrored=self.mirrored, mirrored=self.mirrored,
repetition=self.repetition, repetition=copy.deepcopy(self.repetition),
annotations=self.annotations, annotations=copy.deepcopy(self.annotations),
) )
return new return new
def __deepcopy__(self, memo: dict | None = None) -> 'Ref': def __deepcopy__(self, memo: dict | None = None) -> 'Ref':
memo = {} if memo is None else memo memo = {} if memo is None else memo
new = copy.copy(self) new = copy.copy(self)
new._offset = self._offset.copy() #new.repetition = copy.deepcopy(self.repetition, memo)
new.repetition = copy.deepcopy(self.repetition, memo) #new.annotations = copy.deepcopy(self.annotations, memo)
new.annotations = copy.deepcopy(self.annotations, memo)
return new return new
def copy(self) -> 'Ref':
return self.deepcopy()
def __lt__(self, other: 'Ref') -> bool: def __lt__(self, other: 'Ref') -> bool:
if (self.offset != other.offset).any(): if (self.offset != other.offset).any():
return tuple(self.offset) < tuple(other.offset) return tuple(self.offset) < tuple(other.offset)
@ -191,11 +187,10 @@ class Ref(
xys = self.offset[None, :] xys = self.offset[None, :]
if self.repetition is not None: if self.repetition is not None:
xys = xys + self.repetition.displacements xys = xys + self.repetition.displacements
transforms = numpy.empty((xys.shape[0], 5)) transforms = numpy.empty((xys.shape[0], 4))
transforms[:, :2] = xys transforms[:, :2] = xys
transforms[:, 2] = self.rotation transforms[:, 2] = self.rotation
transforms[:, 3] = self.mirrored transforms[:, 3] = self.mirrored
transforms[:, 4] = self.scale
return transforms return transforms
def get_bounds_single( def get_bounds_single(

View file

@ -64,7 +64,7 @@ class Grid(Repetition):
_a_count: int _a_count: int
""" Number of instances along the direction specified by the `a_vector` """ """ Number of instances along the direction specified by the `a_vector` """
_b_vector: NDArray[numpy.float64] _b_vector: NDArray[numpy.float64] | None
""" Vector `[x, y]` specifying a second lattice vector for the grid. """ Vector `[x, y]` specifying a second lattice vector for the grid.
Specifies center-to-center spacing between adjacent elements. Specifies center-to-center spacing between adjacent elements.
Can be `None` for a 1D array. Can be `None` for a 1D array.
@ -199,6 +199,9 @@ class Grid(Repetition):
@property @property
def displacements(self) -> NDArray[numpy.float64]: def displacements(self) -> NDArray[numpy.float64]:
if self.b_vector is None:
return numpy.arange(self.a_count)[:, None] * self.a_vector[None, :]
aa, bb = numpy.meshgrid(numpy.arange(self.a_count), numpy.arange(self.b_count), indexing='ij') aa, bb = numpy.meshgrid(numpy.arange(self.a_count), numpy.arange(self.b_count), indexing='ij')
return (aa.flatten()[:, None] * self.a_vector[None, :] return (aa.flatten()[:, None] * self.a_vector[None, :]
+ bb.flatten()[:, None] * self.b_vector[None, :]) # noqa + bb.flatten()[:, None] * self.b_vector[None, :]) # noqa
@ -298,8 +301,12 @@ class Grid(Repetition):
return self.b_count < other.b_count return self.b_count < other.b_count
if not numpy.array_equal(self.a_vector, other.a_vector): if not numpy.array_equal(self.a_vector, other.a_vector):
return tuple(self.a_vector) < tuple(other.a_vector) return tuple(self.a_vector) < tuple(other.a_vector)
if self.b_vector is None:
return other.b_vector is not None
if other.b_vector is None:
return False
if not numpy.array_equal(self.b_vector, other.b_vector): if not numpy.array_equal(self.b_vector, other.b_vector):
return tuple(self.b_vector) < tuple(other.b_vector) return tuple(self.a_vector) < tuple(other.a_vector)
return False return False
@ -384,9 +391,7 @@ class Arbitrary(Repetition):
Returns: Returns:
self self
""" """
new_displacements = self.displacements.copy() self.displacements[:, 1 - axis] *= -1
new_displacements[:, 1 - axis] *= -1
self.displacements = new_displacements
return self return self
def get_bounds(self) -> NDArray[numpy.float64] | None: def get_bounds(self) -> NDArray[numpy.float64] | None:
@ -411,6 +416,6 @@ class Arbitrary(Repetition):
Returns: Returns:
self self
""" """
self.displacements = self.displacements * c self.displacements *= c
return self return self

View file

@ -24,16 +24,7 @@ class PathCap(Enum):
# # defined by path.cap_extensions # # defined by path.cap_extensions
def __lt__(self, other: Any) -> bool: def __lt__(self, other: Any) -> bool:
if self.__class__ is not other.__class__: return self.value == other.value
return self.__class__.__name__ < other.__class__.__name__
# Order: Flush, Square, Circle, SquareCustom
order = {
PathCap.Flush: 0,
PathCap.Square: 1,
PathCap.Circle: 2,
PathCap.SquareCustom: 3,
}
return order[self] < order[other]
@functools.total_ordering @functools.total_ordering
@ -88,10 +79,10 @@ class Path(Shape):
def cap(self, val: PathCap) -> None: def cap(self, val: PathCap) -> None:
self._cap = PathCap(val) self._cap = PathCap(val)
if self.cap != PathCap.SquareCustom: if self.cap != PathCap.SquareCustom:
self._cap_extensions = None self.cap_extensions = None
elif self._cap_extensions is None: elif self.cap_extensions is None:
# just got set to SquareCustom # just got set to SquareCustom
self._cap_extensions = numpy.zeros(2) self.cap_extensions = numpy.zeros(2)
# cap_extensions property # cap_extensions property
@property @property
@ -218,12 +209,9 @@ class Path(Shape):
self.vertices = vertices self.vertices = vertices
self.repetition = repetition self.repetition = repetition
self.annotations = annotations self.annotations = annotations
self._cap = cap
if cap == PathCap.SquareCustom and cap_extensions is None:
self._cap_extensions = numpy.zeros(2)
else:
self.cap_extensions = cap_extensions
self.width = width self.width = width
self.cap = cap
self.cap_extensions = cap_extensions
if rotation: if rotation:
self.rotate(rotation) self.rotate(rotation)
if numpy.any(offset): if numpy.any(offset):
@ -265,14 +253,6 @@ class Path(Shape):
if self.cap_extensions is None: if self.cap_extensions is None:
return True return True
return tuple(self.cap_extensions) < tuple(other.cap_extensions) return tuple(self.cap_extensions) < tuple(other.cap_extensions)
if not numpy.array_equal(self.vertices, other.vertices):
min_len = min(self.vertices.shape[0], other.vertices.shape[0])
eq_mask = self.vertices[:min_len] != other.vertices[:min_len]
eq_lt = self.vertices[:min_len] < other.vertices[:min_len]
eq_lt_masked = eq_lt[eq_mask]
if eq_lt_masked.size > 0:
return eq_lt_masked.flat[0]
return self.vertices.shape[0] < other.vertices.shape[0]
if self.repetition != other.repetition: if self.repetition != other.repetition:
return rep2key(self.repetition) < rep2key(other.repetition) return rep2key(self.repetition) < rep2key(other.repetition)
return annotations_lt(self.annotations, other.annotations) return annotations_lt(self.annotations, other.annotations)
@ -323,30 +303,9 @@ class Path(Shape):
) -> list['Polygon']: ) -> list['Polygon']:
extensions = self._calculate_cap_extensions() extensions = self._calculate_cap_extensions()
v = remove_colinear_vertices(self.vertices, closed_path=False, preserve_uturns=True) v = remove_colinear_vertices(self.vertices, closed_path=False)
dv = numpy.diff(v, axis=0) dv = numpy.diff(v, axis=0)
norms = numpy.sqrt((dv * dv).sum(axis=1)) dvdir = dv / numpy.sqrt((dv * dv).sum(axis=1))[:, None]
# Filter out zero-length segments if any remained after remove_colinear_vertices
valid = (norms > 1e-18)
if not numpy.all(valid):
# This shouldn't happen much if remove_colinear_vertices is working
v = v[numpy.append(valid, True)]
dv = numpy.diff(v, axis=0)
norms = norms[valid]
if dv.shape[0] == 0:
# All vertices were the same. It's a point.
if self.width == 0:
return [Polygon(vertices=numpy.zeros((3, 2)))] # Area-less degenerate
if self.cap == PathCap.Circle:
return Circle(radius=self.width / 2, offset=v[0]).to_polygons(num_vertices=num_vertices, max_arclen=max_arclen)
if self.cap == PathCap.Square:
return [Polygon.square(side_length=self.width, offset=v[0])]
# Flush or CustomSquare
return [Polygon(vertices=numpy.zeros((3, 2)))]
dvdir = dv / norms[:, None]
if self.width == 0: if self.width == 0:
verts = numpy.vstack((v, v[::-1])) verts = numpy.vstack((v, v[::-1]))
@ -365,21 +324,11 @@ class Path(Shape):
bs = v[1:-1] - v[:-2] + perp[1:] - perp[:-1] bs = v[1:-1] - v[:-2] + perp[1:] - perp[:-1]
ds = v[1:-1] - v[:-2] - perp[1:] + perp[:-1] ds = v[1:-1] - v[:-2] - perp[1:] + perp[:-1]
try: rp = numpy.linalg.solve(As, bs[:, :, None])[:, 0]
# Vectorized solve for all intersections rn = numpy.linalg.solve(As, ds[:, :, None])[:, 0]
# solve supports broadcasting: As (N-2, 2, 2), bs (N-2, 2, 1)
rp = numpy.linalg.solve(As, bs[:, :, None])[:, 0, 0]
rn = numpy.linalg.solve(As, ds[:, :, None])[:, 0, 0]
except numpy.linalg.LinAlgError:
# Fallback to slower lstsq if some segments are parallel (singular matrix)
rp = numpy.zeros(As.shape[0])
rn = numpy.zeros(As.shape[0])
for ii in range(As.shape[0]):
rp[ii] = numpy.linalg.lstsq(As[ii], bs[ii, :, None], rcond=1e-12)[0][0, 0]
rn[ii] = numpy.linalg.lstsq(As[ii], ds[ii, :, None], rcond=1e-12)[0][0, 0]
intersection_p = v[:-2] + rp[:, None] * dv[:-1] + perp[:-1] intersection_p = v[:-2] + rp * dv[:-1] + perp[:-1]
intersection_n = v[:-2] + rn[:, None] * dv[:-1] - perp[:-1] intersection_n = v[:-2] + rn * dv[:-1] - perp[:-1]
towards_perp = (dv[1:] * perp[:-1]).sum(axis=1) > 0 # path bends towards previous perp? towards_perp = (dv[1:] * perp[:-1]).sum(axis=1) > 0 # path bends towards previous perp?
# straight = (dv[1:] * perp[:-1]).sum(axis=1) == 0 # path is straight # straight = (dv[1:] * perp[:-1]).sum(axis=1) == 0 # path is straight
@ -469,11 +418,12 @@ class Path(Shape):
rotated_vertices = numpy.vstack([numpy.dot(rotation_matrix_2d(-rotation), v) rotated_vertices = numpy.vstack([numpy.dot(rotation_matrix_2d(-rotation), v)
for v in normed_vertices]) for v in normed_vertices])
# Canonical ordering for open paths: pick whichever of (v) or (v[::-1]) is smaller # Reorder the vertices so that the one with lowest x, then y, comes first.
if tuple(rotated_vertices.flat) > tuple(rotated_vertices[::-1].flat): x_min = rotated_vertices[:, 0].argmin()
reordered_vertices = rotated_vertices[::-1] if not is_scalar(x_min):
else: y_min = rotated_vertices[x_min, 1].argmin()
reordered_vertices = rotated_vertices x_min = cast('Sequence', x_min)[y_min]
reordered_vertices = numpy.roll(rotated_vertices, -x_min, axis=0)
width0 = self.width / norm_value width0 = self.width / norm_value
@ -512,7 +462,7 @@ class Path(Shape):
Returns: Returns:
self self
""" """
self.vertices = remove_colinear_vertices(self.vertices, closed_path=False, preserve_uturns=True) self.vertices = remove_colinear_vertices(self.vertices, closed_path=False)
return self return self
def _calculate_cap_extensions(self) -> NDArray[numpy.float64]: def _calculate_cap_extensions(self) -> NDArray[numpy.float64]:

View file

@ -321,7 +321,7 @@ class Polygon(Shape):
else: else:
raise PatternError('Two of ymin, yctr, ymax, ly must be None!') raise PatternError('Two of ymin, yctr, ymax, ly must be None!')
poly = Polygon.rectangle(abs(lx), abs(ly), offset=(xctr, yctr), repetition=repetition) poly = Polygon.rectangle(lx, ly, offset=(xctr, yctr), repetition=repetition)
return poly return poly
@staticmethod @staticmethod
@ -417,15 +417,11 @@ class Polygon(Shape):
for v in normed_vertices]) for v in normed_vertices])
# Reorder the vertices so that the one with lowest x, then y, comes first. # Reorder the vertices so that the one with lowest x, then y, comes first.
x_min_val = rotated_vertices[:, 0].min() x_min = rotated_vertices[:, 0].argmin()
x_min_inds = numpy.where(rotated_vertices[:, 0] == x_min_val)[0] if not is_scalar(x_min):
if x_min_inds.size > 1: y_min = rotated_vertices[x_min, 1].argmin()
y_min_val = rotated_vertices[x_min_inds, 1].min() x_min = cast('Sequence', x_min)[y_min]
tie_breaker = numpy.where(rotated_vertices[x_min_inds, 1] == y_min_val)[0][0] reordered_vertices = numpy.roll(rotated_vertices, -x_min, axis=0)
start_ind = x_min_inds[tie_breaker]
else:
start_ind = x_min_inds[0]
reordered_vertices = numpy.roll(rotated_vertices, -start_ind, axis=0)
# TODO: normalize mirroring? # TODO: normalize mirroring?

View file

@ -1,111 +0,0 @@
import numpy
from numpy.testing import assert_allclose
from pathlib import Path
from ..pattern import Pattern
from ..library import Library
from ..shapes import Path as MPath, Polygon
from ..repetition import Grid
from ..file import dxf
def test_dxf_roundtrip(tmp_path: Path):
    """Round-trip a library through DXF and check that shapes and refs survive.

    Covers a closed polygon, a 3-point open path, a 2-point open path
    (exercising 2-point polyline handling), and a Ref carrying a Manhattan
    Grid repetition.
    """
    library = Library()
    top = Pattern()

    # Closed polygon
    square = numpy.array([[0, 0], [10, 0], [10, 10], [0, 10]])
    top.polygon("1", vertices=square)

    # Open path, three vertices
    bent = numpy.array([[20, 0], [30, 0], [30, 10]])
    top.path("2", vertices=bent, width=2)

    # Open path, only two vertices; zero width so it cannot accidentally be
    # treated as a polygonized path
    segment = numpy.array([[40, 0], [50, 10]])
    top.path("3", vertices=segment, width=0)

    # Ref with a Manhattan Grid repetition
    child = Pattern()
    child.polygon("sub", vertices=[[0, 0], [1, 0], [1, 1]])
    library["sub"] = child
    top.ref("sub", offset=(100, 100),
            repetition=Grid(a_vector=(10, 0), a_count=2, b_vector=(0, 10), b_count=3))
    library["top"] = top

    out_file = tmp_path / "test.dxf"
    dxf.writefile(library, "top", out_file)
    read_lib, _ = dxf.readfile(out_file)

    # DXF readers usually expose the top level as "Model"
    top_read = (
        read_lib.get("Model")
        or read_lib.get("top")
        or list(read_lib.values())[0]
    )

    # Polygon survived (simple case: vertices should come back unchanged)
    read_polys = [shape for shape in top_read.shapes["1"] if isinstance(shape, Polygon)]
    assert len(read_polys) >= 1
    assert_allclose(read_polys[0].vertices, square)

    # Three-point path survived
    read_paths = [shape for shape in top_read.shapes["2"] if isinstance(shape, MPath)]
    assert len(read_paths) >= 1
    assert_allclose(read_paths[0].vertices, bent)
    assert read_paths[0].width == 2

    # Two-point path survived
    read_paths2 = [shape for shape in top_read.shapes["3"] if isinstance(shape, MPath)]
    assert len(read_paths2) >= 1
    assert_allclose(read_paths2[0].vertices, segment)
    assert read_paths2[0].width == 0

    # The sub-block should be present in the read-back library, even though
    # DXF block naming can be finicky
    assert "sub" in read_lib

    # The Grid repetition on the ref should have been preserved
    found_grid = False
    for target, reflist in top_read.refs.items():
        # ezdxf generally preserves names; compare case-insensitively anyway
        if target.upper() == "SUB":
            for ref in reflist:
                if isinstance(ref.repetition, Grid):
                    assert ref.repetition.a_count == 2
                    assert ref.repetition.b_count == 3
                    assert_allclose(ref.repetition.a_vector, (10, 0))
                    assert_allclose(ref.repetition.b_vector, (0, 10))
                    found_grid = True
    assert found_grid, f"Manhattan Grid repetition should have been preserved. Targets: {list(top_read.refs.keys())}"
def test_dxf_manhattan_precision(tmp_path: Path):
    """Check that float precision does not break Manhattan grid detection.

    A masque grid is not rotated along with the ref: it stays [[10,0],[0,10]].
    In DXF, an array with rotation 90 has basis vectors [[0,10],[-10,0]], so a
    masque grid [[10,0],[0,10]] combined with a ref rotation of 90 degrees
    should round-trip as a DXF array. If the isclose()-based comparison works,
    the repetition comes back as a Grid rather than being flattened.
    """
    library = Library()
    child = Pattern()
    child.polygon("1", vertices=[[0, 0], [1, 0], [1, 1]])
    library["sub"] = child

    top = Pattern()
    quarter_turn = numpy.pi / 2  # 90 degrees
    top.ref("sub", offset=(0, 0), rotation=quarter_turn,
            repetition=Grid(a_vector=(10, 0), a_count=2, b_vector=(0, 10), b_count=2))
    library["top"] = top

    out_file = tmp_path / "precision.dxf"
    dxf.writefile(library, "top", out_file)

    read_lib, _ = dxf.readfile(out_file)
    read_top = (
        read_lib.get("Model")
        or read_lib.get("top")
        or list(read_lib.values())[0]
    )
    sub_name = next(k for k in read_top.refs if k.upper() == "SUB")
    read_ref = read_top.refs[sub_name][0]
    assert isinstance(read_ref.repetition, Grid), "Grid should be preserved for 90-degree rotation"

View file

@ -5,6 +5,7 @@ from numpy.testing import assert_allclose
from ..pattern import Pattern from ..pattern import Pattern
from ..library import Library from ..library import Library
from ..file import gdsii, oasis
from ..shapes import Path as MPath, Circle, Polygon from ..shapes import Path as MPath, Circle, Polygon
from ..repetition import Grid, Arbitrary from ..repetition import Grid, Arbitrary
@ -61,7 +62,6 @@ def create_test_library(for_gds: bool = False) -> Library:
return lib return lib
def test_gdsii_full_roundtrip(tmp_path: Path) -> None: def test_gdsii_full_roundtrip(tmp_path: Path) -> None:
from ..file import gdsii
lib = create_test_library(for_gds=True) lib = create_test_library(for_gds=True)
gds_file = tmp_path / "full_test.gds" gds_file = tmp_path / "full_test.gds"
gdsii.writefile(lib, gds_file, meters_per_unit=1e-9) gdsii.writefile(lib, gds_file, meters_per_unit=1e-9)
@ -110,7 +110,6 @@ def test_gdsii_full_roundtrip(tmp_path: Path) -> None:
def test_oasis_full_roundtrip(tmp_path: Path) -> None: def test_oasis_full_roundtrip(tmp_path: Path) -> None:
pytest.importorskip("fatamorgana") pytest.importorskip("fatamorgana")
from ..file import oasis
lib = create_test_library(for_gds=False) lib = create_test_library(for_gds=False)
oas_file = tmp_path / "full_test.oas" oas_file = tmp_path / "full_test.oas"
oasis.writefile(lib, oas_file, units_per_micron=1000) oasis.writefile(lib, oas_file, units_per_micron=1000)

View file

@ -4,10 +4,12 @@ from numpy.testing import assert_equal
from ..pattern import Pattern from ..pattern import Pattern
from ..library import Library from ..library import Library
from ..file import oasis
def test_oasis_roundtrip(tmp_path: Path) -> None: def test_oasis_roundtrip(tmp_path: Path) -> None:
# Skip if fatamorgana is not installed # Skip if fatamorgana is not installed
pytest.importorskip("fatamorgana") pytest.importorskip("fatamorgana")
from ..file import oasis
lib = Library() lib = Library()
pat1 = Pattern() pat1 = Pattern()

View file

@ -29,19 +29,14 @@ def test_remove_colinear_vertices() -> None:
def test_remove_colinear_vertices_exhaustive() -> None: def test_remove_colinear_vertices_exhaustive() -> None:
# U-turn # U-turn
v = [[0, 0], [10, 0], [0, 0]] v = [[0, 0], [10, 0], [0, 0]]
v_clean = remove_colinear_vertices(v, closed_path=False, preserve_uturns=True) v_clean = remove_colinear_vertices(v, closed_path=False)
# Open path should keep ends. [10,0] is between [0,0] and [0,0]? # Open path should keep ends. [10,0] is between [0,0] and [0,0]?
# They are colinear, but it's a 180 degree turn. # Yes, they are all on the same line.
# We preserve 180 degree turns if preserve_uturns is True. assert len(v_clean) == 2
assert len(v_clean) == 3
v_collapsed = remove_colinear_vertices(v, closed_path=False, preserve_uturns=False)
# If not preserving u-turns, it should collapse to just the endpoints
assert len(v_collapsed) == 2
# 180 degree U-turn in closed path # 180 degree U-turn in closed path
v = [[0, 0], [10, 0], [5, 0]] v = [[0, 0], [10, 0], [5, 0]]
v_clean = remove_colinear_vertices(v, closed_path=True, preserve_uturns=False) v_clean = remove_colinear_vertices(v, closed_path=True)
assert len(v_clean) == 2 assert len(v_clean) == 2
@ -69,7 +64,7 @@ def test_apply_transforms() -> None:
t1 = [10, 20, 0, 0] t1 = [10, 20, 0, 0]
t2 = [[5, 0, 0, 0], [0, 5, 0, 0]] t2 = [[5, 0, 0, 0], [0, 5, 0, 0]]
combined = apply_transforms(t1, t2) combined = apply_transforms(t1, t2)
assert_equal(combined, [[15, 20, 0, 0, 1], [10, 25, 0, 0, 1]]) assert_equal(combined, [[15, 20, 0, 0], [10, 25, 0, 0]])
def test_apply_transforms_advanced() -> None: def test_apply_transforms_advanced() -> None:
@ -85,4 +80,4 @@ def test_apply_transforms_advanced() -> None:
# 1. mirror inner y if outer mirrored: (10, 0) -> (10, 0) # 1. mirror inner y if outer mirrored: (10, 0) -> (10, 0)
# 2. rotate by outer rotation (pi/2): (10, 0) -> (0, 10) # 2. rotate by outer rotation (pi/2): (10, 0) -> (0, 10)
# 3. add outer offset (0, 0) -> (0, 10) # 3. add outer offset (0, 0) -> (0, 10)
assert_allclose(combined[0], [0, 10, pi / 2, 1, 1], atol=1e-10) assert_allclose(combined[0], [0, 10, pi / 2, 1], atol=1e-10)

View file

@ -60,4 +60,4 @@ class DeferredDict(dict, Generic[Key, Value]):
Convenience function to avoid having to manually wrap Convenience function to avoid having to manually wrap
constant values into callables. constant values into callables.
""" """
self[key] = lambda v=value: v self[key] = lambda: value

View file

@ -57,9 +57,11 @@ def data_to_ports(
name: str | None = None, # Note: name optional, but arg order different from read(postprocess=) name: str | None = None, # Note: name optional, but arg order different from read(postprocess=)
max_depth: int = 0, max_depth: int = 0,
skip_subcells: bool = True, skip_subcells: bool = True,
visited: set[int] | None = None, # TODO missing ok?
) -> Pattern: ) -> Pattern:
""" """
# TODO fixup documentation in ports2data
# TODO move to utils.file?
Examine `pattern` for labels specifying port info, and use that info Examine `pattern` for labels specifying port info, and use that info
to fill out its `ports` attribute. to fill out its `ports` attribute.
@ -68,30 +70,18 @@ def data_to_ports(
Args: Args:
layers: Search for labels on all the given layers. layers: Search for labels on all the given layers.
library: Mapping from pattern names to patterns.
pattern: Pattern object to scan for labels. pattern: Pattern object to scan for labels.
name: Name of the pattern object. max_depth: Maximum hierarcy depth to search. Default 999_999.
max_depth: Maximum hierarcy depth to search. Default 0.
Reduce this to 0 to avoid ever searching subcells. Reduce this to 0 to avoid ever searching subcells.
skip_subcells: If port labels are found at a given hierarcy level, skip_subcells: If port labels are found at a given hierarcy level,
do not continue searching at deeper levels. This allows subcells do not continue searching at deeper levels. This allows subcells
to contain their own port info without interfering with supercells' to contain their own port info without interfering with supercells'
port data. port data.
Default True. Default True.
visited: Set of object IDs which have already been processed.
Returns: Returns:
The updated `pattern`. Port labels are not removed. The updated `pattern`. Port labels are not removed.
""" """
if visited is None:
visited = set()
# Note: visited uses id(pattern) to detect cycles and avoid redundant processing.
# This may not catch identical patterns if they are loaded as separate object instances.
if id(pattern) in visited:
return pattern
visited.add(id(pattern))
if pattern.ports: if pattern.ports:
logger.warning(f'Pattern {name if name else pattern} already had ports, skipping data_to_ports') logger.warning(f'Pattern {name if name else pattern} already had ports, skipping data_to_ports')
return pattern return pattern
@ -109,13 +99,12 @@ def data_to_ports(
if target is None: if target is None:
continue continue
pp = data_to_ports( pp = data_to_ports(
layers = layers, layers=layers,
library = library, library=library,
pattern = library[target], pattern=library[target],
name = target, name=target,
max_depth = max_depth - 1, max_depth=max_depth - 1,
skip_subcells = skip_subcells, skip_subcells=skip_subcells,
visited = visited,
) )
found_ports |= bool(pp.ports) found_ports |= bool(pp.ports)
@ -171,17 +160,13 @@ def data_to_ports_flat(
local_ports = {} local_ports = {}
for label in labels: for label in labels:
if ':' not in label.string: name, property_string = label.string.split(':')
logger.warning(f'Invalid port label "{label.string}" in pattern "{pstr}" (missing ":")') properties = property_string.split(' ')
continue ptype = properties[0]
angle_deg = float(properties[1]) if len(ptype) else 0
name, property_string = label.string.split(':', 1)
properties = property_string.split()
ptype = properties[0] if len(properties) > 0 else 'unk'
angle_deg = float(properties[1]) if len(properties) > 1 else numpy.inf
xy = label.offset xy = label.offset
angle = numpy.deg2rad(angle_deg) if numpy.isfinite(angle_deg) else None angle = numpy.deg2rad(angle_deg)
if name in local_ports: if name in local_ports:
logger.warning(f'Duplicate port "{name}" in pattern "{pstr}"') logger.warning(f'Duplicate port "{name}" in pattern "{pstr}"')

View file

@ -28,9 +28,8 @@ def rotation_matrix_2d(theta: float) -> NDArray[numpy.float64]:
arr = numpy.array([[numpy.cos(theta), -numpy.sin(theta)], arr = numpy.array([[numpy.cos(theta), -numpy.sin(theta)],
[numpy.sin(theta), +numpy.cos(theta)]]) [numpy.sin(theta), +numpy.cos(theta)]])
# If this was a manhattan rotation, round to remove some inaccuracies in sin & cos # If this was a manhattan rotation, round to remove some inacuraccies in sin & cos
# cos(4*theta) is 1 for any multiple of pi/2. if numpy.isclose(theta % (pi / 2), 0):
if numpy.isclose(numpy.cos(4 * theta), 1, atol=1e-12):
arr = numpy.round(arr) arr = numpy.round(arr)
arr.flags.writeable = False arr.flags.writeable = False
@ -87,50 +86,37 @@ def apply_transforms(
Apply a set of transforms (`outer`) to a second set (`inner`). Apply a set of transforms (`outer`) to a second set (`inner`).
This is used to find the "absolute" transform for nested `Ref`s. This is used to find the "absolute" transform for nested `Ref`s.
The two transforms should be of shape Ox5 and Ix5. The two transforms should be of shape Ox4 and Ix4.
Rows should be of the form `(x_offset, y_offset, rotation_ccw_rad, mirror_across_x, scale)`. Rows should be of the form `(x_offset, y_offset, rotation_ccw_rad, mirror_across_x)`.
The output will be of the form (O*I)x5 (if `tensor=False`) or OxIx5 (`tensor=True`). The output will be of the form (O*I)x4 (if `tensor=False`) or OxIx4 (`tensor=True`).
Args: Args:
outer: Transforms for the container refs. Shape Ox5. outer: Transforms for the container refs. Shape Ox4.
inner: Transforms for the contained refs. Shape Ix5. inner: Transforms for the contained refs. Shape Ix4.
tensor: If `True`, an OxIx5 array is returned, with `result[oo, ii, :]` corresponding tensor: If `True`, an OxIx4 array is returned, with `result[oo, ii, :]` corresponding
to the `oo`th `outer` transform applied to the `ii`th inner transform. to the `oo`th `outer` transform applied to the `ii`th inner transform.
If `False` (default), this is concatenated into `(O*I)x5` to allow simple If `False` (default), this is concatenated into `(O*I)x4` to allow simple
chaining into additional `apply_transforms()` calls. chaining into additional `apply_transforms()` calls.
Returns: Returns:
OxIx5 or (O*I)x5 array. Final dimension is OxIx4 or (O*I)x4 array. Final dimension is
`(total_x, total_y, total_rotation_ccw_rad, net_mirrored_x, total_scale)`. `(total_x, total_y, total_rotation_ccw_rad, net_mirrored_x)`.
""" """
outer = numpy.atleast_2d(outer).astype(float, copy=False) outer = numpy.atleast_2d(outer).astype(float, copy=False)
inner = numpy.atleast_2d(inner).astype(float, copy=False) inner = numpy.atleast_2d(inner).astype(float, copy=False)
if outer.shape[1] == 4:
outer = numpy.pad(outer, ((0, 0), (0, 1)), constant_values=1.0)
if inner.shape[1] == 4:
inner = numpy.pad(inner, ((0, 0), (0, 1)), constant_values=1.0)
# If mirrored, flip y's # If mirrored, flip y's
xy_mir = numpy.tile(inner[:, :2], (outer.shape[0], 1, 1)) # dims are outer, inner, xyrm xy_mir = numpy.tile(inner[:, :2], (outer.shape[0], 1, 1)) # dims are outer, inner, xyrm
xy_mir[outer[:, 3].astype(bool), :, 1] *= -1 xy_mir[outer[:, 3].astype(bool), :, 1] *= -1
# Apply outer scale to inner offset
xy_mir *= outer[:, None, 4, None]
rot_mats = [rotation_matrix_2d(angle) for angle in outer[:, 2]] rot_mats = [rotation_matrix_2d(angle) for angle in outer[:, 2]]
xy = numpy.einsum('ort,oit->oir', rot_mats, xy_mir) xy = numpy.einsum('ort,oit->oir', rot_mats, xy_mir)
tot = numpy.empty((outer.shape[0], inner.shape[0], 5)) tot = numpy.empty((outer.shape[0], inner.shape[0], 4))
tot[:, :, :2] = outer[:, None, :2] + xy tot[:, :, :2] = outer[:, None, :2] + xy
tot[:, :, 2:] = outer[:, None, 2:] + inner[None, :, 2:] # sum rotations and mirrored
# If mirrored, flip inner rotation tot[:, :, 2] %= 2 * pi # clamp rot
mirrored_outer = outer[:, None, 3].astype(bool) tot[:, :, 3] %= 2 # clamp mirrored
rotations = outer[:, None, 2] + numpy.where(mirrored_outer, -inner[None, :, 2], inner[None, :, 2])
tot[:, :, 2] = rotations % (2 * pi)
tot[:, :, 3] = (outer[:, None, 3] + inner[None, :, 3]) % 2 # net mirrored
tot[:, :, 4] = outer[:, None, 4] * inner[None, :, 4] # net scale
if tensor: if tensor:
return tot return tot

View file

@ -30,11 +30,7 @@ def remove_duplicate_vertices(vertices: ArrayLike, closed_path: bool = True) ->
return result return result
def remove_colinear_vertices( def remove_colinear_vertices(vertices: ArrayLike, closed_path: bool = True) -> NDArray[numpy.float64]:
vertices: ArrayLike,
closed_path: bool = True,
preserve_uturns: bool = False,
) -> NDArray[numpy.float64]:
""" """
Given a list of vertices, remove any superflous vertices (i.e. Given a list of vertices, remove any superflous vertices (i.e.
those which lie along the line formed by their neighbors) those which lie along the line formed by their neighbors)
@ -43,33 +39,21 @@ def remove_colinear_vertices(
vertices: Nx2 ndarray of vertices vertices: Nx2 ndarray of vertices
closed_path: If `True`, the vertices are assumed to represent an implicitly closed_path: If `True`, the vertices are assumed to represent an implicitly
closed path. If `False`, the path is assumed to be open. Default `True`. closed path. If `False`, the path is assumed to be open. Default `True`.
preserve_uturns: If `True`, colinear vertices that correspond to a 180 degree
turn (a "spike") are preserved. Default `False`.
Returns: Returns:
`vertices` with colinear (superflous) vertices removed. May be a view into the original array. `vertices` with colinear (superflous) vertices removed. May be a view into the original array.
""" """
vertices = remove_duplicate_vertices(vertices, closed_path=closed_path) vertices = remove_duplicate_vertices(vertices)
# Check for dx0/dy0 == dx1/dy1 # Check for dx0/dy0 == dx1/dy1
dv = numpy.roll(vertices, -1, axis=0) - vertices
if not closed_path:
dv[-1] = 0
# dxdy[i] is based on dv[i] and dv[i-1] dv = numpy.roll(vertices, -1, axis=0) - vertices # [y1-y0, y2-y1, ...]
# slopes_equal[i] refers to vertex i dxdy = dv * numpy.roll(dv, 1, axis=0)[:, ::-1] # [[dx0*(dy_-1), (dx_-1)*dy0], dx1*dy0, dy1*dx0]]
dxdy = dv * numpy.roll(dv, 1, axis=0)[:, ::-1]
dxdy_diff = numpy.abs(numpy.diff(dxdy, axis=1))[:, 0] dxdy_diff = numpy.abs(numpy.diff(dxdy, axis=1))[:, 0]
err_mult = 2 * numpy.abs(dxdy).sum(axis=1) + 1e-40 err_mult = 2 * numpy.abs(dxdy).sum(axis=1) + 1e-40
slopes_equal = (dxdy_diff / err_mult) < 1e-15 slopes_equal = (dxdy_diff / err_mult) < 1e-15
if preserve_uturns:
# Only merge if segments are in the same direction (avoid collapsing u-turns)
dot_prod = (dv * numpy.roll(dv, 1, axis=0)).sum(axis=1)
slopes_equal &= (dot_prod > 0)
if not closed_path: if not closed_path:
slopes_equal[[0, -1]] = False slopes_equal[[0, -1]] = False

View file

@ -65,7 +65,7 @@ path = "masque/__init__.py"
[project.optional-dependencies] [project.optional-dependencies]
oasis = ["fatamorgana~=0.11"] oasis = ["fatamorgana~=0.11"]
dxf = ["ezdxf~=1.4"] dxf = ["ezdxf~=1.0.2"]
svg = ["svgwrite"] svg = ["svgwrite"]
visualize = ["matplotlib"] visualize = ["matplotlib"]
text = ["matplotlib", "freetype-py"] text = ["matplotlib", "freetype-py"]
@ -110,9 +110,6 @@ lint.ignore = [
[tool.pytest.ini_options] [tool.pytest.ini_options]
addopts = "-rsXx" addopts = "-rsXx"
testpaths = ["masque"] testpaths = ["masque"]
filterwarnings = [
"ignore::DeprecationWarning:ezdxf.*",
]
[tool.mypy] [tool.mypy]
mypy_path = "stubs" mypy_path = "stubs"