Compare commits

...

43 commits

Author SHA1 Message Date
jan
db22237369 [PathCap] clean up comment 2026-03-09 11:20:04 -07:00
jan
a6ea5c08e6 [repetition.Grid] drop b_vector=None handling (guaranteed to be zeros now) 2026-03-09 11:19:42 -07:00
jan
3792248cd1 [dxf] improve dxf reader (ezdxf 1.4 related LWPolyLine changes) 2026-03-09 11:16:30 -07:00
jan
e8083cc24c [dxf] hide ezdxf warnings directly 2026-03-09 03:37:42 -07:00
jan
d307589995 [ports2data] add note about using id rather than name 2026-03-09 03:29:19 -07:00
jan
ea93a7ef37 [remove_colinear_vertices / Path] add preserve_uturns and use it for paths 2026-03-09 03:28:31 -07:00
jan
495babf837 [Path] revert endcap changes to avoid double-counting 2026-03-09 03:27:39 -07:00
jan
5d20a061fd [Path / Polygon] improve normalized_form approach to follow documented order 2026-03-09 02:42:13 -07:00
jan
25b8fe8448 [Path.to_polygons] Use linalg.solve() where possible; fallback to lstsq if singular 2026-03-09 02:41:15 -07:00
jan
f154303bef [remove_colinear_vertices] treat unclosed paths correctly 2026-03-09 02:38:33 -07:00
jan
5596e2b1af [tests] cover scale-aware transform 2026-03-09 02:35:35 -07:00
jan
6c42049b23 [PortList] actually raise the error 2026-03-09 02:34:57 -07:00
jan
da20922224 [apply_transform] include scale in transform 2026-03-09 02:34:11 -07:00
jan
b8ee4bb05d [ell] fix set_rotation check 2026-03-09 02:32:20 -07:00
jan
169f66cc85 [rotation_matrix_2d] improve manhattan angle detection
modulo causes issues with negative numbers
2026-03-09 01:16:54 -07:00
jan
a38c5bb085 [ports2data] deal with cycles better 2026-03-09 01:15:42 -07:00
jan
0ad89d6d95 [DeferredDict] capture value in set_const 2026-03-09 01:10:26 -07:00
jan
6c96968341 [Path] improve robustness of intersection calculations 2026-03-09 01:09:37 -07:00
jan
b7143e3287 [repetition.Grid] fix __le__ comparison of b_vector 2026-03-09 01:08:35 -07:00
jan
0cce5e0586 [Ref] misc copy fixes -- don't deepcopy repetition or annotations in __copy__ 2026-03-09 01:07:50 -07:00
jan
36cb86a15d [tests] clean unused imports 2026-03-09 00:20:29 -07:00
jan
5e0936e15f [dxf] update ezdxf dep 2026-03-09 00:18:06 -07:00
jan
a467a0baca [Path] simplify conditional 2026-03-09 00:17:50 -07:00
jan
564ff10db3 [dxf] add roundtrip dxf test, enable refs and improve path handling 2026-03-09 00:17:23 -07:00
jan
e261585894 [gdsii] Try to close files if able 2026-03-08 23:09:45 -07:00
jan
f42114bf43 [gdsii] explicitly cast cap_extensions to int 2026-03-08 22:47:22 -07:00
jan
5eb460ecb7 [repetition.Grid] disallow b_vector=None (except when initializing) 2026-03-08 22:43:58 -07:00
jan
fb822829ec [Polygon] rect() should call rectangle() with positive width/height
no big deal, but this makes vertex order consistent
2026-03-08 22:42:48 -07:00
jan
838c742651 [Path] Improve comparisons: compare vertices 2026-03-08 22:41:37 -07:00
jan
9a76ce5b66 [Path] cap_extensions=None should mean [0, 0] when using custom extensions 2026-03-08 22:41:11 -07:00
jan
2019fc0d74 [Path] Circular cap extensions should translate to square, not empty 2026-03-08 22:40:08 -07:00
jan
e3f8d28529 [Path] improve __lt__ for endcaps 2026-03-08 22:37:30 -07:00
jan
9296011d4b [Ref] deepcopy annotations and repetitions 2026-03-08 22:34:39 -07:00
jan
92d0140093 [Pattern] fix pattern comparisons 2026-03-08 22:33:59 -07:00
jan
c4dc9f9573 [oasis] comment and code cleanup 2026-03-08 22:32:16 -07:00
jan
0b8e11e8bf [dxf] improve manhattan check robustness 2026-03-08 22:31:18 -07:00
jan
5989e45906 [apply_transforms] fix handling of rotations while mirrored 2026-03-08 21:38:47 -07:00
jan
7eec2b7acf [LazyLibrary] report full cycle when one is detected 2026-03-08 21:18:54 -07:00
jan
2a6458b1ac [repetitions.Arbitrary] reassign to displacements when scaling or mirroring to trigger re-sort 2026-03-08 20:43:33 -07:00
jan
9ee3c7ff89 [ILibrary] make referenced_patterns more robust to cyclical dependencies 2026-03-08 20:01:00 -07:00
jan
3bedab2301 [ports2data] Make port label parsing more robust 2026-03-08 19:58:56 -07:00
jan
4eb1d8d486 [gdsii] fix missing paren in message 2026-03-08 19:57:49 -07:00
jan
3ceeba23b8 [tests] move imports into functions 2026-03-08 19:00:20 -07:00
20 changed files with 418 additions and 151 deletions

View file

@@ -106,7 +106,7 @@ def ell(
raise BuildError('Asked to find aggregation for ports that face in different directions:\n' raise BuildError('Asked to find aggregation for ports that face in different directions:\n'
+ pformat(port_rotations)) + pformat(port_rotations))
else: else:
if set_rotation is not None: if set_rotation is None:
raise BuildError('set_rotation must be specified if no ports have rotations!') raise BuildError('set_rotation must be specified if no ports have rotations!')
rotations = numpy.full_like(has_rotation, set_rotation, dtype=float) rotations = numpy.full_like(has_rotation, set_rotation, dtype=float)

View file

@@ -16,7 +16,7 @@ import gzip
import numpy import numpy
import ezdxf import ezdxf
from ezdxf.enums import TextEntityAlignment from ezdxf.enums import TextEntityAlignment
from ezdxf.entities import LWPolyline, Polyline, Text, Insert from ezdxf.entities import LWPolyline, Polyline, Text, Insert, Solid, Trace
from .utils import is_gzipped, tmpfile from .utils import is_gzipped, tmpfile
from .. import Pattern, Ref, PatternError, Label from .. import Pattern, Ref, PatternError, Label
@@ -217,27 +217,54 @@ def _read_block(block: ezdxf.layouts.BlockLayout | ezdxf.layouts.Modelspace) ->
attr = element.dxfattribs() attr = element.dxfattribs()
layer = attr.get('layer', DEFAULT_LAYER) layer = attr.get('layer', DEFAULT_LAYER)
if points.shape[1] == 2: width = 0
raise PatternError('Invalid or unimplemented polygon?') if isinstance(element, LWPolyline):
# ezdxf 1.4+ get_points() returns (x, y, start_width, end_width, bulge)
if points.shape[1] > 2: if points.shape[1] >= 5:
if (points[0, 2] != points[:, 2]).any(): if (points[:, 4] != 0).any():
raise PatternError('PolyLine has non-constant width (not yet representable in masque!)') raise PatternError('LWPolyline has bulge (not yet representable in masque!)')
if points.shape[1] == 4 and (points[:, 3] != 0).any(): if (points[:, 2] != points[:, 3]).any() or (points[:, 2] != points[0, 2]).any():
raise PatternError('LWPolyLine has bulge (not yet representable in masque!)') raise PatternError('LWPolyline has non-constant width (not yet representable in masque!)')
width = points[0, 2] width = points[0, 2]
elif points.shape[1] == 3:
# width used to be in column 2
width = points[0, 2]
if width == 0: if width == 0:
width = attr.get('const_width', 0) width = attr.get('const_width', 0)
is_closed = element.closed
verts = points[:, :2]
if is_closed and (len(verts) < 2 or not numpy.allclose(verts[0], verts[-1])):
verts = numpy.vstack((verts, verts[0]))
shape: Path | Polygon shape: Path | Polygon
if width == 0 and len(points) > 2 and numpy.array_equal(points[0], points[-1]): if width == 0 and is_closed:
shape = Polygon(vertices=points[:-1, :2]) # Use Polygon if it has at least 3 unique vertices
shape_verts = verts[:-1] if len(verts) > 1 else verts
if len(shape_verts) >= 3:
shape = Polygon(vertices=shape_verts)
else: else:
shape = Path(width=width, vertices=points[:, :2]) shape = Path(width=width, vertices=verts)
else:
shape = Path(width=width, vertices=verts)
pat.shapes[layer].append(shape) pat.shapes[layer].append(shape)
elif isinstance(element, Solid | Trace):
attr = element.dxfattribs()
layer = attr.get('layer', DEFAULT_LAYER)
points = numpy.array([element.get_dxf_attrib(f'vtx{i}') for i in range(4)
if element.has_dxf_attrib(f'vtx{i}')])
if len(points) >= 3:
# If vtx2 == vtx3, it's a triangle. ezdxf handles this.
if len(points) == 4 and numpy.allclose(points[2], points[3]):
verts = points[:3, :2]
# DXF Solid/Trace uses 0-1-3-2 vertex order for quadrilaterals!
elif len(points) == 4:
verts = points[[0, 1, 3, 2], :2]
else:
verts = points[:, :2]
pat.shapes[layer].append(Polygon(vertices=verts))
elif isinstance(element, Text): elif isinstance(element, Text):
args = dict( args = dict(
offset=numpy.asarray(element.get_placement()[1])[:2], offset=numpy.asarray(element.get_placement()[1])[:2],
@@ -302,15 +329,23 @@ def _mrefs_to_drefs(
elif isinstance(rep, Grid): elif isinstance(rep, Grid):
a = rep.a_vector a = rep.a_vector
b = rep.b_vector if rep.b_vector is not None else numpy.zeros(2) b = rep.b_vector if rep.b_vector is not None else numpy.zeros(2)
rotated_a = rotation_matrix_2d(-ref.rotation) @ a # In masque, the grid basis vectors are NOT rotated by the reference's rotation.
rotated_b = rotation_matrix_2d(-ref.rotation) @ b # In DXF, the grid basis vectors are [column_spacing, 0] and [0, row_spacing],
if rotated_a[1] == 0 and rotated_b[0] == 0: # which ARE then rotated by the block reference's rotation.
# Therefore, we can only use a DXF array if ref.rotation is 0 (or a multiple of 90)
# AND the grid is already manhattan.
# Rotate basis vectors by the reference rotation to see where they end up in the DXF frame
rotated_a = rotation_matrix_2d(ref.rotation) @ a
rotated_b = rotation_matrix_2d(ref.rotation) @ b
if numpy.isclose(rotated_a[1], 0, atol=1e-8) and numpy.isclose(rotated_b[0], 0, atol=1e-8):
attribs['column_count'] = rep.a_count attribs['column_count'] = rep.a_count
attribs['row_count'] = rep.b_count attribs['row_count'] = rep.b_count
attribs['column_spacing'] = rotated_a[0] attribs['column_spacing'] = rotated_a[0]
attribs['row_spacing'] = rotated_b[1] attribs['row_spacing'] = rotated_b[1]
block.add_blockref(encoded_name, ref.offset, dxfattribs=attribs) block.add_blockref(encoded_name, ref.offset, dxfattribs=attribs)
elif rotated_a[0] == 0 and rotated_b[1] == 0: elif numpy.isclose(rotated_a[0], 0, atol=1e-8) and numpy.isclose(rotated_b[1], 0, atol=1e-8):
attribs['column_count'] = rep.b_count attribs['column_count'] = rep.b_count
attribs['row_count'] = rep.a_count attribs['row_count'] = rep.a_count
attribs['column_spacing'] = rotated_b[0] attribs['column_spacing'] = rotated_b[0]
@@ -348,10 +383,18 @@ def _shapes_to_elements(
displacements = shape.repetition.displacements displacements = shape.repetition.displacements
for dd in displacements: for dd in displacements:
if isinstance(shape, Path):
# preserve path.
# Note: DXF paths don't support endcaps well, so this is still a bit limited.
xy = shape.vertices + dd
attribs_path = {**attribs}
if shape.width > 0:
attribs_path['const_width'] = shape.width
block.add_lwpolyline(xy, dxfattribs=attribs_path)
else:
for polygon in shape.to_polygons(): for polygon in shape.to_polygons():
xy_open = polygon.vertices + dd xy_open = polygon.vertices + dd
xy_closed = numpy.vstack((xy_open, xy_open[0, :])) block.add_lwpolyline(xy_open, close=True, dxfattribs=attribs)
block.add_lwpolyline(xy_closed, dxfattribs=attribs)
def _labels_to_texts( def _labels_to_texts(

View file

@@ -453,7 +453,7 @@ def _shapes_to_elements(
extension: tuple[int, int] extension: tuple[int, int]
if shape.cap == Path.Cap.SquareCustom and shape.cap_extensions is not None: if shape.cap == Path.Cap.SquareCustom and shape.cap_extensions is not None:
extension = tuple(shape.cap_extensions) # type: ignore extension = tuple(rint_cast(shape.cap_extensions))
else: else:
extension = (0, 0) extension = (0, 0)
@@ -617,7 +617,12 @@ def load_libraryfile(
stream = mmap.mmap(base_stream.fileno(), 0, access=mmap.ACCESS_READ) # type: ignore stream = mmap.mmap(base_stream.fileno(), 0, access=mmap.ACCESS_READ) # type: ignore
else: else:
stream = path.open(mode='rb') # noqa: SIM115 stream = path.open(mode='rb') # noqa: SIM115
try:
return load_library(stream, full_load=full_load, postprocess=postprocess) return load_library(stream, full_load=full_load, postprocess=postprocess)
finally:
if full_load:
stream.close()
def check_valid_names( def check_valid_names(
@@ -648,7 +653,7 @@ def check_valid_names(
logger.error('Names contain invalid characters:\n' + pformat(bad_chars)) logger.error('Names contain invalid characters:\n' + pformat(bad_chars))
if bad_lengths: if bad_lengths:
logger.error(f'Names too long (>{max_length}:\n' + pformat(bad_chars)) logger.error(f'Names too long (>{max_length}):\n' + pformat(bad_lengths))
if bad_chars or bad_lengths: if bad_chars or bad_lengths:
raise LibraryError('Library contains invalid names, see log above') raise LibraryError('Library contains invalid names, see log above')

View file

@@ -182,8 +182,8 @@ def writefile(
Args: Args:
library: A {name: Pattern} mapping of patterns to write. library: A {name: Pattern} mapping of patterns to write.
filename: Filename to save to. filename: Filename to save to.
*args: passed to `oasis.write` *args: passed to `oasis.build()`
**kwargs: passed to `oasis.write` **kwargs: passed to `oasis.build()`
""" """
path = pathlib.Path(filename) path = pathlib.Path(filename)
@@ -213,9 +213,9 @@ def readfile(
Will automatically decompress gzipped files. Will automatically decompress gzipped files.
Args: Args:
filename: Filename to save to. filename: Filename to load from.
*args: passed to `oasis.read` *args: passed to `oasis.read()`
**kwargs: passed to `oasis.read` **kwargs: passed to `oasis.read()`
""" """
path = pathlib.Path(filename) path = pathlib.Path(filename)
if is_gzipped(path): if is_gzipped(path):
@@ -717,10 +717,6 @@ def properties_to_annotations(
annotations[key] = values annotations[key] = values
return annotations return annotations
properties = [fatrec.Property(key, vals, is_standard=False)
for key, vals in annotations.items()]
return properties
def check_valid_names( def check_valid_names(
names: Iterable[str], names: Iterable[str],

View file

@@ -186,9 +186,9 @@ class ILibraryView(Mapping[str, 'Pattern'], metaclass=ABCMeta):
# Perform recursive lookups, but only once for each name # Perform recursive lookups, but only once for each name
for target in targets - skip: for target in targets - skip:
assert target is not None assert target is not None
skip.add(target)
if target in self: if target in self:
targets |= self.referenced_patterns(target, skip=skip) targets |= self.referenced_patterns(target, skip=skip)
skip.add(target)
return targets return targets
@@ -466,9 +466,11 @@ class ILibraryView(Mapping[str, 'Pattern'], metaclass=ABCMeta):
memo = {} memo = {}
if transform is None or transform is True: if transform is None or transform is True:
transform = numpy.zeros(4) transform = numpy.array([0, 0, 0, 0, 1], dtype=float)
elif transform is not False: elif transform is not False:
transform = numpy.asarray(transform, dtype=float) transform = numpy.asarray(transform, dtype=float)
if transform.size == 4:
transform = numpy.append(transform, 1.0)
original_pattern = pattern original_pattern = pattern
@@ -1267,12 +1269,12 @@ class LazyLibrary(ILibrary):
""" """
mapping: dict[str, Callable[[], 'Pattern']] mapping: dict[str, Callable[[], 'Pattern']]
cache: dict[str, 'Pattern'] cache: dict[str, 'Pattern']
_lookups_in_progress: set[str] _lookups_in_progress: list[str]
def __init__(self) -> None: def __init__(self) -> None:
self.mapping = {} self.mapping = {}
self.cache = {} self.cache = {}
self._lookups_in_progress = set() self._lookups_in_progress = []
def __setitem__( def __setitem__(
self, self,
@@ -1303,16 +1305,20 @@ class LazyLibrary(ILibrary):
return self.cache[key] return self.cache[key]
if key in self._lookups_in_progress: if key in self._lookups_in_progress:
chain = ' -> '.join(self._lookups_in_progress + [key])
raise LibraryError( raise LibraryError(
f'Detected multiple simultaneous lookups of "{key}".\n' f'Detected circular reference or recursive lookup of "{key}".\n'
f'Lookup chain: {chain}\n'
'This may be caused by an invalid (cyclical) reference, or buggy code.\n' 'This may be caused by an invalid (cyclical) reference, or buggy code.\n'
'If you are lazy-loading a file, try a non-lazy load and check for reference cycles.' # TODO give advice on finding cycles 'If you are lazy-loading a file, try a non-lazy load and check for reference cycles.'
) )
self._lookups_in_progress.add(key) self._lookups_in_progress.append(key)
try:
func = self.mapping[key] func = self.mapping[key]
pat = func() pat = func()
self._lookups_in_progress.remove(key) finally:
self._lookups_in_progress.pop()
self.cache[key] = pat self.cache[key] = pat
return pat return pat

View file

@@ -201,7 +201,7 @@ class Pattern(PortList, AnnotatableImpl, Mirrorable):
def __lt__(self, other: 'Pattern') -> bool: def __lt__(self, other: 'Pattern') -> bool:
self_nonempty_targets = [target for target, reflist in self.refs.items() if reflist] self_nonempty_targets = [target for target, reflist in self.refs.items() if reflist]
other_nonempty_targets = [target for target, reflist in self.refs.items() if reflist] other_nonempty_targets = [target for target, reflist in other.refs.items() if reflist]
self_tgtkeys = tuple(sorted((target is None, target) for target in self_nonempty_targets)) self_tgtkeys = tuple(sorted((target is None, target) for target in self_nonempty_targets))
other_tgtkeys = tuple(sorted((target is None, target) for target in other_nonempty_targets)) other_tgtkeys = tuple(sorted((target is None, target) for target in other_nonempty_targets))
@@ -215,7 +215,7 @@ class Pattern(PortList, AnnotatableImpl, Mirrorable):
return refs_ours < refs_theirs return refs_ours < refs_theirs
self_nonempty_layers = [ll for ll, elems in self.shapes.items() if elems] self_nonempty_layers = [ll for ll, elems in self.shapes.items() if elems]
other_nonempty_layers = [ll for ll, elems in self.shapes.items() if elems] other_nonempty_layers = [ll for ll, elems in other.shapes.items() if elems]
self_layerkeys = tuple(sorted(layer2key(ll) for ll in self_nonempty_layers)) self_layerkeys = tuple(sorted(layer2key(ll) for ll in self_nonempty_layers))
other_layerkeys = tuple(sorted(layer2key(ll) for ll in other_nonempty_layers)) other_layerkeys = tuple(sorted(layer2key(ll) for ll in other_nonempty_layers))
@@ -224,21 +224,21 @@ class Pattern(PortList, AnnotatableImpl, Mirrorable):
for _, _, layer in self_layerkeys: for _, _, layer in self_layerkeys:
shapes_ours = tuple(sorted(self.shapes[layer])) shapes_ours = tuple(sorted(self.shapes[layer]))
shapes_theirs = tuple(sorted(self.shapes[layer])) shapes_theirs = tuple(sorted(other.shapes[layer]))
if shapes_ours != shapes_theirs: if shapes_ours != shapes_theirs:
return shapes_ours < shapes_theirs return shapes_ours < shapes_theirs
self_nonempty_txtlayers = [ll for ll, elems in self.labels.items() if elems] self_nonempty_txtlayers = [ll for ll, elems in self.labels.items() if elems]
other_nonempty_txtlayers = [ll for ll, elems in self.labels.items() if elems] other_nonempty_txtlayers = [ll for ll, elems in other.labels.items() if elems]
self_txtlayerkeys = tuple(sorted(layer2key(ll) for ll in self_nonempty_txtlayers)) self_txtlayerkeys = tuple(sorted(layer2key(ll) for ll in self_nonempty_txtlayers))
other_txtlayerkeys = tuple(sorted(layer2key(ll) for ll in other_nonempty_txtlayers)) other_txtlayerkeys = tuple(sorted(layer2key(ll) for ll in other_nonempty_txtlayers))
if self_txtlayerkeys != other_txtlayerkeys: if self_txtlayerkeys != other_txtlayerkeys:
return self_txtlayerkeys < other_txtlayerkeys return self_txtlayerkeys < other_txtlayerkeys
for _, _, layer in self_layerkeys: for _, _, layer in self_txtlayerkeys:
labels_ours = tuple(sorted(self.labels[layer])) labels_ours = tuple(sorted(self.labels[layer]))
labels_theirs = tuple(sorted(self.labels[layer])) labels_theirs = tuple(sorted(other.labels[layer]))
if labels_ours != labels_theirs: if labels_ours != labels_theirs:
return labels_ours < labels_theirs return labels_ours < labels_theirs
@@ -255,7 +255,7 @@ class Pattern(PortList, AnnotatableImpl, Mirrorable):
return False return False
self_nonempty_targets = [target for target, reflist in self.refs.items() if reflist] self_nonempty_targets = [target for target, reflist in self.refs.items() if reflist]
other_nonempty_targets = [target for target, reflist in self.refs.items() if reflist] other_nonempty_targets = [target for target, reflist in other.refs.items() if reflist]
self_tgtkeys = tuple(sorted((target is None, target) for target in self_nonempty_targets)) self_tgtkeys = tuple(sorted((target is None, target) for target in self_nonempty_targets))
other_tgtkeys = tuple(sorted((target is None, target) for target in other_nonempty_targets)) other_tgtkeys = tuple(sorted((target is None, target) for target in other_nonempty_targets))
@@ -269,7 +269,7 @@ class Pattern(PortList, AnnotatableImpl, Mirrorable):
return False return False
self_nonempty_layers = [ll for ll, elems in self.shapes.items() if elems] self_nonempty_layers = [ll for ll, elems in self.shapes.items() if elems]
other_nonempty_layers = [ll for ll, elems in self.shapes.items() if elems] other_nonempty_layers = [ll for ll, elems in other.shapes.items() if elems]
self_layerkeys = tuple(sorted(layer2key(ll) for ll in self_nonempty_layers)) self_layerkeys = tuple(sorted(layer2key(ll) for ll in self_nonempty_layers))
other_layerkeys = tuple(sorted(layer2key(ll) for ll in other_nonempty_layers)) other_layerkeys = tuple(sorted(layer2key(ll) for ll in other_nonempty_layers))
@@ -278,21 +278,21 @@ class Pattern(PortList, AnnotatableImpl, Mirrorable):
for _, _, layer in self_layerkeys: for _, _, layer in self_layerkeys:
shapes_ours = tuple(sorted(self.shapes[layer])) shapes_ours = tuple(sorted(self.shapes[layer]))
shapes_theirs = tuple(sorted(self.shapes[layer])) shapes_theirs = tuple(sorted(other.shapes[layer]))
if shapes_ours != shapes_theirs: if shapes_ours != shapes_theirs:
return False return False
self_nonempty_txtlayers = [ll for ll, elems in self.labels.items() if elems] self_nonempty_txtlayers = [ll for ll, elems in self.labels.items() if elems]
other_nonempty_txtlayers = [ll for ll, elems in self.labels.items() if elems] other_nonempty_txtlayers = [ll for ll, elems in other.labels.items() if elems]
self_txtlayerkeys = tuple(sorted(layer2key(ll) for ll in self_nonempty_txtlayers)) self_txtlayerkeys = tuple(sorted(layer2key(ll) for ll in self_nonempty_txtlayers))
other_txtlayerkeys = tuple(sorted(layer2key(ll) for ll in other_nonempty_txtlayers)) other_txtlayerkeys = tuple(sorted(layer2key(ll) for ll in other_nonempty_txtlayers))
if self_txtlayerkeys != other_txtlayerkeys: if self_txtlayerkeys != other_txtlayerkeys:
return False return False
for _, _, layer in self_layerkeys: for _, _, layer in self_txtlayerkeys:
labels_ours = tuple(sorted(self.labels[layer])) labels_ours = tuple(sorted(self.labels[layer]))
labels_theirs = tuple(sorted(self.labels[layer])) labels_theirs = tuple(sorted(other.labels[layer]))
if labels_ours != labels_theirs: if labels_ours != labels_theirs:
return False return False

View file

@@ -630,7 +630,7 @@ class PortList(metaclass=ABCMeta):
rotations = numpy.mod(s_rotations - o_rotations - pi, 2 * pi) rotations = numpy.mod(s_rotations - o_rotations - pi, 2 * pi)
if not has_rot.any(): if not has_rot.any():
if set_rotation is None: if set_rotation is None:
PortError('Must provide set_rotation if rotation is indeterminate') raise PortError('Must provide set_rotation if rotation is indeterminate')
rotations[:] = set_rotation rotations[:] = set_rotation
else: else:
rotations[~has_rot] = rotations[has_rot][0] rotations[~has_rot] = rotations[has_rot][0]

View file

@@ -92,18 +92,22 @@ class Ref(
rotation=self.rotation, rotation=self.rotation,
scale=self.scale, scale=self.scale,
mirrored=self.mirrored, mirrored=self.mirrored,
repetition=copy.deepcopy(self.repetition), repetition=self.repetition,
annotations=copy.deepcopy(self.annotations), annotations=self.annotations,
) )
return new return new
def __deepcopy__(self, memo: dict | None = None) -> 'Ref': def __deepcopy__(self, memo: dict | None = None) -> 'Ref':
memo = {} if memo is None else memo memo = {} if memo is None else memo
new = copy.copy(self) new = copy.copy(self)
#new.repetition = copy.deepcopy(self.repetition, memo) new._offset = self._offset.copy()
#new.annotations = copy.deepcopy(self.annotations, memo) new.repetition = copy.deepcopy(self.repetition, memo)
new.annotations = copy.deepcopy(self.annotations, memo)
return new return new
def copy(self) -> 'Ref':
return self.deepcopy()
def __lt__(self, other: 'Ref') -> bool: def __lt__(self, other: 'Ref') -> bool:
if (self.offset != other.offset).any(): if (self.offset != other.offset).any():
return tuple(self.offset) < tuple(other.offset) return tuple(self.offset) < tuple(other.offset)
@@ -187,10 +191,11 @@ class Ref(
xys = self.offset[None, :] xys = self.offset[None, :]
if self.repetition is not None: if self.repetition is not None:
xys = xys + self.repetition.displacements xys = xys + self.repetition.displacements
transforms = numpy.empty((xys.shape[0], 4)) transforms = numpy.empty((xys.shape[0], 5))
transforms[:, :2] = xys transforms[:, :2] = xys
transforms[:, 2] = self.rotation transforms[:, 2] = self.rotation
transforms[:, 3] = self.mirrored transforms[:, 3] = self.mirrored
transforms[:, 4] = self.scale
return transforms return transforms
def get_bounds_single( def get_bounds_single(

View file

@@ -64,7 +64,7 @@ class Grid(Repetition):
_a_count: int _a_count: int
""" Number of instances along the direction specified by the `a_vector` """ """ Number of instances along the direction specified by the `a_vector` """
_b_vector: NDArray[numpy.float64] | None _b_vector: NDArray[numpy.float64]
""" Vector `[x, y]` specifying a second lattice vector for the grid. """ Vector `[x, y]` specifying a second lattice vector for the grid.
Specifies center-to-center spacing between adjacent elements. Specifies center-to-center spacing between adjacent elements.
Can be `None` for a 1D array. Can be `None` for a 1D array.
@@ -199,9 +199,6 @@ class Grid(Repetition):
@property @property
def displacements(self) -> NDArray[numpy.float64]: def displacements(self) -> NDArray[numpy.float64]:
if self.b_vector is None:
return numpy.arange(self.a_count)[:, None] * self.a_vector[None, :]
aa, bb = numpy.meshgrid(numpy.arange(self.a_count), numpy.arange(self.b_count), indexing='ij') aa, bb = numpy.meshgrid(numpy.arange(self.a_count), numpy.arange(self.b_count), indexing='ij')
return (aa.flatten()[:, None] * self.a_vector[None, :] return (aa.flatten()[:, None] * self.a_vector[None, :]
+ bb.flatten()[:, None] * self.b_vector[None, :]) # noqa + bb.flatten()[:, None] * self.b_vector[None, :]) # noqa
@@ -301,12 +298,8 @@ class Grid(Repetition):
return self.b_count < other.b_count return self.b_count < other.b_count
if not numpy.array_equal(self.a_vector, other.a_vector): if not numpy.array_equal(self.a_vector, other.a_vector):
return tuple(self.a_vector) < tuple(other.a_vector) return tuple(self.a_vector) < tuple(other.a_vector)
if self.b_vector is None:
return other.b_vector is not None
if other.b_vector is None:
return False
if not numpy.array_equal(self.b_vector, other.b_vector): if not numpy.array_equal(self.b_vector, other.b_vector):
return tuple(self.a_vector) < tuple(other.a_vector) return tuple(self.b_vector) < tuple(other.b_vector)
return False return False
@@ -391,7 +384,9 @@ class Arbitrary(Repetition):
Returns: Returns:
self self
""" """
self.displacements[:, 1 - axis] *= -1 new_displacements = self.displacements.copy()
new_displacements[:, 1 - axis] *= -1
self.displacements = new_displacements
return self return self
def get_bounds(self) -> NDArray[numpy.float64] | None: def get_bounds(self) -> NDArray[numpy.float64] | None:
@@ -416,6 +411,6 @@ class Arbitrary(Repetition):
Returns: Returns:
self self
""" """
self.displacements *= c self.displacements = self.displacements * c
return self return self

View file

@@ -24,7 +24,16 @@ class PathCap(Enum):
# # defined by path.cap_extensions # # defined by path.cap_extensions
def __lt__(self, other: Any) -> bool: def __lt__(self, other: Any) -> bool:
return self.value == other.value if self.__class__ is not other.__class__:
return self.__class__.__name__ < other.__class__.__name__
# Order: Flush, Square, Circle, SquareCustom
order = {
PathCap.Flush: 0,
PathCap.Square: 1,
PathCap.Circle: 2,
PathCap.SquareCustom: 3,
}
return order[self] < order[other]
@functools.total_ordering @functools.total_ordering
@ -79,10 +88,10 @@ class Path(Shape):
def cap(self, val: PathCap) -> None: def cap(self, val: PathCap) -> None:
self._cap = PathCap(val) self._cap = PathCap(val)
if self.cap != PathCap.SquareCustom: if self.cap != PathCap.SquareCustom:
self.cap_extensions = None self._cap_extensions = None
elif self.cap_extensions is None: elif self._cap_extensions is None:
# just got set to SquareCustom # just got set to SquareCustom
self.cap_extensions = numpy.zeros(2) self._cap_extensions = numpy.zeros(2)
# cap_extensions property # cap_extensions property
@property @property
@@ -209,9 +218,12 @@ class Path(Shape):
self.vertices = vertices self.vertices = vertices
self.repetition = repetition self.repetition = repetition
self.annotations = annotations self.annotations = annotations
self.width = width self._cap = cap
self.cap = cap if cap == PathCap.SquareCustom and cap_extensions is None:
self._cap_extensions = numpy.zeros(2)
else:
self.cap_extensions = cap_extensions self.cap_extensions = cap_extensions
self.width = width
if rotation: if rotation:
self.rotate(rotation) self.rotate(rotation)
if numpy.any(offset): if numpy.any(offset):
@@ -253,6 +265,14 @@ class Path(Shape):
if self.cap_extensions is None: if self.cap_extensions is None:
return True return True
return tuple(self.cap_extensions) < tuple(other.cap_extensions) return tuple(self.cap_extensions) < tuple(other.cap_extensions)
if not numpy.array_equal(self.vertices, other.vertices):
min_len = min(self.vertices.shape[0], other.vertices.shape[0])
eq_mask = self.vertices[:min_len] != other.vertices[:min_len]
eq_lt = self.vertices[:min_len] < other.vertices[:min_len]
eq_lt_masked = eq_lt[eq_mask]
if eq_lt_masked.size > 0:
return eq_lt_masked.flat[0]
return self.vertices.shape[0] < other.vertices.shape[0]
if self.repetition != other.repetition: if self.repetition != other.repetition:
return rep2key(self.repetition) < rep2key(other.repetition) return rep2key(self.repetition) < rep2key(other.repetition)
return annotations_lt(self.annotations, other.annotations) return annotations_lt(self.annotations, other.annotations)
@@ -303,9 +323,30 @@ class Path(Shape):
) -> list['Polygon']: ) -> list['Polygon']:
extensions = self._calculate_cap_extensions() extensions = self._calculate_cap_extensions()
v = remove_colinear_vertices(self.vertices, closed_path=False) v = remove_colinear_vertices(self.vertices, closed_path=False, preserve_uturns=True)
dv = numpy.diff(v, axis=0) dv = numpy.diff(v, axis=0)
dvdir = dv / numpy.sqrt((dv * dv).sum(axis=1))[:, None] norms = numpy.sqrt((dv * dv).sum(axis=1))
# Filter out zero-length segments if any remained after remove_colinear_vertices
valid = (norms > 1e-18)
if not numpy.all(valid):
# This shouldn't happen much if remove_colinear_vertices is working
v = v[numpy.append(valid, True)]
dv = numpy.diff(v, axis=0)
norms = norms[valid]
if dv.shape[0] == 0:
# All vertices were the same. It's a point.
if self.width == 0:
return [Polygon(vertices=numpy.zeros((3, 2)))] # Area-less degenerate
if self.cap == PathCap.Circle:
return Circle(radius=self.width / 2, offset=v[0]).to_polygons(num_vertices=num_vertices, max_arclen=max_arclen)
if self.cap == PathCap.Square:
return [Polygon.square(side_length=self.width, offset=v[0])]
# Flush or CustomSquare
return [Polygon(vertices=numpy.zeros((3, 2)))]
dvdir = dv / norms[:, None]
if self.width == 0: if self.width == 0:
verts = numpy.vstack((v, v[::-1])) verts = numpy.vstack((v, v[::-1]))
@ -324,11 +365,21 @@ class Path(Shape):
bs = v[1:-1] - v[:-2] + perp[1:] - perp[:-1] bs = v[1:-1] - v[:-2] + perp[1:] - perp[:-1]
ds = v[1:-1] - v[:-2] - perp[1:] + perp[:-1] ds = v[1:-1] - v[:-2] - perp[1:] + perp[:-1]
rp = numpy.linalg.solve(As, bs[:, :, None])[:, 0] try:
rn = numpy.linalg.solve(As, ds[:, :, None])[:, 0] # Vectorized solve for all intersections
# solve supports broadcasting: As (N-2, 2, 2), bs (N-2, 2, 1)
rp = numpy.linalg.solve(As, bs[:, :, None])[:, 0, 0]
rn = numpy.linalg.solve(As, ds[:, :, None])[:, 0, 0]
except numpy.linalg.LinAlgError:
# Fallback to slower lstsq if some segments are parallel (singular matrix)
rp = numpy.zeros(As.shape[0])
rn = numpy.zeros(As.shape[0])
for ii in range(As.shape[0]):
rp[ii] = numpy.linalg.lstsq(As[ii], bs[ii, :, None], rcond=1e-12)[0][0, 0]
rn[ii] = numpy.linalg.lstsq(As[ii], ds[ii, :, None], rcond=1e-12)[0][0, 0]
intersection_p = v[:-2] + rp * dv[:-1] + perp[:-1] intersection_p = v[:-2] + rp[:, None] * dv[:-1] + perp[:-1]
intersection_n = v[:-2] + rn * dv[:-1] - perp[:-1] intersection_n = v[:-2] + rn[:, None] * dv[:-1] - perp[:-1]
towards_perp = (dv[1:] * perp[:-1]).sum(axis=1) > 0 # path bends towards previous perp? towards_perp = (dv[1:] * perp[:-1]).sum(axis=1) > 0 # path bends towards previous perp?
# straight = (dv[1:] * perp[:-1]).sum(axis=1) == 0 # path is straight # straight = (dv[1:] * perp[:-1]).sum(axis=1) == 0 # path is straight
@ -418,12 +469,11 @@ class Path(Shape):
rotated_vertices = numpy.vstack([numpy.dot(rotation_matrix_2d(-rotation), v) rotated_vertices = numpy.vstack([numpy.dot(rotation_matrix_2d(-rotation), v)
for v in normed_vertices]) for v in normed_vertices])
# Reorder the vertices so that the one with lowest x, then y, comes first. # Canonical ordering for open paths: pick whichever of (v) or (v[::-1]) is smaller
x_min = rotated_vertices[:, 0].argmin() if tuple(rotated_vertices.flat) > tuple(rotated_vertices[::-1].flat):
if not is_scalar(x_min): reordered_vertices = rotated_vertices[::-1]
y_min = rotated_vertices[x_min, 1].argmin() else:
x_min = cast('Sequence', x_min)[y_min] reordered_vertices = rotated_vertices
reordered_vertices = numpy.roll(rotated_vertices, -x_min, axis=0)
width0 = self.width / norm_value width0 = self.width / norm_value
@ -462,7 +512,7 @@ class Path(Shape):
Returns: Returns:
self self
""" """
self.vertices = remove_colinear_vertices(self.vertices, closed_path=False) self.vertices = remove_colinear_vertices(self.vertices, closed_path=False, preserve_uturns=True)
return self return self
def _calculate_cap_extensions(self) -> NDArray[numpy.float64]: def _calculate_cap_extensions(self) -> NDArray[numpy.float64]:

View file

@ -321,7 +321,7 @@ class Polygon(Shape):
else: else:
raise PatternError('Two of ymin, yctr, ymax, ly must be None!') raise PatternError('Two of ymin, yctr, ymax, ly must be None!')
poly = Polygon.rectangle(lx, ly, offset=(xctr, yctr), repetition=repetition) poly = Polygon.rectangle(abs(lx), abs(ly), offset=(xctr, yctr), repetition=repetition)
return poly return poly
@staticmethod @staticmethod
@ -417,11 +417,15 @@ class Polygon(Shape):
for v in normed_vertices]) for v in normed_vertices])
# Reorder the vertices so that the one with lowest x, then y, comes first. # Reorder the vertices so that the one with lowest x, then y, comes first.
x_min = rotated_vertices[:, 0].argmin() x_min_val = rotated_vertices[:, 0].min()
if not is_scalar(x_min): x_min_inds = numpy.where(rotated_vertices[:, 0] == x_min_val)[0]
y_min = rotated_vertices[x_min, 1].argmin() if x_min_inds.size > 1:
x_min = cast('Sequence', x_min)[y_min] y_min_val = rotated_vertices[x_min_inds, 1].min()
reordered_vertices = numpy.roll(rotated_vertices, -x_min, axis=0) tie_breaker = numpy.where(rotated_vertices[x_min_inds, 1] == y_min_val)[0][0]
start_ind = x_min_inds[tie_breaker]
else:
start_ind = x_min_inds[0]
reordered_vertices = numpy.roll(rotated_vertices, -start_ind, axis=0)
# TODO: normalize mirroring? # TODO: normalize mirroring?

111
masque/test/test_dxf.py Normal file
View file

@ -0,0 +1,111 @@
import numpy
from numpy.testing import assert_allclose
from pathlib import Path
from ..pattern import Pattern
from ..library import Library
from ..shapes import Path as MPath, Polygon
from ..repetition import Grid
from ..file import dxf
def test_dxf_roundtrip(tmp_path: Path):
    """Write a library with polygons, paths, and a Manhattan Grid ref to DXF,
    read it back, and check that all geometry and repetition info survives.
    """
    lib = Library()
    pat = Pattern()

    # Closed polygon on layer "1"
    poly_verts = numpy.array([[0, 0], [10, 0], [10, 10], [0, 10]])
    pat.polygon("1", vertices=poly_verts)

    # Open 3-point path on layer "2"
    path_verts = numpy.array([[20, 0], [30, 0], [30, 10]])
    pat.path("2", vertices=path_verts, width=2)

    # Open 2-point path on layer "3" (exercises the 2-point LWPolyline fix).
    # width=0 so it can't be silently polygonized without us noticing.
    path2_verts = numpy.array([[40, 0], [50, 10]])
    pat.path("3", vertices=path2_verts, width=0)

    # Ref with a Manhattan (axis-aligned) Grid repetition
    subpat = Pattern()
    subpat.polygon("sub", vertices=[[0, 0], [1, 0], [1, 1]])
    lib["sub"] = subpat
    pat.ref("sub", offset=(100, 100), repetition=Grid(a_vector=(10, 0), a_count=2, b_vector=(0, 10), b_count=3))
    lib["top"] = pat

    dxf_file = tmp_path / "test.dxf"
    dxf.writefile(lib, "top", dxf_file)
    read_lib, _ = dxf.readfile(dxf_file)

    # DXF readers typically name the top-level cell "Model"; fall back gracefully.
    top_pat = read_lib.get("Model") or read_lib.get("top") or list(read_lib.values())[0]

    # Polygon round-trip: vertices should come back unchanged (simple case,
    # no reordering/shift expected here).
    polys = [shape for shape in top_pat.shapes["1"] if isinstance(shape, Polygon)]
    assert len(polys) >= 1
    assert_allclose(polys[0].vertices, poly_verts)

    # 3-point path round-trip, including width
    paths = [shape for shape in top_pat.shapes["2"] if isinstance(shape, MPath)]
    assert len(paths) >= 1
    assert_allclose(paths[0].vertices, path_verts)
    assert paths[0].width == 2

    # 2-point path round-trip, including zero width
    paths2 = [shape for shape in top_pat.shapes["3"] if isinstance(shape, MPath)]
    assert len(paths2) >= 1
    assert_allclose(paths2[0].vertices, path2_verts)
    assert paths2[0].width == 0

    # The sub-cell must survive as its own pattern...
    assert "sub" in read_lib

    # ...and the Grid repetition on the ref must survive too. Target-name
    # matching is case-insensitive since DXF block names may be normalized.
    found_grid = False
    for target, reflist in top_pat.refs.items():
        if target.upper() != "SUB":
            continue
        for ref in reflist:
            if not isinstance(ref.repetition, Grid):
                continue
            assert ref.repetition.a_count == 2
            assert ref.repetition.b_count == 3
            assert_allclose(ref.repetition.a_vector, (10, 0))
            assert_allclose(ref.repetition.b_vector, (0, 10))
            found_grid = True
    assert found_grid, f"Manhattan Grid repetition should have been preserved. Targets: {list(top_pat.refs.keys())}"
def test_dxf_manhattan_precision(tmp_path: Path):
    """Check that floating-point noise from a 90-degree rotation does not
    break Manhattan-grid detection on DXF read-back.
    """
    lib = Library()
    sub = Pattern()
    sub.polygon("1", vertices=[[0, 0], [1, 0], [1, 1]])
    lib["sub"] = sub

    top = Pattern()
    # In masque, the grid basis is stored unrotated ([[10,0],[0,10]]) with the
    # rotation carried on the ref. In DXF, a 90-degree-rotated array has basis
    # vectors [[0,10],[-10,0]], so the two representations must be matched
    # with an isclose()-style comparison rather than exact equality.
    angle = numpy.pi / 2  # 90 degrees
    top.ref("sub", offset=(0, 0), rotation=angle,
            repetition=Grid(a_vector=(10, 0), a_count=2, b_vector=(0, 10), b_count=2))
    lib["top"] = top

    dxf_file = tmp_path / "precision.dxf"
    dxf.writefile(lib, "top", dxf_file)
    read_lib, _ = dxf.readfile(dxf_file)

    read_top = read_lib.get("Model") or read_lib.get("top") or list(read_lib.values())[0]
    target_name = next(name for name in read_top.refs if name.upper() == "SUB")
    first_ref = read_top.refs[target_name][0]
    # If the isclose() fix works, the repetition survives as a Grid.
    assert isinstance(first_ref.repetition, Grid), "Grid should be preserved for 90-degree rotation"

View file

@ -5,7 +5,6 @@ from numpy.testing import assert_allclose
from ..pattern import Pattern from ..pattern import Pattern
from ..library import Library from ..library import Library
from ..file import gdsii, oasis
from ..shapes import Path as MPath, Circle, Polygon from ..shapes import Path as MPath, Circle, Polygon
from ..repetition import Grid, Arbitrary from ..repetition import Grid, Arbitrary
@ -62,6 +61,7 @@ def create_test_library(for_gds: bool = False) -> Library:
return lib return lib
def test_gdsii_full_roundtrip(tmp_path: Path) -> None: def test_gdsii_full_roundtrip(tmp_path: Path) -> None:
from ..file import gdsii
lib = create_test_library(for_gds=True) lib = create_test_library(for_gds=True)
gds_file = tmp_path / "full_test.gds" gds_file = tmp_path / "full_test.gds"
gdsii.writefile(lib, gds_file, meters_per_unit=1e-9) gdsii.writefile(lib, gds_file, meters_per_unit=1e-9)
@ -110,6 +110,7 @@ def test_gdsii_full_roundtrip(tmp_path: Path) -> None:
def test_oasis_full_roundtrip(tmp_path: Path) -> None: def test_oasis_full_roundtrip(tmp_path: Path) -> None:
pytest.importorskip("fatamorgana") pytest.importorskip("fatamorgana")
from ..file import oasis
lib = create_test_library(for_gds=False) lib = create_test_library(for_gds=False)
oas_file = tmp_path / "full_test.oas" oas_file = tmp_path / "full_test.oas"
oasis.writefile(lib, oas_file, units_per_micron=1000) oasis.writefile(lib, oas_file, units_per_micron=1000)

View file

@ -4,12 +4,10 @@ from numpy.testing import assert_equal
from ..pattern import Pattern from ..pattern import Pattern
from ..library import Library from ..library import Library
from ..file import oasis
def test_oasis_roundtrip(tmp_path: Path) -> None: def test_oasis_roundtrip(tmp_path: Path) -> None:
# Skip if fatamorgana is not installed # Skip if fatamorgana is not installed
pytest.importorskip("fatamorgana") pytest.importorskip("fatamorgana")
from ..file import oasis
lib = Library() lib = Library()
pat1 = Pattern() pat1 = Pattern()

View file

@ -29,14 +29,19 @@ def test_remove_colinear_vertices() -> None:
def test_remove_colinear_vertices_exhaustive() -> None: def test_remove_colinear_vertices_exhaustive() -> None:
# U-turn # U-turn
v = [[0, 0], [10, 0], [0, 0]] v = [[0, 0], [10, 0], [0, 0]]
v_clean = remove_colinear_vertices(v, closed_path=False) v_clean = remove_colinear_vertices(v, closed_path=False, preserve_uturns=True)
# Open path should keep ends. [10,0] is between [0,0] and [0,0]? # Open path should keep ends. [10,0] is between [0,0] and [0,0]?
# Yes, they are all on the same line. # They are colinear, but it's a 180 degree turn.
assert len(v_clean) == 2 # We preserve 180 degree turns if preserve_uturns is True.
assert len(v_clean) == 3
v_collapsed = remove_colinear_vertices(v, closed_path=False, preserve_uturns=False)
# If not preserving u-turns, it should collapse to just the endpoints
assert len(v_collapsed) == 2
# 180 degree U-turn in closed path # 180 degree U-turn in closed path
v = [[0, 0], [10, 0], [5, 0]] v = [[0, 0], [10, 0], [5, 0]]
v_clean = remove_colinear_vertices(v, closed_path=True) v_clean = remove_colinear_vertices(v, closed_path=True, preserve_uturns=False)
assert len(v_clean) == 2 assert len(v_clean) == 2
@ -64,7 +69,7 @@ def test_apply_transforms() -> None:
t1 = [10, 20, 0, 0] t1 = [10, 20, 0, 0]
t2 = [[5, 0, 0, 0], [0, 5, 0, 0]] t2 = [[5, 0, 0, 0], [0, 5, 0, 0]]
combined = apply_transforms(t1, t2) combined = apply_transforms(t1, t2)
assert_equal(combined, [[15, 20, 0, 0], [10, 25, 0, 0]]) assert_equal(combined, [[15, 20, 0, 0, 1], [10, 25, 0, 0, 1]])
def test_apply_transforms_advanced() -> None: def test_apply_transforms_advanced() -> None:
@ -80,4 +85,4 @@ def test_apply_transforms_advanced() -> None:
# 1. mirror inner y if outer mirrored: (10, 0) -> (10, 0) # 1. mirror inner y if outer mirrored: (10, 0) -> (10, 0)
# 2. rotate by outer rotation (pi/2): (10, 0) -> (0, 10) # 2. rotate by outer rotation (pi/2): (10, 0) -> (0, 10)
# 3. add outer offset (0, 0) -> (0, 10) # 3. add outer offset (0, 0) -> (0, 10)
assert_allclose(combined[0], [0, 10, pi / 2, 1], atol=1e-10) assert_allclose(combined[0], [0, 10, pi / 2, 1, 1], atol=1e-10)

View file

@ -60,4 +60,4 @@ class DeferredDict(dict, Generic[Key, Value]):
Convenience function to avoid having to manually wrap Convenience function to avoid having to manually wrap
constant values into callables. constant values into callables.
""" """
self[key] = lambda: value self[key] = lambda v=value: v

View file

@ -57,11 +57,9 @@ def data_to_ports(
name: str | None = None, # Note: name optional, but arg order different from read(postprocess=) name: str | None = None, # Note: name optional, but arg order different from read(postprocess=)
max_depth: int = 0, max_depth: int = 0,
skip_subcells: bool = True, skip_subcells: bool = True,
# TODO missing ok? visited: set[int] | None = None,
) -> Pattern: ) -> Pattern:
""" """
# TODO fixup documentation in ports2data
# TODO move to utils.file?
Examine `pattern` for labels specifying port info, and use that info Examine `pattern` for labels specifying port info, and use that info
to fill out its `ports` attribute. to fill out its `ports` attribute.
@ -70,18 +68,30 @@ def data_to_ports(
Args: Args:
layers: Search for labels on all the given layers. layers: Search for labels on all the given layers.
library: Mapping from pattern names to patterns.
pattern: Pattern object to scan for labels. pattern: Pattern object to scan for labels.
max_depth: Maximum hierarcy depth to search. Default 999_999. name: Name of the pattern object.
max_depth: Maximum hierarcy depth to search. Default 0.
Reduce this to 0 to avoid ever searching subcells. Reduce this to 0 to avoid ever searching subcells.
skip_subcells: If port labels are found at a given hierarcy level, skip_subcells: If port labels are found at a given hierarcy level,
do not continue searching at deeper levels. This allows subcells do not continue searching at deeper levels. This allows subcells
to contain their own port info without interfering with supercells' to contain their own port info without interfering with supercells'
port data. port data.
Default True. Default True.
visited: Set of object IDs which have already been processed.
Returns: Returns:
The updated `pattern`. Port labels are not removed. The updated `pattern`. Port labels are not removed.
""" """
if visited is None:
visited = set()
# Note: visited uses id(pattern) to detect cycles and avoid redundant processing.
# This may not catch identical patterns if they are loaded as separate object instances.
if id(pattern) in visited:
return pattern
visited.add(id(pattern))
if pattern.ports: if pattern.ports:
logger.warning(f'Pattern {name if name else pattern} already had ports, skipping data_to_ports') logger.warning(f'Pattern {name if name else pattern} already had ports, skipping data_to_ports')
return pattern return pattern
@ -99,12 +109,13 @@ def data_to_ports(
if target is None: if target is None:
continue continue
pp = data_to_ports( pp = data_to_ports(
layers=layers, layers = layers,
library=library, library = library,
pattern=library[target], pattern = library[target],
name=target, name = target,
max_depth=max_depth - 1, max_depth = max_depth - 1,
skip_subcells=skip_subcells, skip_subcells = skip_subcells,
visited = visited,
) )
found_ports |= bool(pp.ports) found_ports |= bool(pp.ports)
@ -160,13 +171,17 @@ def data_to_ports_flat(
local_ports = {} local_ports = {}
for label in labels: for label in labels:
name, property_string = label.string.split(':') if ':' not in label.string:
properties = property_string.split(' ') logger.warning(f'Invalid port label "{label.string}" in pattern "{pstr}" (missing ":")')
ptype = properties[0] continue
angle_deg = float(properties[1]) if len(ptype) else 0
name, property_string = label.string.split(':', 1)
properties = property_string.split()
ptype = properties[0] if len(properties) > 0 else 'unk'
angle_deg = float(properties[1]) if len(properties) > 1 else numpy.inf
xy = label.offset xy = label.offset
angle = numpy.deg2rad(angle_deg) angle = numpy.deg2rad(angle_deg) if numpy.isfinite(angle_deg) else None
if name in local_ports: if name in local_ports:
logger.warning(f'Duplicate port "{name}" in pattern "{pstr}"') logger.warning(f'Duplicate port "{name}" in pattern "{pstr}"')

View file

@ -28,8 +28,9 @@ def rotation_matrix_2d(theta: float) -> NDArray[numpy.float64]:
arr = numpy.array([[numpy.cos(theta), -numpy.sin(theta)], arr = numpy.array([[numpy.cos(theta), -numpy.sin(theta)],
[numpy.sin(theta), +numpy.cos(theta)]]) [numpy.sin(theta), +numpy.cos(theta)]])
# If this was a manhattan rotation, round to remove some inacuraccies in sin & cos # If this was a manhattan rotation, round to remove some inaccuracies in sin & cos
if numpy.isclose(theta % (pi / 2), 0): # cos(4*theta) is 1 for any multiple of pi/2.
if numpy.isclose(numpy.cos(4 * theta), 1, atol=1e-12):
arr = numpy.round(arr) arr = numpy.round(arr)
arr.flags.writeable = False arr.flags.writeable = False
@ -86,37 +87,50 @@ def apply_transforms(
Apply a set of transforms (`outer`) to a second set (`inner`). Apply a set of transforms (`outer`) to a second set (`inner`).
This is used to find the "absolute" transform for nested `Ref`s. This is used to find the "absolute" transform for nested `Ref`s.
The two transforms should be of shape Ox4 and Ix4. The two transforms should be of shape Ox5 and Ix5.
Rows should be of the form `(x_offset, y_offset, rotation_ccw_rad, mirror_across_x)`. Rows should be of the form `(x_offset, y_offset, rotation_ccw_rad, mirror_across_x, scale)`.
The output will be of the form (O*I)x4 (if `tensor=False`) or OxIx4 (`tensor=True`). The output will be of the form (O*I)x5 (if `tensor=False`) or OxIx5 (`tensor=True`).
Args: Args:
outer: Transforms for the container refs. Shape Ox4. outer: Transforms for the container refs. Shape Ox5.
inner: Transforms for the contained refs. Shape Ix4. inner: Transforms for the contained refs. Shape Ix5.
tensor: If `True`, an OxIx4 array is returned, with `result[oo, ii, :]` corresponding tensor: If `True`, an OxIx5 array is returned, with `result[oo, ii, :]` corresponding
to the `oo`th `outer` transform applied to the `ii`th inner transform. to the `oo`th `outer` transform applied to the `ii`th inner transform.
If `False` (default), this is concatenated into `(O*I)x4` to allow simple If `False` (default), this is concatenated into `(O*I)x5` to allow simple
chaining into additional `apply_transforms()` calls. chaining into additional `apply_transforms()` calls.
Returns: Returns:
OxIx4 or (O*I)x4 array. Final dimension is OxIx5 or (O*I)x5 array. Final dimension is
`(total_x, total_y, total_rotation_ccw_rad, net_mirrored_x)`. `(total_x, total_y, total_rotation_ccw_rad, net_mirrored_x, total_scale)`.
""" """
outer = numpy.atleast_2d(outer).astype(float, copy=False) outer = numpy.atleast_2d(outer).astype(float, copy=False)
inner = numpy.atleast_2d(inner).astype(float, copy=False) inner = numpy.atleast_2d(inner).astype(float, copy=False)
if outer.shape[1] == 4:
outer = numpy.pad(outer, ((0, 0), (0, 1)), constant_values=1.0)
if inner.shape[1] == 4:
inner = numpy.pad(inner, ((0, 0), (0, 1)), constant_values=1.0)
# If mirrored, flip y's # If mirrored, flip y's
xy_mir = numpy.tile(inner[:, :2], (outer.shape[0], 1, 1)) # dims are outer, inner, xyrm xy_mir = numpy.tile(inner[:, :2], (outer.shape[0], 1, 1)) # dims are outer, inner, xyrm
xy_mir[outer[:, 3].astype(bool), :, 1] *= -1 xy_mir[outer[:, 3].astype(bool), :, 1] *= -1
# Apply outer scale to inner offset
xy_mir *= outer[:, None, 4, None]
rot_mats = [rotation_matrix_2d(angle) for angle in outer[:, 2]] rot_mats = [rotation_matrix_2d(angle) for angle in outer[:, 2]]
xy = numpy.einsum('ort,oit->oir', rot_mats, xy_mir) xy = numpy.einsum('ort,oit->oir', rot_mats, xy_mir)
tot = numpy.empty((outer.shape[0], inner.shape[0], 4)) tot = numpy.empty((outer.shape[0], inner.shape[0], 5))
tot[:, :, :2] = outer[:, None, :2] + xy tot[:, :, :2] = outer[:, None, :2] + xy
tot[:, :, 2:] = outer[:, None, 2:] + inner[None, :, 2:] # sum rotations and mirrored
tot[:, :, 2] %= 2 * pi # clamp rot # If mirrored, flip inner rotation
tot[:, :, 3] %= 2 # clamp mirrored mirrored_outer = outer[:, None, 3].astype(bool)
rotations = outer[:, None, 2] + numpy.where(mirrored_outer, -inner[None, :, 2], inner[None, :, 2])
tot[:, :, 2] = rotations % (2 * pi)
tot[:, :, 3] = (outer[:, None, 3] + inner[None, :, 3]) % 2 # net mirrored
tot[:, :, 4] = outer[:, None, 4] * inner[None, :, 4] # net scale
if tensor: if tensor:
return tot return tot

View file

@ -30,7 +30,11 @@ def remove_duplicate_vertices(vertices: ArrayLike, closed_path: bool = True) ->
return result return result
def remove_colinear_vertices(vertices: ArrayLike, closed_path: bool = True) -> NDArray[numpy.float64]: def remove_colinear_vertices(
vertices: ArrayLike,
closed_path: bool = True,
preserve_uturns: bool = False,
) -> NDArray[numpy.float64]:
""" """
Given a list of vertices, remove any superflous vertices (i.e. Given a list of vertices, remove any superflous vertices (i.e.
those which lie along the line formed by their neighbors) those which lie along the line formed by their neighbors)
@ -39,21 +43,33 @@ def remove_colinear_vertices(vertices: ArrayLike, closed_path: bool = True) -> N
vertices: Nx2 ndarray of vertices vertices: Nx2 ndarray of vertices
closed_path: If `True`, the vertices are assumed to represent an implicitly closed_path: If `True`, the vertices are assumed to represent an implicitly
closed path. If `False`, the path is assumed to be open. Default `True`. closed path. If `False`, the path is assumed to be open. Default `True`.
preserve_uturns: If `True`, colinear vertices that correspond to a 180 degree
turn (a "spike") are preserved. Default `False`.
Returns: Returns:
`vertices` with colinear (superflous) vertices removed. May be a view into the original array. `vertices` with colinear (superflous) vertices removed. May be a view into the original array.
""" """
vertices = remove_duplicate_vertices(vertices) vertices = remove_duplicate_vertices(vertices, closed_path=closed_path)
# Check for dx0/dy0 == dx1/dy1 # Check for dx0/dy0 == dx1/dy1
dv = numpy.roll(vertices, -1, axis=0) - vertices
if not closed_path:
dv[-1] = 0
dv = numpy.roll(vertices, -1, axis=0) - vertices # [y1-y0, y2-y1, ...] # dxdy[i] is based on dv[i] and dv[i-1]
dxdy = dv * numpy.roll(dv, 1, axis=0)[:, ::-1] # [[dx0*(dy_-1), (dx_-1)*dy0], dx1*dy0, dy1*dx0]] # slopes_equal[i] refers to vertex i
dxdy = dv * numpy.roll(dv, 1, axis=0)[:, ::-1]
dxdy_diff = numpy.abs(numpy.diff(dxdy, axis=1))[:, 0] dxdy_diff = numpy.abs(numpy.diff(dxdy, axis=1))[:, 0]
err_mult = 2 * numpy.abs(dxdy).sum(axis=1) + 1e-40 err_mult = 2 * numpy.abs(dxdy).sum(axis=1) + 1e-40
slopes_equal = (dxdy_diff / err_mult) < 1e-15 slopes_equal = (dxdy_diff / err_mult) < 1e-15
if preserve_uturns:
# Only merge if segments are in the same direction (avoid collapsing u-turns)
dot_prod = (dv * numpy.roll(dv, 1, axis=0)).sum(axis=1)
slopes_equal &= (dot_prod > 0)
if not closed_path: if not closed_path:
slopes_equal[[0, -1]] = False slopes_equal[[0, -1]] = False

View file

@ -65,7 +65,7 @@ path = "masque/__init__.py"
[project.optional-dependencies] [project.optional-dependencies]
oasis = ["fatamorgana~=0.11"] oasis = ["fatamorgana~=0.11"]
dxf = ["ezdxf~=1.0.2"] dxf = ["ezdxf~=1.4"]
svg = ["svgwrite"] svg = ["svgwrite"]
visualize = ["matplotlib"] visualize = ["matplotlib"]
text = ["matplotlib", "freetype-py"] text = ["matplotlib", "freetype-py"]
@ -110,6 +110,9 @@ lint.ignore = [
[tool.pytest.ini_options] [tool.pytest.ini_options]
addopts = "-rsXx" addopts = "-rsXx"
testpaths = ["masque"] testpaths = ["masque"]
filterwarnings = [
"ignore::DeprecationWarning:ezdxf.*",
]
[tool.mypy] [tool.mypy]
mypy_path = "stubs" mypy_path = "stubs"