From 7aaf73cb375bccbbfbb3f02792f1f140c6a4f64a Mon Sep 17 00:00:00 2001 From: jan Date: Thu, 7 Jul 2022 11:27:29 -0700 Subject: [PATCH] WIP: make libraries and names first-class! --- examples/nested_poly_test.py | 28 + examples/pcgen.py | 298 ++++++++ examples/phc.py | 178 +++++ masque/__init__.py | 6 +- masque/builder/__init__.py | 1 + masque/{library => builder}/device_library.py | 15 +- masque/builder/devices.py | 93 +-- masque/error.py | 7 - masque/file/dxf.py | 102 +-- masque/file/gdsii.py | 323 ++++----- masque/file/oasis.py | 214 +++--- masque/file/python_gdsii.py | 122 ++-- masque/file/svg.py | 33 +- masque/file/utils.py | 80 +- masque/label.py | 36 +- masque/library.py | 594 +++++++++++++++ masque/library/__init__.py | 2 - masque/library/library.py | 355 --------- masque/pattern.py | 682 +++--------------- masque/repetition.py | 85 +-- masque/shapes/arc.py | 21 +- masque/shapes/circle.py | 9 +- masque/shapes/ellipse.py | 19 +- masque/shapes/path.py | 23 +- masque/shapes/polygon.py | 19 +- masque/shapes/shape.py | 21 +- masque/shapes/text.py | 19 +- masque/subpattern.py | 172 ++--- masque/traits/__init__.py | 1 - masque/traits/annotatable.py | 3 - masque/traits/positionable.py | 20 - masque/utils/__init__.py | 1 + .../utils.py => utils/deferreddict.py} | 0 masque/utils/pack2d.py | 20 +- 34 files changed, 1785 insertions(+), 1817 deletions(-) create mode 100644 examples/nested_poly_test.py create mode 100644 examples/pcgen.py create mode 100644 examples/phc.py rename masque/{library => builder}/device_library.py (96%) create mode 100644 masque/library.py delete mode 100644 masque/library/__init__.py delete mode 100644 masque/library/library.py rename masque/{library/utils.py => utils/deferreddict.py} (100%) diff --git a/examples/nested_poly_test.py b/examples/nested_poly_test.py new file mode 100644 index 0000000..ba0c830 --- /dev/null +++ b/examples/nested_poly_test.py @@ -0,0 +1,28 @@ +import numpy +from pyclipper import ( + Pyclipper, PT_CLIP, PT_SUBJECT, CT_UNION, CT_INTERSECTION, PFT_NONZERO, + scale_to_clipper, scale_from_clipper, + ) +p = Pyclipper() +p.AddPaths([ + [(-10, -10), (-10, 10), (-9, 10), (-9, -10)], + [(-10, 10), (10, 10), (10, 9), (-10, 9)], + [(10, 10), (10, -10), (9, -10), (9, 10)], + [(10, -10), (-10, -10), (-10, -9), (10, -9)], + ], PT_SUBJECT, closed=True) +p.Execute2? +p.Execute? +p.Execute(PT_UNION, PT_NONZERO, PT_NONZERO) +p.Execute(CT_UNION, PT_NONZERO, PT_NONZERO) +p.Execute(CT_UNION, PFT_NONZERO, PFT_NONZERO) +p = Pyclipper() +p.AddPaths([ + [(-10, -10), (-10, 10), (-9, 10), (-9, -10)], + [(-10, 10), (10, 10), (10, 9), (-10, 9)], + [(10, 10), (10, -10), (9, -10), (9, 10)], + [(10, -10), (-10, -10), (-10, -9), (10, -9)], + ], PT_SUBJECT, closed=True) +r = p.Execute2(CT_UNION, PFT_NONZERO, PFT_NONZERO) +r +r.Childs +%history -f nested_poly_test.py diff --git a/examples/pcgen.py b/examples/pcgen.py new file mode 100644 index 0000000..3c25ed6 --- /dev/null +++ b/examples/pcgen.py @@ -0,0 +1,298 @@ +""" +Routines for creating normalized 2D lattices and common photonic crystal + cavity designs. +""" + +from typing import Sequence, Tuple + +import numpy # type: ignore + + +def triangular_lattice(dims: Tuple[int, int], + asymmetric: bool = False, + origin: str = 'center', + ) -> numpy.ndarray: + """ + Return an ndarray of `[[x0, y0], [x1, y1], ...]` denoting lattice sites for + a triangular lattice in 2D. + + Args: + dims: Number of lattice sites in the [x, y] directions. + asymmetric: If true, each row will contain the same number of + x-coord lattice sites. 
If false, every other row will be
+            one site shorter (to make the structure symmetric).
+        origin: If 'corner', the least-(x,y) lattice site is placed at (0, 0).
+            If 'center', the center of the lattice (not necessarily a
+            lattice site) is placed at (0, 0).
+
+    Returns:
+        `[[x0, y0], [x1, y1], ...]` denoting lattice sites.
+    """
+    sx, sy = numpy.meshgrid(numpy.arange(dims[0], dtype=float),
+                            numpy.arange(dims[1], dtype=float), indexing='ij')
+
+    sx[sy % 2 == 1] += 0.5
+    sy *= numpy.sqrt(3) / 2
+
+    if not asymmetric:
+        which = sx != sx.max()
+        sx = sx[which]
+        sy = sy[which]
+
+    xy = numpy.column_stack((sx.flat, sy.flat))
+
+    if origin == 'center':
+        xy -= (xy.max(axis=0) - xy.min(axis=0)) / 2
+    elif origin == 'corner':
+        pass
+    else:
+        raise Exception(f'Invalid value for `origin`: {origin}')
+
+    return xy[xy[:, 0].argsort(), :]
+
+
+def square_lattice(dims: Tuple[int, int]) -> numpy.ndarray:
+    """
+    Return an ndarray of `[[x0, y0], [x1, y1], ...]` denoting lattice sites for
+    a square lattice in 2D. The lattice will be centered around (0, 0).
+
+    Args:
+        dims: Number of lattice sites in the [x, y] directions.
+
+    Returns:
+        `[[x0, y0], [x1, y1], ...]` denoting lattice sites.
+    """
+    xs, ys = numpy.meshgrid(numpy.arange(dims[0], dtype=float),
+                            numpy.arange(dims[1], dtype=float), indexing='xy')
+    xs -= dims[0] / 2
+    ys -= dims[1] / 2
+    xy = numpy.vstack((xs.flatten(), ys.flatten())).T
+    return xy[xy[:, 0].argsort(), ]
+
+
+# ### Photonic crystal functions ###
+
+
+def nanobeam_holes(a_defect: float,
+                   num_defect_holes: int,
+                   num_mirror_holes: int
+                   ) -> numpy.ndarray:
+    """
+    Returns a list of `[[x0, r0], [x1, r1], ...]` of nanobeam hole positions and radii.
+    Creates a region in which the lattice constant and radius are progressively
+    (linearly) altered over num_defect_holes holes until they reach the value
+    specified by a_defect, then symmetrically returned to a lattice constant and
+    radius of 1, which is repeated num_mirror_holes times on each side.
+
+    Args:
+        a_defect: Minimum lattice constant for the defect, as a fraction of the
+            mirror lattice constant (i.e., for no defect, a_defect = 1).
+        num_defect_holes: How many holes form the defect (per-side)
+        num_mirror_holes: How many holes form the mirror (per-side)
+
+    Returns:
+        Ndarray `[[x0, r0], [x1, r1], ...]` of nanobeam hole positions and radii.
+    """
+    a_values = numpy.linspace(a_defect, 1, num_defect_holes, endpoint=False)
+    xs = a_values.cumsum() - (a_values[0] / 2)    # Later mirroring makes center distance 2x as long
+    mirror_xs = numpy.arange(1, num_mirror_holes + 1, dtype=float) + xs[-1]
+    mirror_rs = numpy.ones_like(mirror_xs)
+    return numpy.vstack((numpy.hstack((-mirror_xs[::-1], -xs[::-1], xs, mirror_xs)),
+                         numpy.hstack((mirror_rs[::-1], a_values[::-1], a_values, mirror_rs)))).T
+
+
+def waveguide(length: int, num_mirror: int) -> numpy.ndarray:
+    """
+    Line defect waveguide in a triangular lattice.
+
+    Args:
+        length: waveguide length (number of holes in x direction)
+        num_mirror: Mirror length (number of holes per side; total size is
+            `2 * n + 1` holes).
+
+    Returns:
+        `[[x0, y0], [x1, y1], ...]` for all the holes
+    """
+    p = triangular_lattice([length, 2 * num_mirror + 1])
+    p_wg = p[p[:, 1] != 0, :]
+    return p_wg
+
+
+def wgbend(num_mirror: int) -> numpy.ndarray:
+    """
+    Line defect waveguide bend in a triangular lattice.
+ + Args: + num_mirror: Mirror length (number of holes per side; total size is + approximately `2 * n + 1` + + Returns: + `[[x0, y0], [x1, y1], ...]` for all the holes + """ + p = triangular_lattice([2 * num_mirror, 2 * num_mirror + 1]) + left_horiz = (p[:, 1] == 0) & (p[:, 0] <= 0) + p = p[~left_horiz, :] + + right_diag = numpy.isclose(p[:, 1], p[:, 0] * numpy.sqrt(3)) & (p[:, 0] >= 0) + p = p[~right_diag, :] + return p + + +def y_splitter(num_mirror: int) -> numpy.ndarray: + """ + Line defect waveguide y-splitter in a triangular lattice. + + Args: + num_mirror: Mirror length (number of holes per side; total size is + approximately `2 * n + 1` holes. + + Returns: + `[[x0, y0], [x1, y1], ...]` for all the holes + """ + p = triangular_lattice([2 * num_mirror, 2 * num_mirror + 1]) + left_horiz = (p[:, 1] == 0) & (p[:, 0] <= 0) + p = p[~left_horiz, :] + + right_diag_up = numpy.isclose(p[:, 1], p[:, 0] * numpy.sqrt(3)) & (p[:, 0] >= 0) + p = p[~right_diag_up, :] + + right_diag_dn = numpy.isclose(p[:, 1], -p[:, 0] * numpy.sqrt(3)) & (p[:, 0] >= 0) + p = p[~right_diag_dn, :] + return p + + +def ln_defect(mirror_dims: Tuple[int, int], + defect_length: int, + ) -> numpy.ndarray: + """ + N-hole defect in a triangular lattice. + + Args: + mirror_dims: [x, y] mirror lengths (number of holes). Total number of holes + is 2 * n + 1 in each direction. + defect_length: Length of defect. Should be an odd number. + + Returns: + `[[x0, y0], [x1, y1], ...]` for all the holes + """ + if defect_length % 2 != 1: + raise Exception('defect_length must be odd!') + p = triangular_lattice([2 * d + 1 for d in mirror_dims]) + half_length = numpy.floor(defect_length / 2) + hole_nums = numpy.arange(-half_length, half_length + 1) + holes_to_keep = numpy.in1d(p[:, 0], hole_nums, invert=True) + return p[numpy.logical_or(holes_to_keep, p[:, 1] != 0), ] + + +def ln_shift_defect(mirror_dims: Tuple[int, int], + defect_length: int, + shifts_a: Sequence[float] = (0.15, 0, 0.075), + shifts_r: Sequence[float] = (1, 1, 1) + ) -> numpy.ndarray: + """ + N-hole defect with shifted holes (intended to give the mode a gaussian profile + in real- and k-space so as to improve both Q and confinement). Holes along the + defect line are shifted and altered according to the shifts_* parameters. + + Args: + mirror_dims: [x, y] mirror lengths (number of holes). Total number of holes + is `2 * n + 1` in each direction. + defect_length: Length of defect. Should be an odd number. + shifts_a: Percentage of a to shift (1st, 2nd, 3rd,...) holes along the defect line + shifts_r: Factor to multiply the radius by. 
Should match length of shifts_a + + Returns: + `[[x0, y0, r0], [x1, y1, r1], ...]` for all the holes + """ + if not hasattr(shifts_a, "__len__") and shifts_a is not None: + shifts_a = [shifts_a] + if not hasattr(shifts_r, "__len__") and shifts_r is not None: + shifts_r = [shifts_r] + + xy = ln_defect(mirror_dims, defect_length) + + # Add column for radius + xyr = numpy.hstack((xy, numpy.ones((xy.shape[0], 1)))) + + # Shift holes + # Expand shifts as necessary + n_shifted = max(len(shifts_a), len(shifts_r)) + + tmp_a = numpy.array(shifts_a) + shifts_a = numpy.ones((n_shifted, )) + shifts_a[:len(tmp_a)] = tmp_a + + tmp_r = numpy.array(shifts_r) + shifts_r = numpy.ones((n_shifted, )) + shifts_r[:len(tmp_r)] = tmp_r + + x_removed = numpy.floor(defect_length / 2) + + for ind in range(n_shifted): + for sign in (-1, 1): + x_val = sign * (x_removed + ind + 1) + which = numpy.logical_and(xyr[:, 0] == x_val, xyr[:, 1] == 0) + xyr[which, ] = (x_val + numpy.sign(x_val) * shifts_a[ind], 0, shifts_r[ind]) + + return xyr + + +def r6_defect(mirror_dims: Tuple[int, int]) -> numpy.ndarray: + """ + R6 defect in a triangular lattice. + + Args: + mirror_dims: [x, y] mirror lengths (number of holes). Total number of holes + is 2 * n + 1 in each direction. + + Returns: + `[[x0, y0], [x1, y1], ...]` specifying hole centers. + """ + xy = triangular_lattice([2 * d + 1 for d in mirror_dims]) + + rem_holes_plus = numpy.array([[1, 0], + [0.5, +numpy.sqrt(3)/2], + [0.5, -numpy.sqrt(3)/2]]) + rem_holes = numpy.vstack((rem_holes_plus, -rem_holes_plus)) + + for rem_xy in rem_holes: + xy = xy[(xy != rem_xy).any(axis=1), ] + + return xy + + +def l3_shift_perturbed_defect( + mirror_dims: Tuple[int, int], + perturbed_radius: float = 1.1, + shifts_a: Sequence[float] = (), + shifts_r: Sequence[float] = () + ) -> numpy.ndarray: + """ + 3-hole defect with perturbed hole sizes intended to form an upwards-directed + beam. Can also include shifted holes along the defect line, intended + to give the mode a more gaussian profile to improve Q. + + Args: + mirror_dims: [x, y] mirror lengths (number of holes). Total number of holes + is 2 * n + 1 in each direction. + perturbed_radius: Amount to perturb the radius of the holes used for beam-forming + shifts_a: Percentage of a to shift (1st, 2nd, 3rd,...) holes along the defect line + shifts_r: Factor to multiply the radius by. Should match length of shifts_a + + Returns: + `[[x0, y0, r0], [x1, y1, r1], ...]` for all the holes + """ + xyr = ln_shift_defect(mirror_dims, 3, shifts_a, shifts_r) + + abs_x, abs_y = (numpy.fabs(xyr[:, i]) for i in (0, 1)) + + # Sorted unique xs and ys + # Ignore row y=0 because it might have shifted holes + xs = numpy.unique(abs_x[abs_x != 0]) + ys = numpy.unique(abs_y) + + # which holes should be perturbed? 
(xs[[3, 7]], ys[1]) and (xs[[2, 6]], ys[2])
+    perturbed_holes = {(xs[a], ys[b]) for a, b in ((3, 1), (7, 1), (2, 2), (6, 2))}
+    for row in xyr:
+        if (numpy.fabs(row[0]), numpy.fabs(row[1])) in perturbed_holes:
+            row[2] = perturbed_radius
+    return xyr
diff --git a/examples/phc.py b/examples/phc.py
new file mode 100644
index 0000000..f214a5a
--- /dev/null
+++ b/examples/phc.py
@@ -0,0 +1,178 @@
+from typing import Tuple, Sequence
+
+import numpy   # type: ignore
+from numpy import pi
+
+from masque import layer_t, Pattern, SubPattern, Label
+from masque.shapes import Polygon, Circle
+from masque.builder import Device, Port
+from masque.library import Library, DeviceLibrary
+from masque.file.klamath import writefile
+
+import pcgen
+
+
+HOLE_SCALE: float = 1000
+''' Radius for the 'hole' cell. Should be significantly bigger than
+    1 (minimum database unit) in order to have enough precision to
+    reasonably represent a polygonized circle (for GDS)
+'''
+
+def hole(layer: layer_t,
+         radius: float = HOLE_SCALE * 0.35,
+         ) -> Pattern:
+    """
+    Generate a pattern containing a single circular hole.
+
+    Args:
+        layer: Layer to draw the circle on.
+        radius: Circle radius.
+
+    Returns:
+        Pattern, named `'hole'`
+    """
+    pat = Pattern('hole', shapes=[
+        Circle(radius=radius, offset=(0, 0), layer=layer, dose=1.0)
+        ])
+    return pat
+
+
+def perturbed_l3(lattice_constant: float,
+                 hole: Pattern,
+                 trench_dose: float = 1.0,
+                 trench_layer: layer_t = (1, 0),
+                 shifts_a: Sequence[float] = (0.15, 0, 0.075),
+                 shifts_r: Sequence[float] = (1.0, 1.0, 1.0),
+                 xy_size: Tuple[int, int] = (10, 10),
+                 perturbed_radius: float = 1.1,
+                 trench_width: float = 1200,
+                 ) -> Device:
+    """
+    Generate a `Device` representing a perturbed L3 cavity.
+
+    Args:
+        lattice_constant: Distance between nearest neighbor holes
+        hole: `Pattern` object containing a single hole
+        trench_dose: Dose for the trenches. Default 1.0. (Hole dose is 1.0.)
+        trench_layer: Layer for the trenches, default `(1, 0)`.
+        shifts_a: passed to `pcgen.l3_shift`; specifies lattice constant
+            (1 - multiplicative factor) for shifting holes adjacent to
+            the defect (same row). Default `(0.15, 0, 0.075)` for first,
+            second, third holes.
+        shifts_r: passed to `pcgen.l3_shift`; specifies radius for perturbing
+            holes adjacent to the defect (same row). Default 1.0 for all holes.
+            Provided sequence should have same length as `shifts_a`.
+        xy_size: `(x, y)` number of mirror periods in each direction; total size is
+            `2 * n + 1` holes in each direction. Default (10, 10).
+        perturbed_radius: radius of holes perturbed to form an upwards-directed beam
+            (multiplicative factor). Default 1.1.
+        trench_width: Width of the undercut trenches. Default 1200.
+
+    Returns:
+        `Device` object representing the L3 design.
+ """ + xyr = pcgen.l3_shift_perturbed_defect(mirror_dims=xy_size, + perturbed_radius=perturbed_radius, + shifts_a=shifts_a, + shifts_r=shifts_r) + + pat = Pattern(f'L3p-a{lattice_constant:g}rp{perturbed_radius:g}') + pat.subpatterns += [SubPattern(hole, offset=(lattice_constant * x, + lattice_constant * y), scale=r * lattice_constant / HOLE_SCALE) + for x, y, r in xyr] + + min_xy, max_xy = pat.get_bounds() + trench_dx = max_xy[0] - min_xy[0] + + pat.shapes += [ + Polygon.rect(ymin=max_xy[1], xmin=min_xy[0], lx=trench_dx, ly=trench_width, + layer=trench_layer, dose=trench_dose), + Polygon.rect(ymax=min_xy[1], xmin=min_xy[0], lx=trench_dx, ly=trench_width, + layer=trench_layer, dose=trench_dose), + ] + + ports = { + 'input': Port((-lattice_constant * xy_size[0], 0), rotation=0, ptype=1), + 'output': Port((lattice_constant * xy_size[0], 0), rotation=pi, ptype=1), + } + + return Device(pat, ports) + + +def waveguide(lattice_constant: float, + hole: Pattern, + length: int, + mirror_periods: int, + ) -> Device: + xy = pcgen.waveguide(length=length + 2, num_mirror=mirror_periods) + + pat = Pattern(f'_wg-a{lattice_constant:g}l{length}') + pat.subpatterns += [SubPattern(hole, offset=(lattice_constant * x, + lattice_constant * y), scale=lattice_constant / HOLE_SCALE) + for x, y in xy] + + ports = { + 'left': Port((-lattice_constant * length / 2, 0), rotation=0, ptype=1), + 'right': Port((lattice_constant * length / 2, 0), rotation=pi, ptype=1), + } + return Device(pat, ports) + + +def bend(lattice_constant: float, + hole: Pattern, + mirror_periods: int, + ) -> Device: + xy = pcgen.wgbend(num_mirror=mirror_periods) + + pat_half = Pattern(f'_wgbend_half-a{lattice_constant:g}l{mirror_periods}') + pat_half.subpatterns += [SubPattern(hole, offset=(lattice_constant * x, + lattice_constant * y), scale=lattice_constant / HOLE_SCALE) + for x, y in xy] + + pat = Pattern(f'_wgbend-a{lattice_constant:g}l{mirror_periods}') + pat.addsp(pat_half, offset=(0, 0), rotation=0, mirrored=(False, False)) + pat.addsp(pat_half, offset=(0, 0), rotation=-2 * pi / 3, mirrored=(True, False)) + + + ports = { + 'left': Port((-lattice_constant * mirror_periods, 0), rotation=0, ptype=1), + 'right': Port((lattice_constant * mirror_periods / 2, + lattice_constant * mirror_periods * numpy.sqrt(3) / 2), rotation=pi * 4 / 3, ptype=1), + } + return Device(pat, ports) + + +def label_ports(device: Device, layer: layer_t = (3, 0)) -> Device: + for name, port in device.ports.items(): + angle_deg = numpy.rad2deg(port.rotation) + device.pattern.labels += [ + Label(string=f'{name} (angle {angle_deg:g})', layer=layer, offset=port.offset) + ] + return device + + +def main(): + hole_layer = (1, 2) + a = 512 + hole_pat = hole(layer=hole_layer) + wg0 = label_ports(waveguide(lattice_constant=a, hole=hole_pat, length=10, mirror_periods=5)) + wg1 = label_ports(waveguide(lattice_constant=a, hole=hole_pat, length=5, mirror_periods=5)) + bend0 = label_ports(bend(lattice_constant=a, hole=hole_pat, mirror_periods=5)) + l3cav = label_ports(perturbed_l3(lattice_constant=a, hole=hole_pat, xy_size=(4, 10))) + + dev = Device(name='my_bend', ports={}) + dev.place(wg0, offset=(0, 0), port_map={'left': 'in', 'right': 'signal'}) + dev.plug(wg0, {'signal': 'left'}) + dev.plug(bend0, {'signal': 'left'}) + dev.plug(wg1, {'signal': 'left'}) + dev.plug(bend0, {'signal': 'right'}) + dev.plug(wg0, {'signal': 'left'}) + dev.plug(l3cav, {'signal': 'input'}) + dev.plug(wg0, {'signal': 'left'}) + + writefile(dev.pattern, 'phc.gds', 1e-9, 1e-3) + dev.pattern.visualize() + + 
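+# NOTE: main() above still drives plug()/place() with bare `Device` objects.
+# Under the library-based signatures introduced elsewhere in this patch
+# (`Device.plug(library, name, map_in, ...)` in masque/builder/devices.py),
+# the same chain would instead pull parts from a {name: Device} mapping.
+# A rough sketch (with `dev_lib` as a hypothetical DeviceLibrary holding the
+# parts built above):
+#
+#     dev = Device(ports={})
+#     dev.place(dev_lib, 'wg0', offset=(0, 0),
+#               port_map={'left': 'in', 'right': 'signal'})
+#     dev.plug(dev_lib, 'bend0', {'signal': 'left'})
+#     dev.plug(dev_lib, 'l3cav', {'signal': 'input'})
+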
+if __name__ == '__main__': + main() diff --git a/masque/__init__.py b/masque/__init__.py index 7881bdb..fde4571 100644 --- a/masque/__init__.py +++ b/masque/__init__.py @@ -24,17 +24,17 @@ metaclass is used to auto-generate slots based on superclass type annotations. - File I/O submodules are imported by `masque.file` to avoid creating hard dependencies on external file-format reader/writers - - Pattern locking/unlocking is quite slow for large hierarchies. """ -from .error import PatternError, PatternLockedError +from .error import PatternError from .shapes import Shape from .label import Label from .subpattern import SubPattern from .pattern import Pattern from .utils import layer_t, annotations_t -from .library import Library, DeviceLibrary +from .library import Library, PatternGenerator +from .builder import DeviceLibrary, LibDeviceLibrary, Device, Port __author__ = 'Jan Petykiewicz' diff --git a/masque/builder/__init__.py b/masque/builder/__init__.py index 0c083b7..9c9e45d 100644 --- a/masque/builder/__init__.py +++ b/masque/builder/__init__.py @@ -1,3 +1,4 @@ from .devices import Port, Device from .utils import ell from .tools import Tool +from .device_library import DeviceLibrary, LibDeviceLibrary diff --git a/masque/library/device_library.py b/masque/builder/device_library.py similarity index 96% rename from masque/library/device_library.py rename to masque/builder/device_library.py index e38b8fe..efa816f 100644 --- a/masque/library/device_library.py +++ b/masque/builder/device_library.py @@ -79,16 +79,6 @@ class DeviceLibrary: def __repr__(self) -> str: return '' - def set_const(self, const: Device) -> None: - """ - Convenience function to avoid having to manually wrap - already-generated Device objects into callables. - - Args: - const: Pre-generated device object - """ - self.generators[const.pattern.name] = lambda: const - def add( self: D, other: D, @@ -175,7 +165,6 @@ class DeviceLibrary: def build_dev() -> Device: dev = fn() dev.pattern = dev2pat(dev) - dev.pattern.rename(prefix + name) return dev self[prefix + name] = build_dev @@ -200,8 +189,8 @@ class DeviceLibrary: def build_wrapped_dev() -> Device: old_dev = self[old_name] - wrapper = Pattern(name=name) - wrapper.addsp(old_dev.pattern) + wrapper = Pattern() + wrapper.addsp(old_name) return Device(wrapper, old_dev.ports) self[name] = build_wrapped_dev diff --git a/masque/builder/devices.py b/masque/builder/devices.py index b681f33..50b8d94 100644 --- a/masque/builder/devices.py +++ b/masque/builder/devices.py @@ -125,7 +125,7 @@ class Device(Copyable, Mirrorable): - `Device(pattern, ports={'A': port_a, 'C': port_c})` uses an existing pattern and defines some ports. - - `Device(name='my_dev_name', ports=None)` makes a new empty pattern with + - `Device(ports=None)` makes a new empty pattern with default ports ('A' and 'B', in opposite directions, at (0, 0)). - `my_device.build('my_layout')` makes a new pattern and instantiates @@ -182,7 +182,6 @@ class Device(Copyable, Mirrorable): ports: Optional[Dict[str, Port]] = None, *, tools: Union[None, Tool, Dict[Optional[str], Tool]] = None, - name: Optional[str] = None, ) -> None: """ If `ports` is `None`, two default ports ('A' and 'B') are created. @@ -190,14 +189,7 @@ class Device(Copyable, Mirrorable): (attached devices will be placed to the left) and 'B' has rotation pi (attached devices will be placed to the right). 
""" - if pattern is not None: - if name is not None: - raise DeviceError('Only one of `pattern` and `name` may be specified') - self.pattern = pattern - else: - if name is None: - raise DeviceError('Must specify either `pattern` or `name`') - self.pattern = Pattern(name=name) + self.pattern = pattern or Pattern() if ports is None: self.ports = { @@ -336,25 +328,22 @@ class Device(Copyable, Mirrorable): return self - def build(self, name: str) -> 'Device': + def build(self) -> 'Device': """ Begin building a new device around an instance of the current device (rather than modifying the current device). - Args: - name: A name for the new device - Returns: The new `Device` object. """ - pat = Pattern(name) + # TODO lib: this needs a name for self, rather than for the built thing + pat = Pattern() pat.addsp(self.pattern) new = Device(pat, ports=self.ports, tools=self.tools) return new def as_interface( self, - name: str, in_prefix: str = 'in_', out_prefix: str = '', port_map: Optional[Union[Dict[str, str], Sequence[str]]] = None @@ -380,7 +369,6 @@ class Device(Copyable, Mirrorable): current device. Args: - name: Name for the new device in_prefix: Prepended to port names for newly-created ports with reversed directions compared to the current device. out_prefix: Prepended to port names for ports which are directly @@ -424,12 +412,13 @@ class Device(Copyable, Mirrorable): if duplicates: raise DeviceError(f'Duplicate keys after prefixing, try a different prefix: {duplicates}') - new = Device(name=name, ports={**ports_in, **ports_out}, tools=self.tools) + new = Device(ports={**ports_in, **ports_out}, tools=self.tools) return new def plug( self: D, - other: O, + library: Mapping[str, 'Device'], + name: str, map_in: Dict[str, str], map_out: Optional[Dict[str, Optional[str]]] = None, *, @@ -438,27 +427,29 @@ class Device(Copyable, Mirrorable): set_rotation: Optional[bool] = None, ) -> D: """ - Instantiate the device `other` into the current device, connecting + Instantiate a device `library[name]` into the current device, connecting the ports specified by `map_in` and renaming the unconnected ports specified by `map_out`. Examples: ========= - - `my_device.plug(subdevice, {'A': 'C', 'B': 'B'}, map_out={'D': 'myport'})` - instantiates `subdevice` into `my_device`, plugging ports 'A' and 'B' + - `my_device.plug(lib, 'subdevice', {'A': 'C', 'B': 'B'}, map_out={'D': 'myport'})` + instantiates `lib['subdevice']` into `my_device`, plugging ports 'A' and 'B' of `my_device` into ports 'C' and 'B' of `subdevice`. The connected ports are removed and any unconnected ports from `subdevice` are added to `my_device`. Port 'D' of `subdevice` (unconnected) is renamed to 'myport'. - - `my_device.plug(wire, {'myport': 'A'})` places port 'A' of `wire` at 'myport' - of `my_device`. If `wire` has only two ports (e.g. 'A' and 'B'), no `map_out`, - argument is provided, and the `inherit_name` argument is not explicitly - set to `False`, the unconnected port of `wire` is automatically renamed to - 'myport'. This allows easy extension of existing ports without changing - their names or having to provide `map_out` each time `plug` is called. + - `my_device.plug(lib, 'wire', {'myport': 'A'})` places port 'A' of `lib['wire']` + at 'myport' of `my_device`. + If `'wire'` has only two ports (e.g. 'A' and 'B'), no `map_out` argument is + provided, and the `inherit_name` argument is not explicitly set to `False`, + the unconnected port of `wire` is automatically renamed to 'myport'. 
This
+            allows easy extension of existing ports without changing their names or
+            having to provide `map_out` each time `plug` is called.
 
         Args:
-            other: A device to instantiate into the current device.
+            library: A `DeviceLibrary` containing the device to be instantiated.
+            name: The name of the device to be instantiated (from `library`).
             map_in: Dict of `{'self_port': 'other_port'}` mappings, specifying
                 port connections between the two devices.
             map_out: Dict of `{'old_name': 'new_name'}` mappings, specifying
@@ -513,13 +504,14 @@ class Device(Copyable, Mirrorable):
                 del self.ports[ki]
                 map_out[vi] = None
 
-        self.place(other, offset=translation, rotation=rotation, pivot=pivot,
+        self.place(library, name, offset=translation, rotation=rotation, pivot=pivot,
                    mirrored=mirrored, port_map=map_out, skip_port_check=True)
         return self
 
     def place(
             self: D,
-            other: O,
+            library: Mapping[str, 'Device'],
+            name: str,
             *,
             offset: ArrayLike = (0, 0),
             rotation: float = 0,
@@ -529,7 +521,7 @@ class Device(Copyable, Mirrorable):
             skip_port_check: bool = False,
             ) -> D:
         """
-        Instantiate the device `other` into the current device, adding its
+        Instantiate the device `library[name]` into the current device, adding its
         ports to those of the current device (but not connecting any ports).
 
         Mirroring is applied before rotation; translation (`offset`) is applied last.
 
@@ -543,16 +535,17 @@ class Device(Copyable, Mirrorable):
             rather than the port name on the original `pad` device.
 
         Args:
-            other: A device to instantiate into the current device.
-            offset: Offset at which to place `other`. Default (0, 0).
-            rotation: Rotation applied to `other` before placement. Default 0.
+            library: A `DeviceLibrary` containing the device to be instantiated.
+            name: The name of the device to be instantiated (from `library`).
+            offset: Offset at which to place the instance. Default (0, 0).
+            rotation: Rotation applied to the instance before placement. Default 0.
             pivot: Rotation is applied around this pivot point (default (0, 0)).
                 Rotation is applied prior to translation (`offset`).
-            mirrored: Whether `other` should be mirrored across the x and y axes.
+            mirrored: Whether the instance should be mirrored across the x and y axes.
                 Mirroring is applied before translation and rotation.
             port_map: Dict of `{'old_name': 'new_name'}` mappings, specifying
-                new names for ports in `other`. New names can be `None`, which will
-                delete those ports.
+                new names for ports in the instantiated device. New names can be
+                `None`, which will delete those ports.
             skip_port_check: Can be used to skip the internal call to `check_ports`,
                 in case it has already been performed elsewhere.
 
         Returns:
             self
 
         Raises:
             `DeviceError` if any ports specified in `map_in` or `map_out` do not
-                exist in `self.ports` or `other_names`.
+                exist in `self.ports` or `library[name].ports`.
             `DeviceError` if there are any duplicate names after `map_in` and `map_out`
                 are applied.
         """
@@ -572,6 +565,8 @@
         if port_map is None:
             port_map = {}
 
+        other = library[name]
+
         if not skip_port_check:
             self.check_ports(other.ports.keys(), map_in=None, map_out=port_map)
 
@@ -589,7 +584,7 @@
             p.translate(offset)
             self.ports[name] = p
 
-        sp = SubPattern(other.pattern, mirrored=mirrored)
+        sp = SubPattern(name, mirrored=mirrored)   #TODO figure out how this should work?!
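+        #      (refs are now held by-name, so `name` presumably only needs to
+        #      resolve within the enclosing library when the pattern is written)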
sp.rotate_around(pivot, rotation) sp.translate(offset) self.pattern.subpatterns.append(sp) @@ -748,19 +743,6 @@ class Device(Copyable, Mirrorable): self._dead = True return self - def rename(self: D, name: str) -> D: - """ - Renames the pattern and returns the device - - Args: - name: The new name - - Returns: - self - """ - self.pattern.name = name - return self - def __repr__(self) -> str: s = f' D: @@ -873,7 +854,7 @@ class Device(Copyable, Mirrorable): port_name = tuple(portspec)[0] return self.path(port_name, ccw, extensions[port_name], tool_port_names=tool_port_names) else: - dev = Device(name='', ports=ports, tools=self.tools).as_interface(container_name) + dev = Device(name='', ports=ports, tools=self.tools).as_interface() for name, length in extensions.items(): dev.path(name, ccw, length, tool_port_names=tool_port_names) return self.plug(dev, {sp: 'in_' + sp for sp in ports.keys()}) # TODO safe to use 'in_'? diff --git a/masque/error.py b/masque/error.py index 54290f9..3cbd0f7 100644 --- a/masque/error.py +++ b/masque/error.py @@ -11,13 +11,6 @@ class PatternError(MasqueError): """ pass -class PatternLockedError(PatternError): - """ - Exception raised when trying to modify a locked pattern - """ - def __init__(self): - PatternError.__init__(self, 'Tried to modify a locked Pattern, subpattern, or shape') - class LibraryError(MasqueError): """ diff --git a/masque/file/dxf.py b/masque/file/dxf.py index 4a6b9e3..c506f1e 100644 --- a/masque/file/dxf.py +++ b/masque/file/dxf.py @@ -1,7 +1,7 @@ """ DXF file format readers and writers """ -from typing import List, Any, Dict, Tuple, Callable, Union, Sequence, Iterable +from typing import List, Any, Dict, Tuple, Callable, Union, Sequence, Iterable, Mapping import re import io import base64 @@ -10,7 +10,7 @@ import logging import pathlib import gzip -import numpy # type: ignore +import numpy import ezdxf # type: ignore from .. import Pattern, SubPattern, PatternError, Label, Shape @@ -29,12 +29,13 @@ DEFAULT_LAYER = 'DEFAULT' def write( - pattern: Pattern, + top_name: str, + library: Mapping[str, Pattern], stream: io.TextIOBase, *, modify_originals: bool = False, dxf_version='AC1024', - disambiguate_func: Callable[[Iterable[Pattern]], None] = None, + disambiguate_func: Callable[[Iterable[str]], List[str]] = None, ) -> None: """ Write a `Pattern` to a DXF file, by first calling `.polygonize()` to change the shapes @@ -60,10 +61,12 @@ def write( array with rotated instances must be manhattan _after_ having a compensating rotation applied. Args: - patterns: A Pattern or list of patterns to write to the stream. + top_name: Name of the top-level pattern to write. + library: A {name: Pattern} mapping of patterns. Only `top_name` and patterns referenced + by it are written. stream: Stream object to write to. modify_original: If `True`, the original pattern is modified as part of the writing - process. Otherwise, a copy is made and `deepunlock()`-ed. + process. Otherwise, a copy is made. Default `False`. disambiguate_func: Function which takes a list of patterns and alters them to make their names valid and unique. Default is `disambiguate_pattern_names`. 
@@ -75,11 +78,14 @@ def write( assert(disambiguate_func is not None) if not modify_originals: - pattern = pattern.deepcopy().deepunlock() + library = library.deepcopy() - # Get a dict of id(pattern) -> pattern - patterns_by_id = pattern.referenced_patterns_by_id() - disambiguate_func(patterns_by_id.values()) + pattern = library[top_name] + + old_names = list(library.keys()) + new_names = disambiguate_func(old_names) + renamed_lib = {new_name: library[old_name] + for old_name, new_name in zip(old_names, new_names)} # Create library lib = ezdxf.new(dxf_version, setup=True) @@ -89,9 +95,9 @@ def write( _subpatterns_to_refs(msp, pattern.subpatterns) # Now create a block for each referenced pattern, and add in any shapes - for pat in patterns_by_id.values(): + for name, pat in renamed_lib.items(): assert(pat is not None) - block = lib.blocks.new(name=pat.name) + block = lib.blocks.new(name=name) _shapes_to_elements(block, pat.shapes) _labels_to_texts(block, pat.labels) @@ -101,7 +107,8 @@ def write( def writefile( - pattern: Pattern, + top_name: str, + library: Mapping[str, Pattern], filename: Union[str, pathlib.Path], *args, **kwargs, @@ -112,7 +119,9 @@ def writefile( Will automatically compress the file if it has a .gz suffix. Args: - pattern: `Pattern` to save + top_name: Name of the top-level pattern to write. + library: A {name: Pattern} mapping of patterns. Only `top_name` and patterns referenced + by it are written. filename: Filename to save to. *args: passed to `dxf.write` **kwargs: passed to `dxf.write` @@ -124,7 +133,7 @@ def writefile( open_func = open with open_func(path, mode='wt') as stream: - write(pattern, stream, *args, **kwargs) + write(top_name, library, stream, *args, **kwargs) def readfile( @@ -156,7 +165,7 @@ def readfile( def read( stream: io.TextIOBase, clean_vertices: bool = True, - ) -> Tuple[Pattern, Dict[str, Any]]: + ) -> Tuple[Dict[str, Pattern], Dict[str, Any]]: """ Read a dxf file and translate it into a dict of `Pattern` objects. DXF `Block`s are translated into `Pattern` objects; `LWPolyline`s are translated into polygons, and `Insert`s @@ -176,26 +185,20 @@ def read( lib = ezdxf.read(stream) msp = lib.modelspace() - pat = _read_block(msp, clean_vertices) - patterns = [pat] + [_read_block(bb, clean_vertices) for bb in lib.blocks if bb.name != '*Model_Space'] - - # Create a dict of {pattern.name: pattern, ...}, then fix up all subpattern.pattern entries - # according to the subpattern.identifier (which is deleted after use). 
-    patterns_dict = dict(((p.name, p) for p in patterns))
-    for p in patterns_dict.values():
-        for sp in p.subpatterns:
-            sp.pattern = patterns_dict[sp.identifier[0]]
-            del sp.identifier
+    npat = _read_block(msp, clean_vertices)
+    patterns_dict = dict([npat]
+                         + [_read_block(bb, clean_vertices) for bb in lib.blocks if bb.name != '*Model_Space'])
 
     library_info = {
         'layers': [ll.dxfattribs() for ll in lib.layers]
         }
 
-    return pat, library_info
+    return patterns_dict, library_info
 
 
-def _read_block(block, clean_vertices: bool) -> Pattern:
-    pat = Pattern(block.name)
+def _read_block(block, clean_vertices: bool) -> Tuple[str, Pattern]:
+    name = block.name
+    pat = Pattern()
     for element in block:
         eltype = element.dxftype()
         if eltype in ('POLYLINE', 'LWPOLYLINE'):
@@ -258,12 +261,12 @@ def _read_block(block, clean_vertices: bool) -> Pattern:
             offset = numpy.array(attr.get('insert', (0, 0, 0)))[:2]
 
             args = {
+                'target': attr.get('name', None),
                 'offset': offset,
                 'scale': scale,
                 'mirrored': mirrored,
                 'rotation': rotation,
-                'pattern': None,
-                'identifier': (attr.get('name', None),),
                 }
 
             if 'column_count' in attr:
@@ -274,7 +277,7 @@ def _read_block(block, clean_vertices: bool) -> Pattern:
             pat.subpatterns.append(SubPattern(**args))
         else:
             logger.warning(f'Ignoring DXF element {element.dxftype()} (not implemented).')
-    return pat
+    return name, pat
 
 
 def _subpatterns_to_refs(
@@ -282,9 +285,9 @@ def _subpatterns_to_refs(
         subpatterns: List[SubPattern],
         ) -> None:
     for subpat in subpatterns:
-        if subpat.pattern is None:
+        if subpat.target is None:
             continue
-        encoded_name = subpat.pattern.name
+        encoded_name = subpat.target
 
         rotation = (subpat.rotation * 180 / numpy.pi) % 360
         attribs = {
@@ -360,18 +363,24 @@ def _mlayer2dxf(layer: layer_t) -> str:
 
 
 def disambiguate_pattern_names(
-        patterns: Iterable[Pattern],
+        names: Iterable[str],
         max_name_length: int = 32,
         suffix_length: int = 6,
-        dup_warn_filter: Callable[[str], bool] = None,      # If returns False, don't warn about this name
-        ) -> None:
-    used_names = []
-    for pat in patterns:
-        sanitized_name = re.compile(r'[^A-Za-z0-9_\?\$]').sub('_', pat.name)
+        ) -> List[str]:
+    """
+    Args:
+        names: List of pattern names to disambiguate
+        max_name_length: Names longer than this will be truncated
+        suffix_length: Names which get truncated are truncated by this many extra characters. This is to
+            leave room for a suffix if one is necessary.
+    """
+    new_names = []
+    for name in names:
+        sanitized_name = re.compile(r'[^A-Za-z0-9_\?\$]').sub('_', name)
 
         i = 0
         suffixed_name = sanitized_name
-        while suffixed_name in used_names or suffixed_name == '':
+        while suffixed_name in new_names or suffixed_name == '':
             suffix = base64.b64encode(struct.pack('>Q', i), b'$?').decode('ASCII')
             suffixed_name = sanitized_name + '$' + suffix[:-1].lstrip('A')
             i += 1
 
         if sanitized_name == '':
             logger.warning(f'Empty pattern name saved as "{suffixed_name}"')
         elif suffixed_name != sanitized_name:
-            if dup_warn_filter is None or dup_warn_filter(pat.name):
-                logger.warning(f'Pattern name "{pat.name}" ({sanitized_name}) appears multiple times;\n'
-                               + f' renaming to "{suffixed_name}"')
+            logger.warning(f'Pattern name "{name}" ({sanitized_name}) appears multiple times;\n'
+                           + f' renaming to "{suffixed_name}"')
 
         if len(suffixed_name) == 0:
             # Should never happen since zero-length names are replaced
-            raise PatternError(f'Zero-length name after sanitize,\n originally "{pat.name}"')
+            raise PatternError(f'Zero-length name after sanitize,\n originally "{name}"')
 
         if len(suffixed_name) > max_name_length:
             raise PatternError(f'Pattern name "{suffixed_name!r}" length > {max_name_length} after encode,\n'
-                               + f' originally "{pat.name}"')
-
-        pat.name = suffixed_name
-        used_names.append(suffixed_name)
+                               + f' originally "{name}"')
+        new_names.append(suffixed_name)
+    return new_names
diff --git a/masque/file/gdsii.py b/masque/file/gdsii.py
index 6bd4d1a..2581502 100644
--- a/masque/file/gdsii.py
+++ b/masque/file/gdsii.py
@@ -53,18 +53,22 @@ path_cap_map = {
     }
 
 
+def rint_cast(val: ArrayLike) -> NDArray[numpy.int32]:
+    return numpy.rint(val, dtype=numpy.int32, casting='unsafe')
+
+
 def write(
-        patterns: Union[Pattern, Sequence[Pattern]],
+        library: Mapping[str, Pattern],
         stream: BinaryIO,
         meters_per_unit: float,
         logical_units_per_unit: float = 1,
         library_name: str = 'masque-klamath',
         *,
         modify_originals: bool = False,
-        disambiguate_func: Callable[[Iterable[Pattern]], None] = None,
+        disambiguate_func: Optional[Callable[[Iterable[str]], List[str]]] = None,
        ) -> None:
     """
-    Convert a `Pattern` or list of patterns to a GDSII stream, and then mapping data as follows:
+    Convert a library to a GDSII stream, mapping data as follows:
         Pattern -> GDSII structure
         SubPattern -> GDSII SREF or AREF
         Path -> GSDII path
@@ -85,7 +89,7 @@
     prior to calling this function.
 
     Args:
-        patterns: A Pattern or list of patterns to convert.
+        library: A {name: Pattern} mapping of patterns to write.
         meters_per_unit: Written into the GDSII file, meters per (database) length unit.
             All distances are assumed to be an integer multiple of this unit, and are stored as such.
         logical_units_per_unit: Written into the GDSII file. Allows the GDSII to specify a
@@ -94,52 +98,48 @@
            scaling relative to the database units. Default 1.
         library_name: Library name written into the GDSII file.
             Default 'masque-klamath'.
 
         modify_originals: If `True`, the original pattern is modified as part of the writing
-            process. Otherwise, a copy is made and `deepunlock()`-ed.
+            process. Otherwise, a copy is made.
             Default `False`.
 
-        disambiguate_func: Function which takes a list of patterns and alters them
-            to make their names valid and unique. Default is `disambiguate_pattern_names`, which
-            attempts to adhere to the GDSII standard as well as possible.
+        disambiguate_func: Function which takes a list of pattern names and returns a list of names
+            altered to be valid and unique.
Default is `disambiguate_pattern_names`, which
+            attempts to adhere to the GDSII standard reasonably well.
             WARNING: No additional error checking is performed on the results.
     """
-    if isinstance(patterns, Pattern):
-        patterns = [patterns]
-
     if disambiguate_func is None:
-        disambiguate_func = disambiguate_pattern_names   # type: ignore
-    assert(disambiguate_func is not None)   # placate mypy
+        disambiguate_func = disambiguate_pattern_names
 
     if not modify_originals:
-        patterns = [p.deepunlock() for p in copy.deepcopy(patterns)]
+        library = copy.deepcopy(library)
 
-    patterns = [p.wrap_repeated_shapes() for p in patterns]
+    # (wrap_repeated_shapes() is assumed to return the resulting Pattern)
+    library = {name: pat.wrap_repeated_shapes() for name, pat in library.items()}
+
+    old_names = list(library.keys())
+    new_names = disambiguate_func(old_names)
+    renamed_lib = {new_name: library[old_name]
+                   for old_name, new_name in zip(old_names, new_names)}
 
     # Create library
-    header = klamath.library.FileHeader(name=library_name.encode('ASCII'),
-                                        user_units_per_db_unit=logical_units_per_unit,
-                                        meters_per_db_unit=meters_per_unit)
+    header = klamath.library.FileHeader(
+        name=library_name.encode('ASCII'),
+        user_units_per_db_unit=logical_units_per_unit,
+        meters_per_db_unit=meters_per_unit,
+        )
     header.write(stream)
 
-    # Get a dict of id(pattern) -> pattern
-    patterns_by_id = {id(pattern): pattern for pattern in patterns}
-    for pattern in patterns:
-        for i, p in pattern.referenced_patterns_by_id().items():
-            patterns_by_id[i] = p
-
-    disambiguate_func(patterns_by_id.values())
-
     # Now create a structure for each pattern, and add in any Boundary and SREF elements
-    for pat in patterns_by_id.values():
+    for name, pat in renamed_lib.items():
         elements: List[klamath.elements.Element] = []
         elements += _shapes_to_elements(pat.shapes)
         elements += _labels_to_texts(pat.labels)
         elements += _subpatterns_to_refs(pat.subpatterns)
-        klamath.library.write_struct(stream, name=pat.name.encode('ASCII'), elements=elements)
+        klamath.library.write_struct(stream, name=name.encode('ASCII'), elements=elements)
     records.ENDLIB.write(stream, None)
 
 
 def writefile(
-        patterns: Union[Sequence[Pattern], Pattern],
+        library: Mapping[str, Pattern],
        filename: Union[str, pathlib.Path],
        *args,
        **kwargs,
@@ -150,7 +150,7 @@
     Will automatically compress the file if it has a .gz suffix.
 
     Args:
-        patterns: `Pattern` or list of patterns to save
+        library: {name: Pattern} pairs to save.
         filename: Filename to save to.
         *args: passed to `write()`
         **kwargs: passed to `write()`
@@ -216,22 +216,14 @@
     """
     library_info = _read_header(stream)
 
-    patterns = []
+    patterns_dict = {}
     found_struct = records.BGNSTR.skip_past(stream)
     while found_struct:
         name = records.STRNAME.skip_and_read(stream)
-        pat = read_elements(stream, name=name.decode('ASCII'), raw_mode=raw_mode)
-        patterns.append(pat)
+        pat = read_elements(stream, raw_mode=raw_mode)
+        patterns_dict[name.decode('ASCII')] = pat
         found_struct = records.BGNSTR.skip_past(stream)
 
-    # Create a dict of {pattern.name: pattern, ...}, then fix up all subpattern.pattern entries
-    # according to the subpattern.identifier (which is deleted after use).
- patterns_dict = dict(((p.name, p) for p in patterns)) - for p in patterns_dict.values(): - for sp in p.subpatterns: - sp.pattern = patterns_dict[sp.identifier[0]] - del sp.identifier - return patterns_dict, library_info @@ -250,7 +242,6 @@ def _read_header(stream: BinaryIO) -> Dict[str, Any]: def read_elements( stream: BinaryIO, - name: str, raw_mode: bool = True, ) -> Pattern: """ @@ -265,7 +256,7 @@ def read_elements( Returns: A pattern containing the elements that were read. """ - pat = Pattern(name) + pat = Pattern() elements = klamath.library.read_elements(stream) for element in elements: @@ -276,10 +267,12 @@ def read_elements( path = _gpath_to_mpath(element, raw_mode) pat.shapes.append(path) elif isinstance(element, klamath.elements.Text): - label = Label(offset=element.xy.astype(float), - layer=element.layer, - string=element.string.decode('ASCII'), - annotations=_properties_to_annotations(element.properties)) + label = Label( + offset=element.xy.astype(float), + layer=element.layer, + string=element.string.decode('ASCII'), + annotations=_properties_to_annotations(element.properties), + ) pat.labels.append(label) elif isinstance(element, klamath.elements.Reference): pat.subpatterns.append(_ref_to_subpat(element)) @@ -304,8 +297,7 @@ def _mlayer2gds(mlayer: layer_t) -> Tuple[int, int]: def _ref_to_subpat(ref: klamath.library.Reference) -> SubPattern: """ - Helper function to create a SubPattern from an SREF or AREF. Sets subpat.pattern to None - and sets the instance .identifier to (struct_name,). + Helper function to create a SubPattern from an SREF or AREF. Sets subpat.target to struct_name. """ xy = ref.xy.astype(float) offset = xy[0] @@ -317,14 +309,15 @@ def _ref_to_subpat(ref: klamath.library.Reference) -> SubPattern: repetition = Grid(a_vector=a_vector, b_vector=b_vector, a_count=a_count, b_count=b_count) - subpat = SubPattern(pattern=None, - offset=offset, - rotation=numpy.deg2rad(ref.angle_deg), - scale=ref.mag, - mirrored=(ref.invert_y, False), - annotations=_properties_to_annotations(ref.properties), - repetition=repetition) - subpat.identifier = (ref.struct_name.decode('ASCII'),) + subpat = SubPattern( + pattern=ref.struct_name.decode('ASCII'), + offset=offset, + rotation=numpy.deg2rad(ref.angle_deg), + scale=ref.mag, + mirrored=(ref.invert_y, False), + annotations=_properties_to_annotations(ref.properties), + repetition=repetition, + ) return subpat @@ -334,34 +327,36 @@ def _gpath_to_mpath(gpath: klamath.library.Path, raw_mode: bool) -> Path: else: raise PatternError(f'Unrecognized path type: {gpath.path_type}') - mpath = Path(vertices=gpath.xy.astype(float), - layer=gpath.layer, - width=gpath.width, - cap=cap, - offset=numpy.zeros(2), - annotations=_properties_to_annotations(gpath.properties), - raw=raw_mode, - ) + mpath = Path( + vertices=gpath.xy.astype(float), + layer=gpath.layer, + width=gpath.width, + cap=cap, + offset=numpy.zeros(2), + annotations=_properties_to_annotations(gpath.properties), + raw=raw_mode, + ) if cap == Path.Cap.SquareCustom: mpath.cap_extensions = gpath.extension return mpath def _boundary_to_polygon(boundary: klamath.library.Boundary, raw_mode: bool) -> Polygon: - return Polygon(vertices=boundary.xy[:-1].astype(float), - layer=boundary.layer, - offset=numpy.zeros(2), - annotations=_properties_to_annotations(boundary.properties), - raw=raw_mode, - ) + return Polygon( + vertices=boundary.xy[:-1].astype(float), + layer=boundary.layer, + offset=numpy.zeros(2), + annotations=_properties_to_annotations(boundary.properties), + raw=raw_mode, + ) 
def _subpatterns_to_refs(subpatterns: List[SubPattern]) -> List[klamath.library.Reference]: refs = [] for subpat in subpatterns: - if subpat.pattern is None: + if subpat.target is None: continue - encoded_name = subpat.pattern.name.encode('ASCII') + encoded_name = subpat.target.encode('ASCII') # Note: GDS mirrors first and rotates second mirror_across_x, extra_angle = normalize_mirror(subpat.mirrored) @@ -377,32 +372,39 @@ def _subpatterns_to_refs(subpatterns: List[SubPattern]) -> List[klamath.library. rep.a_vector * rep.a_count, b_vector * b_count, ] - aref = klamath.library.Reference(struct_name=encoded_name, - xy=numpy.round(xy).astype(int), - colrow=(numpy.round(rep.a_count), numpy.round(rep.b_count)), - angle_deg=angle_deg, - invert_y=mirror_across_x, - mag=subpat.scale, - properties=properties) + aref = klamath.library.Reference( + struct_name=encoded_name, + xy=rint_cast(xy), + colrow=(numpy.rint(rep.a_count), numpy.rint(rep.b_count)), + angle_deg=angle_deg, + invert_y=mirror_across_x, + mag=subpat.scale, + properties=properties, + ) refs.append(aref) elif rep is None: - ref = klamath.library.Reference(struct_name=encoded_name, - xy=numpy.round([subpat.offset]).astype(int), - colrow=None, - angle_deg=angle_deg, - invert_y=mirror_across_x, - mag=subpat.scale, - properties=properties) + ref = klamath.library.Reference( + struct_name=encoded_name, + xy=rint_cast([subpat.offset]), + colrow=None, + angle_deg=angle_deg, + invert_y=mirror_across_x, + mag=subpat.scale, + properties=properties, + ) refs.append(ref) else: - new_srefs = [klamath.library.Reference(struct_name=encoded_name, - xy=numpy.round([subpat.offset + dd]).astype(int), - colrow=None, - angle_deg=angle_deg, - invert_y=mirror_across_x, - mag=subpat.scale, - properties=properties) - for dd in rep.displacements] + new_srefs = [ + klamath.library.Reference( + struct_name=encoded_name, + xy=rint_cast([subpat.offset + dd]), + colrow=None, + angle_deg=angle_deg, + invert_y=mirror_across_x, + mag=subpat.scale, + properties=properties, + ) + for dd in rep.displacements] refs += new_srefs return refs @@ -443,8 +445,8 @@ def _shapes_to_elements( layer, data_type = _mlayer2gds(shape.layer) properties = _annotations_to_properties(shape.annotations, 128) if isinstance(shape, Path) and not polygonize_paths: - xy = numpy.round(shape.vertices + shape.offset).astype(int) - width = numpy.round(shape.width).astype(int) + xy = rint_cast(shape.vertices + shape.offset) + width = rint_cast(shape.width) path_type = next(k for k, v in path_cap_map.items() if v == shape.cap) # reverse lookup extension: Tuple[int, int] @@ -453,30 +455,36 @@ def _shapes_to_elements( else: extension = (0, 0) - path = klamath.elements.Path(layer=(layer, data_type), - xy=xy, - path_type=path_type, - width=width, - extension=extension, - properties=properties) + path = klamath.elements.Path( + layer=(layer, data_type), + xy=xy, + path_type=path_type, + width=width, + extension=extension, + properties=properties, + ) elements.append(path) elif isinstance(shape, Polygon): polygon = shape xy_closed = numpy.empty((polygon.vertices.shape[0] + 1, 2), dtype=numpy.int32) numpy.rint(polygon.vertices + polygon.offset, out=xy_closed[:-1], casting='unsafe') xy_closed[-1] = xy_closed[0] - boundary = klamath.elements.Boundary(layer=(layer, data_type), - xy=xy_closed, - properties=properties) + boundary = klamath.elements.Boundary( + layer=(layer, data_type), + xy=xy_closed, + properties=properties, + ) elements.append(boundary) else: for polygon in shape.to_polygons(): xy_closed = 
numpy.empty((polygon.vertices.shape[0] + 1, 2), dtype=numpy.int32) numpy.rint(polygon.vertices + polygon.offset, out=xy_closed[:-1], casting='unsafe') xy_closed[-1] = xy_closed[0] - boundary = klamath.elements.Boundary(layer=(layer, data_type), - xy=xy_closed, - properties=properties) + boundary = klamath.elements.Boundary( + layer=(layer, data_type), + xy=xy_closed, + properties=properties, + ) elements.append(boundary) return elements @@ -486,46 +494,44 @@ def _labels_to_texts(labels: List[Label]) -> List[klamath.elements.Text]: for label in labels: properties = _annotations_to_properties(label.annotations, 128) layer, text_type = _mlayer2gds(label.layer) - xy = numpy.round([label.offset]).astype(int) - text = klamath.elements.Text(layer=(layer, text_type), - xy=xy, - string=label.string.encode('ASCII'), - properties=properties, - presentation=0, # TODO maybe set some of these? - angle_deg=0, - invert_y=False, - width=0, - path_type=0, - mag=1) + xy = rint_cast([label.offset]) + text = klamath.elements.Text( + layer=(layer, text_type), + xy=xy, + string=label.string.encode('ASCII'), + properties=properties, + presentation=0, # TODO maybe set some of these? + angle_deg=0, + invert_y=False, + width=0, + path_type=0, + mag=1, + ) texts.append(text) return texts def disambiguate_pattern_names( - patterns: Sequence[Pattern], + names: Iterable[str], max_name_length: int = 32, suffix_length: int = 6, - dup_warn_filter: Optional[Callable[[str], bool]] = None, - ) -> None: + ) -> List[str]: """ Args: - patterns: List of patterns to disambiguate + names: List of pattern names to disambiguate max_name_length: Names longer than this will be truncated suffix_length: Names which get truncated are truncated by this many extra characters. This is to leave room for a suffix if one is necessary. - dup_warn_filter: (optional) Function for suppressing warnings about cell names changing. Receives - the cell name and returns `False` if the warning should be suppressed and `True` if it should - be displayed. Default displays all warnings. 
""" - used_names = [] - for pat in set(patterns): + new_names = [] + for name in names: # Shorten names which already exceed max-length - if len(pat.name) > max_name_length: - shortened_name = pat.name[:max_name_length - suffix_length] - logger.warning(f'Pattern name "{pat.name}" is too long ({len(pat.name)}/{max_name_length} chars),\n' + if len(name) > max_name_length: + shortened_name = name[:max_name_length - suffix_length] + logger.warning(f'Pattern name "{name}" is too long ({len(name)}/{max_name_length} chars),\n' + f' shortening to "{shortened_name}" before generating suffix') else: - shortened_name = pat.name + shortened_name = name # Remove invalid characters sanitized_name = re.compile(r'[^A-Za-z0-9_\?\$]').sub('_', shortened_name) @@ -533,7 +539,7 @@ def disambiguate_pattern_names( # Add a suffix that makes the name unique i = 0 suffixed_name = sanitized_name - while suffixed_name in used_names or suffixed_name == '': + while suffixed_name in new_names or suffixed_name == '': suffix = base64.b64encode(struct.pack('>Q', i), b'$?').decode('ASCII') suffixed_name = sanitized_name + '$' + suffix[:-1].lstrip('A') @@ -542,27 +548,25 @@ def disambiguate_pattern_names( if sanitized_name == '': logger.warning(f'Empty pattern name saved as "{suffixed_name}"') elif suffixed_name != sanitized_name: - if dup_warn_filter is None or dup_warn_filter(pat.name): - logger.warning(f'Pattern name "{pat.name}" ({sanitized_name}) appears multiple times;\n' + if dup_warn_filter is None or dup_warn_filter(name): + logger.warning(f'Pattern name "{name}" ({sanitized_name}) appears multiple times;\n' + f' renaming to "{suffixed_name}"') # Encode into a byte-string and perform some final checks encoded_name = suffixed_name.encode('ASCII') if len(encoded_name) == 0: # Should never happen since zero-length names are replaced - raise PatternError(f'Zero-length name after sanitize+encode,\n originally "{pat.name}"') + raise PatternError(f'Zero-length name after sanitize+encode,\n originally "{name}"') if len(encoded_name) > max_name_length: raise PatternError(f'Pattern name "{encoded_name!r}" length > {max_name_length} after encode,\n' - + f' originally "{pat.name}"') + + f' originally "{name}"') - pat.name = suffixed_name - used_names.append(suffixed_name) + new_names.append(suffixed_name) + return new_names def load_library( stream: BinaryIO, - tag: str, - is_secondary: Optional[Callable[[str], bool]] = None, *, full_load: bool = False, ) -> Tuple[Library, Dict[str, Any]]: @@ -574,28 +578,17 @@ def load_library( Args: stream: Seekable stream. Position 0 should be the start of the file. - The caller should leave the stream open while the library - is still in use, since the library will need to access it - in order to read the structure contents. - tag: Unique identifier that will be used to identify this data source - is_secondary: Function which takes a structure name and returns - True if the structure should only be used as a subcell - and not appear in the main Library interface. - Default always returns False. + The caller should leave the stream open while the library + is still in use, since the library will need to access it + in order to read the structure contents. full_load: If True, force all structures to be read immediately rather - than as-needed. Since data is read sequentially from the file, - this will be faster than using the resulting library's - `precache` method. + than as-needed. 
Since data is read sequentially from the file, this
+            will be faster than using the resulting library's `precache` method.
 
     Returns:
         Library object, allowing for deferred load of structures.
         Additional library info (dict, same format as from `read`).
     """
-    if is_secondary is None:
-        def is_secondary(k: str) -> bool:
-            return False
-    assert(is_secondary is not None)
-
     stream.seek(0)
     lib = Library()
 
@@ -603,7 +596,7 @@
         # Full load approach (immediately load everything)
         patterns, library_info = read(stream)
         for name, pattern in patterns.items():
-            lib.set_const(name, tag, pattern, secondary=is_secondary(name))
+            lib[name] = lambda pattern=pattern: pattern    # bind now; a bare lambda would capture only the last loop value
         return lib, library_info
 
     # Normal approach (scan and defer load)
@@ -613,19 +606,17 @@
     for name_bytes, pos in structs.items():
         name = name_bytes.decode('ASCII')
 
-        def mkstruct(pos: int = pos, name: str = name) -> Pattern:
+        def mkstruct(pos: int = pos) -> Pattern:
             stream.seek(pos)
-            return read_elements(stream, name, raw_mode=True)
+            return read_elements(stream, raw_mode=True)
 
-        lib.set_value(name, tag, mkstruct, secondary=is_secondary(name))
+        lib[name] = mkstruct
 
     return lib, library_info
 
 
 def load_libraryfile(
         filename: Union[str, pathlib.Path],
-        tag: str,
-        is_secondary: Optional[Callable[[str], bool]] = None,
         *,
         use_mmap: bool = True,
         full_load: bool = False,
@@ -640,8 +631,6 @@
     Args:
         path: filename or path to read from
-        tag: Unique identifier for library, see `load_library`
-        is_secondary: Function specifying subcells, see `load_library`
         use_mmap: If `True`, will attempt to memory-map the file instead
                   of buffering. In the case of gzipped files, the file
                   is decompressed into a python `bytes` object in memory
@@ -667,4 +656,4 @@
         stream = mmap.mmap(base_stream.fileno(), 0, access=mmap.ACCESS_READ)
     else:
         stream = io.BufferedReader(base_stream)
-    return load_library(stream, tag, is_secondary)
+    return load_library(stream, full_load=full_load)
diff --git a/masque/file/oasis.py b/masque/file/oasis.py
index 9917be9..e26c203 100644
--- a/masque/file/oasis.py
+++ b/masque/file/oasis.py
@@ -11,7 +11,7 @@ Note that OASIS references follow the same convention as `masque`,
     Scaling, rotation, and mirroring apply to individual instances, not grid
     vectors or offsets.
 """
-from typing import List, Any, Dict, Tuple, Callable, Union, Sequence, Iterable, Optional
+from typing import List, Any, Dict, Tuple, Callable, Union, Sequence, Iterable, Mapping, Optional
 import re
 import io
 import copy
@@ -22,11 +22,12 @@ import pathlib
 import gzip
 
 import numpy
+from numpy.typing import ArrayLike, NDArray
 import fatamorgana
 import fatamorgana.records as fatrec
 from fatamorgana.basic import PathExtensionScheme, AString, NString, PropStringReference
 
-from .utils import clean_pattern_vertices, is_gzipped
+from .utils import is_gzipped
 from .. import Pattern, SubPattern, PatternError, Label, Shape
 from ..shapes import Polygon, Path, Circle
 from ..repetition import Grid, Arbitrary, Repetition
@@ -47,19 +48,22 @@ path_cap_map = {
 #TODO implement more shape types?
 
 
+def rint_cast(val: ArrayLike) -> NDArray[numpy.int64]:
+    return numpy.rint(val, dtype=numpy.int64, casting='unsafe')
+
+
 def build(
-        patterns: Union[Pattern, Sequence[Pattern]],
+        library: Mapping[str, Pattern],       # NOTE: Pattern here should be treated as immutable!
units_per_micron: int, layer_map: Optional[Dict[str, Union[int, Tuple[int, int]]]] = None, *, - modify_originals: bool = False, - disambiguate_func: Optional[Callable[[Iterable[Pattern]], None]] = None, + disambiguate_func: Optional[Callable[[Iterable[str]], List[str]]] = None, annotations: Optional[annotations_t] = None, ) -> fatamorgana.OasisLayout: """ - Convert a `Pattern` or list of patterns to an OASIS stream, writing patterns - as OASIS cells, subpatterns as Placement records, and other shapes and labels - mapped to equivalent record types (Polygon, Path, Circle, Text). + Convert a collection of {name: Pattern} pairs to an OASIS stream, writing patterns + as OASIS cells, subpatterns as Placement records, and mapping other shapes and labels + to equivalent record types (Polygon, Path, Circle, Text). Other shape types may be converted to polygons if no equivalent record type exists (or is not implemented here yet). @@ -75,7 +79,7 @@ def build( prior to calling this function. Args: - patterns: A Pattern or list of patterns to convert. + library: A {name: Pattern} mapping of patterns to write. units_per_micron: Written into the OASIS file, number of grid steps per micrometer. All distances are assumed to be an integer multiple of the grid step, and are stored as such. layer_map: Dictionary which translates layer names into layer numbers. If this argument is @@ -86,11 +90,8 @@ def build( into numbers, omit this argument, and manually generate the required `fatamorgana.records.LayerName` entries. Default is an empty dict (no names provided). - modify_originals: If `True`, the original pattern is modified as part of the writing - process. Otherwise, a copy is made and `deepunlock()`-ed. - Default `False`. - disambiguate_func: Function which takes a list of patterns and alters them - to make their names valid and unique. Default is `disambiguate_pattern_names`. + disambiguate_func: Function which takes a list of pattern names and returns a list of names + altered to be valid and unique. Default is `disambiguate_pattern_names`. 
annotations: dictionary of key-value pairs which are saved as library-level properties Returns: @@ -108,9 +109,6 @@ def build( if annotations is None: annotations = {} - if not modify_originals: - patterns = [p.deepunlock() for p in copy.deepcopy(patterns)] - # Create library lib = fatamorgana.OasisLayout(unit=units_per_micron, validation=None) lib.properties = annotations_to_properties(annotations) @@ -119,10 +117,12 @@ def build( for name, layer_num in layer_map.items(): layer, data_type = _mlayer2oas(layer_num) lib.layers += [ - fatrec.LayerName(nstring=name, - layer_interval=(layer, layer), - type_interval=(data_type, data_type), - is_textlayer=tt) + fatrec.LayerName( + nstring=name, + layer_interval=(layer, layer), + type_interval=(data_type, data_type), + is_textlayer=tt, + ) for tt in (True, False)] def layer2oas(mlayer: layer_t) -> Tuple[int, int]: @@ -132,17 +132,14 @@ def build( else: layer2oas = _mlayer2oas - # Get a dict of id(pattern) -> pattern - patterns_by_id = {id(pattern): pattern for pattern in patterns} - for pattern in patterns: - for i, p in pattern.referenced_patterns_by_id().items(): - patterns_by_id[i] = p - - disambiguate_func(patterns_by_id.values()) + old_names = list(library.keys()) + new_names = disambiguate_func(old_names) + renamed_lib = {new_name: library[old_name] + for old_name, new_name in zip(old_names, new_names)} # Now create a structure for each pattern - for pat in patterns_by_id.values(): - structure = fatamorgana.Cell(name=pat.name) + for name, pat in renamed_lib.items(): + structure = fatamorgana.Cell(name=name) lib.cells.append(structure) structure.properties += annotations_to_properties(pat.annotations) @@ -229,7 +226,6 @@ def readfile( def read( stream: io.BufferedIOBase, - clean_vertices: bool = True, ) -> Tuple[Dict[str, Pattern], Dict[str, Any]]: """ Read a OASIS file and translate it into a dict of Pattern objects. OASIS cells are @@ -243,9 +239,6 @@ def read( Args: stream: Stream to read from. - clean_vertices: If `True`, remove any redundant vertices when loading polygons. - The cleaning process removes any polygons with zero area or <3 vertices. - Default `True`. Returns: - Dict of `pattern_name`:`Pattern`s generated from OASIS cells @@ -264,14 +257,14 @@ def read( layer_map[str(layer_name.nstring)] = layer_name library_info['layer_map'] = layer_map - patterns = [] + patterns_dict = {} for cell in lib.cells: if isinstance(cell.name, int): cell_name = lib.cellnames[cell.name].nstring.string else: cell_name = cell.name.string - pat = Pattern(name=cell_name) + pat = Pattern() for element in cell.geometry: if isinstance(element, fatrec.XElement): logger.warning('Skipping XElement record') @@ -453,19 +446,7 @@ def read( for placement in cell.placements: pat.subpatterns.append(_placement_to_subpat(placement, lib)) - if clean_vertices: - clean_pattern_vertices(pat) - patterns.append(pat) - - # Create a dict of {pattern.name: pattern, ...}, then fix up all subpattern.pattern entries - # according to the subpattern.identifier (which is deleted after use). 
-    patterns_dict = dict(((p.name, p) for p in patterns))
-    for p in patterns_dict.values():
-        for sp in p.subpatterns:
-            ident = sp.identifier[0]
-            name = ident if isinstance(ident, str) else lib.cellnames[ident].nstring.string
-            sp.pattern = patterns_dict[name]
-            del sp.identifier
+        patterns_dict[cell_name] = pat

     return patterns_dict, library_info

@@ -489,8 +470,7 @@ def _mlayer2oas(mlayer: layer_t) -> Tuple[int, int]:
 def _placement_to_subpat(placement: fatrec.Placement, lib: fatamorgana.OasisLayout) -> SubPattern:
     """
-    Helper function to create a SubPattern from a placment. Sets subpat.pattern to None
-     and sets the instance .identifier to (struct_name,).
+    Helper function to create a SubPattern from a placement. Sets subpat.target to the placement name.
     """
     assert(not isinstance(placement.repetition, fatamorgana.ReuseRepetition))
     xy = numpy.array((placement.x, placement.y))
@@ -502,14 +482,15 @@ def _placement_to_subpat(placement: fatrec.Placement, lib: fatamorgana.OasisLayo
         rotation = 0
     else:
         rotation = numpy.deg2rad(float(placement.angle))
-    subpat = SubPattern(offset=xy,
-                        pattern=None,
-                        mirrored=(placement.flip, False),
-                        rotation=rotation,
-                        scale=float(mag),
-                        identifier=(name,),
-                        repetition=repetition_fata2masq(placement.repetition),
-                        annotations=annotations)
+    subpat = SubPattern(
+        target=name,
+        offset=xy,
+        mirrored=(placement.flip, False),
+        rotation=rotation,
+        scale=float(mag),
+        repetition=repetition_fata2masq(placement.repetition),
+        annotations=annotations,
+        )
     return subpat

@@ -518,17 +499,17 @@ def _subpatterns_to_placements(
         ) -> List[fatrec.Placement]:
     refs = []
     for subpat in subpatterns:
-        if subpat.pattern is None:
+        if subpat.target is None:
             continue

         # Note: OASIS mirrors first and rotates second
         mirror_across_x, extra_angle = normalize_mirror(subpat.mirrored)
         frep, rep_offset = repetition_masq2fata(subpat.repetition)

-        offset = numpy.round(subpat.offset + rep_offset).astype(int)
+        offset = rint_cast(subpat.offset + rep_offset)
         angle = numpy.rad2deg(subpat.rotation + extra_angle) % 360
         ref = fatrec.Placement(
-            name=subpat.pattern.name,
+            name=subpat.target,
             flip=mirror_across_x,
             angle=angle,
             magnification=subpat.scale,
@@ -552,46 +533,51 @@ def _shapes_to_elements(
         repetition, rep_offset = repetition_masq2fata(shape.repetition)
         properties = annotations_to_properties(shape.annotations)
         if isinstance(shape, Circle):
-            offset = numpy.round(shape.offset + rep_offset).astype(int)
-            radius = numpy.round(shape.radius).astype(int)
-            circle = fatrec.Circle(layer=layer,
-                                   datatype=datatype,
-                                   radius=radius,
-                                   x=offset[0],
-                                   y=offset[1],
-                                   properties=properties,
-                                   repetition=repetition)
+            offset = rint_cast(shape.offset + rep_offset)
+            radius = rint_cast(shape.radius)
+            circle = fatrec.Circle(
+                layer=layer,
+                datatype=datatype,
+                radius=radius,
+                x=offset[0],
+                y=offset[1],
+                properties=properties,
+                repetition=repetition,
+                )
             elements.append(circle)
         elif isinstance(shape, Path):
-            xy = numpy.round(shape.offset + shape.vertices[0] + rep_offset).astype(int)
-            deltas = numpy.round(numpy.diff(shape.vertices, axis=0)).astype(int)
-            half_width = numpy.round(shape.width / 2).astype(int)
+            xy = rint_cast(shape.offset + shape.vertices[0] + rep_offset)
+            deltas = rint_cast(numpy.diff(shape.vertices, axis=0))
+            half_width = rint_cast(shape.width / 2)
             path_type = next(k for k, v in path_cap_map.items() if v == shape.cap)    # reverse lookup
             extension_start = (path_type, shape.cap_extensions[0] if shape.cap_extensions is not None else None)
             extension_end = (path_type,
shape.cap_extensions[1] if shape.cap_extensions is not None else None) - path = fatrec.Path(layer=layer, - datatype=datatype, - point_list=deltas, - half_width=half_width, - x=xy[0], - y=xy[1], - extension_start=extension_start, # TODO implement multiple cap types? - extension_end=extension_end, - properties=properties, - repetition=repetition, - ) + path = fatrec.Path( + layer=layer, + datatype=datatype, + point_list=deltas, + half_width=half_width, + x=xy[0], + y=xy[1], + extension_start=extension_start, # TODO implement multiple cap types? + extension_end=extension_end, + properties=properties, + repetition=repetition, + ) elements.append(path) else: for polygon in shape.to_polygons(): - xy = numpy.round(polygon.offset + polygon.vertices[0] + rep_offset).astype(int) - points = numpy.round(numpy.diff(polygon.vertices, axis=0)).astype(int) - elements.append(fatrec.Polygon(layer=layer, - datatype=datatype, - x=xy[0], - y=xy[1], - point_list=points, - properties=properties, - repetition=repetition)) + xy = rint_cast(polygon.offset + polygon.vertices[0] + rep_offset) + points = rint_cast(numpy.diff(polygon.vertices, axis=0)) + elements.append(fatrec.Polygon( + layer=layer, + datatype=datatype, + x=xy[0], + y=xy[1], + point_list=points, + properties=properties, + repetition=repetition, + )) return elements @@ -603,29 +589,31 @@ def _labels_to_texts( for label in labels: layer, datatype = layer2oas(label.layer) repetition, rep_offset = repetition_masq2fata(label.repetition) - xy = numpy.round(label.offset + rep_offset).astype(int) + xy = rint_cast(label.offset + rep_offset) properties = annotations_to_properties(label.annotations) - texts.append(fatrec.Text(layer=layer, - datatype=datatype, - x=xy[0], - y=xy[1], - string=label.string, - properties=properties, - repetition=repetition)) + texts.append(fatrec.Text( + layer=layer, + datatype=datatype, + x=xy[0], + y=xy[1], + string=label.string, + properties=properties, + repetition=repetition, + )) return texts def disambiguate_pattern_names( - patterns, + names: Iterable[str], dup_warn_filter: Callable[[str], bool] = None, # If returns False, don't warn about this name - ) -> None: - used_names = [] - for pat in patterns: - sanitized_name = re.compile(r'[^A-Za-z0-9_\?\$]').sub('_', pat.name) + ) -> List[str]: + new_names = [] + for name in names: + sanitized_name = re.compile(r'[^A-Za-z0-9_\?\$]').sub('_', name) i = 0 suffixed_name = sanitized_name - while suffixed_name in used_names or suffixed_name == '': + while suffixed_name in new_names or suffixed_name == '': suffix = base64.b64encode(struct.pack('>Q', i), b'$?').decode('ASCII') suffixed_name = sanitized_name + '$' + suffix[:-1].lstrip('A') @@ -634,16 +622,16 @@ def disambiguate_pattern_names( if sanitized_name == '': logger.warning(f'Empty pattern name saved as "{suffixed_name}"') elif suffixed_name != sanitized_name: - if dup_warn_filter is None or dup_warn_filter(pat.name): - logger.warning(f'Pattern name "{pat.name}" ({sanitized_name}) appears multiple times;\n' + if dup_warn_filter is None or dup_warn_filter(name): + logger.warning(f'Pattern name "{name}" ({sanitized_name}) appears multiple times;\n' + f' renaming to "{suffixed_name}"') if len(suffixed_name) == 0: # Should never happen since zero-length names are replaced - raise PatternError(f'Zero-length name after sanitize+encode,\n originally "{pat.name}"') + raise PatternError(f'Zero-length name after sanitize+encode,\n originally "{name}"') - pat.name = suffixed_name - used_names.append(suffixed_name) + 
new_names.append(suffixed_name) + return new_names def repetition_fata2masq( diff --git a/masque/file/python_gdsii.py b/masque/file/python_gdsii.py index 6b89abc..303e98c 100644 --- a/masque/file/python_gdsii.py +++ b/masque/file/python_gdsii.py @@ -18,7 +18,7 @@ Notes: * GDS does not support library- or structure-level annotations """ from typing import List, Any, Dict, Tuple, Callable, Union, Iterable, Optional -from typing import Sequence +from typing import Sequence, Mapping import re import io import copy @@ -59,13 +59,13 @@ def rint_cast(val: ArrayLike) -> NDArray[numpy.int32]: def build( - patterns: Union[Pattern, Sequence[Pattern]], + library: Mapping[str, Pattern], meters_per_unit: float, logical_units_per_unit: float = 1, library_name: str = 'masque-gdsii-write', *, modify_originals: bool = False, - disambiguate_func: Callable[[Iterable[Pattern]], None] = None, + disambiguate_func: Callable[[Iterable[str]], List[str]] = None, ) -> gdsii.library.Library: """ Convert a `Pattern` or list of patterns to a GDSII stream, by first calling @@ -86,7 +86,7 @@ def build( prior to calling this function. Args: - patterns: A Pattern or list of patterns to convert. + library: A {name: Pattern} mapping of patterns to write. meters_per_unit: Written into the GDSII file, meters per (database) length unit. All distances are assumed to be an integer multiple of this unit, and are stored as such. logical_units_per_unit: Written into the GDSII file. Allows the GDSII to specify a @@ -95,27 +95,29 @@ def build( library_name: Library name written into the GDSII file. Default 'masque-gdsii-write'. modify_originals: If `True`, the original pattern is modified as part of the writing - process. Otherwise, a copy is made and `deepunlock()`-ed. + process. Otherwise, a copy is made. Default `False`. - disambiguate_func: Function which takes a list of patterns and alters them - to make their names valid and unique. Default is `disambiguate_pattern_names`, which - attempts to adhere to the GDSII standard as well as possible. + disambiguate_func: Function which takes a list of pattern names and returns a list of names + altered to be valid and unique. Default is `disambiguate_pattern_names`, which + attempts to adhere to the GDSII standard reasonably well. WARNING: No additional error checking is performed on the results. 
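+
+    Example (illustrative sketch; `pats` stands in for a {name: Pattern} dict):
+        >>> lib = build(pats, meters_per_unit=1e-9)
+        >>> lib.save(open('out.gds', 'wb'))
+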
    Returns:
        `gdsii.library.Library`
    """
-    if isinstance(patterns, Pattern):
-        patterns = [patterns]
-
     if disambiguate_func is None:
-        disambiguate_func = disambiguate_pattern_names      # type: ignore
-    assert(disambiguate_func is not None)   # placate mypy
+        disambiguate_func = disambiguate_pattern_names

     if not modify_originals:
-        patterns = [p.deepunlock() for p in copy.deepcopy(patterns)]
+        library = copy.deepcopy(library)

-    patterns = [p.wrap_repeated_shapes() for p in patterns]
+    library = {name: pat.wrap_repeated_shapes()    # rebuild the dict (a Mapping has no in-place add)
+               for name, pat in library.items()}
+
+    old_names = list(library.keys())
+    new_names = disambiguate_func(old_names)
+    renamed_lib = {new_name: library[old_name]
+                   for old_name, new_name in zip(old_names, new_names)}

     # Create library
     lib = gdsii.library.Library(version=600,
                                 name=library_name.encode('ASCII'),
                                 logical_unit=logical_units_per_unit,
                                 physical_unit=meters_per_unit)

-    # Get a dict of id(pattern) -> pattern
-    patterns_by_id = {id(pattern): pattern for pattern in patterns}
-    for pattern in patterns:
-        for i, p in pattern.referenced_patterns_by_id().items():
-            patterns_by_id[i] = p
-
-    disambiguate_func(patterns_by_id.values())
-
     # Now create a structure for each pattern, and add in any Boundary and SREF elements
-    for pat in patterns_by_id.values():
-        structure = gdsii.structure.Structure(name=pat.name.encode('ASCII'))
+    for name, pat in renamed_lib.items():
+        structure = gdsii.structure.Structure(name=name.encode('ASCII'))
         lib.append(structure)

         structure += _shapes_to_elements(pat.shapes)
@@ -144,7 +138,7 @@ def build(


 def write(
-        patterns: Union[Pattern, Sequence[Pattern]],
+        library: Mapping[str, Pattern],
         stream: io.BufferedIOBase,
         *args,
         **kwargs,
@@ -154,31 +148,31 @@ def write(
     See `masque.file.gdsii.build()` for details.

     Args:
-        patterns: A Pattern or list of patterns to write to file.
+        library: A {name: Pattern} mapping of patterns to write.
         stream: Stream to write to.
         *args: passed to `masque.file.gdsii.build()`
         **kwargs: passed to `masque.file.gdsii.build()`
     """
-    lib = build(patterns, *args, **kwargs)
+    lib = build(library, *args, **kwargs)
     lib.save(stream)
     return


 def writefile(
-        patterns: Union[Sequence[Pattern], Pattern],
+        library: Mapping[str, Pattern],
         filename: Union[str, pathlib.Path],
         *args,
         **kwargs,
         ) -> None:
     """
-    Wrapper for `masque.file.gdsii.write()` that takes a filename or path instead of a stream.
+    Wrapper for `write()` that takes a filename or path instead of a stream.

     Will automatically compress the file if it has a .gz suffix.

     Args:
-        patterns: `Pattern` or list of patterns to save
+        library: {name: Pattern} pairs to save.
         filename: Filename to save to.
-        *args: passed to `masque.file.gdsii.write`
-        **kwargs: passed to `masque.file.gdsii.write`
+        *args: passed to `write()`
+        **kwargs: passed to `write()`
     """
     path = pathlib.Path(filename)
     if path.suffix == '.gz':
@@ -196,14 +190,14 @@ def readfile(
         **kwargs,
         ) -> Tuple[Dict[str, Pattern], Dict[str, Any]]:
     """
-    Wrapper for `masque.file.gdsii.read()` that takes a filename or path instead of a stream.
+    Wrapper for `read()` that takes a filename or path instead of a stream.

     Will automatically decompress gzipped files.

     Args:
         filename: Filename to save to.
-        *args: passed to `masque.file.gdsii.read`
-        **kwargs: passed to `masque.file.gdsii.read`
+        *args: passed to `read()`
+        **kwargs: passed to `read()`
     """
     path = pathlib.Path(filename)
     if is_gzipped(path):
@@ -251,9 +245,10 @@ def read(
     raw_mode = True     # Whether to construct shapes in raw mode (less error checking)

-    patterns = []
+    patterns_dict = {}
     for structure in lib:
-        pat = Pattern(name=structure.name.decode('ASCII'))
+        pat = Pattern()
+        name = structure.name.decode('ASCII')
         for element in structure:
             # Switch based on element type:
             if isinstance(element, gdsii.elements.Boundary):
@@ -275,15 +270,7 @@ def read(

         if clean_vertices:
             clean_pattern_vertices(pat)
-        patterns.append(pat)
-
-    # Create a dict of {pattern.name: pattern, ...}, then fix up all subpattern.pattern entries
-    #   according to the subpattern.identifier (which is deleted after use).
-    patterns_dict = dict(((p.name, p) for p in patterns))
-    for p in patterns_dict.values():
-        for sp in p.subpatterns:
-            sp.pattern = patterns_dict[sp.identifier[0].decode('ASCII')]
-            del sp.identifier
+        patterns_dict[name] = pat

     return patterns_dict, library_info

@@ -309,8 +296,7 @@ def _ref_to_subpat(
                          gdsii.elements.ARef]
         ) -> SubPattern:
     """
-    Helper function to create a SubPattern from an SREF or AREF. Sets subpat.pattern to None
-     and sets the instance .identifier to (struct_name,).
+    Helper function to create a SubPattern from an SREF or AREF. Sets subpat.target to struct_name.

     NOTE: "Absolute" means not affected by parent elements.
           That's not currently supported by masque at all (and not planned).
@@ -351,7 +337,6 @@ def _ref_to_subpat(
                          mirrored=(mirror_across_x, False),
                          annotations=_properties_to_annotations(element.properties),
                          repetition=repetition)
-    subpat.identifier = (element.struct_name,)
     return subpat

@@ -395,9 +380,9 @@ def _subpatterns_to_refs(
         ) -> List[Union[gdsii.elements.ARef, gdsii.elements.SRef]]:
     refs = []
     for subpat in subpatterns:
-        if subpat.pattern is None:
+        if subpat.target is None:
             continue
-        encoded_name = subpat.pattern.name.encode('ASCII')
+        encoded_name = subpat.target.encode('ASCII')

         # Note: GDS mirrors first and rotates second
         mirror_across_x, extra_angle = normalize_mirror(subpat.mirrored)
@@ -523,14 +508,14 @@ def _labels_to_texts(labels: List[Label]) -> List[gdsii.elements.Text]:


 def disambiguate_pattern_names(
-        patterns: Sequence[Pattern],
+        names: Iterable[str],
         max_name_length: int = 32,
         suffix_length: int = 6,
         dup_warn_filter: Optional[Callable[[str], bool]] = None,
-        ) -> None:
+        ) -> List[str]:
     """
     Args:
-        patterns: List of patterns to disambiguate
+        names: List of pattern names to disambiguate
         max_name_length: Names longer than this will be truncated
         suffix_length: Names which get truncated are truncated by this many extra characters.
                        This is to leave room for a suffix if one is necessary.
         dup_warn_filter: (optional) Function for suppressing warnings about cell names. Function takes
                          the cell name and returns `False` if the warning should be suppressed and `True`
                          if it should be displayed. Default displays all warnings.
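+
+    Returns:
+        List of sanitized, unique names, in the same order as the input names.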
""" - used_names = [] - for pat in set(patterns): + new_names = [] + for name in names: # Shorten names which already exceed max-length - if len(pat.name) > max_name_length: - shortened_name = pat.name[:max_name_length - suffix_length] - logger.warning(f'Pattern name "{pat.name}" is too long ({len(pat.name)}/{max_name_length} chars),\n' + if len(name) > max_name_length: + shortened_name = name[:max_name_length - suffix_length] + logger.warning(f'Pattern name "{name}" is too long ({len(name)}/{max_name_length} chars),\n' + f' shortening to "{shortened_name}" before generating suffix') else: - shortened_name = pat.name + shortened_name = name # Remove invalid characters sanitized_name = re.compile(r'[^A-Za-z0-9_\?\$]').sub('_', shortened_name) @@ -554,7 +539,7 @@ def disambiguate_pattern_names( # Add a suffix that makes the name unique i = 0 suffixed_name = sanitized_name - while suffixed_name in used_names or suffixed_name == '': + while suffixed_name in new_names or suffixed_name == '': suffix = base64.b64encode(struct.pack('>Q', i), b'$?').decode('ASCII') suffixed_name = sanitized_name + '$' + suffix[:-1].lstrip('A') @@ -563,18 +548,19 @@ def disambiguate_pattern_names( if sanitized_name == '': logger.warning(f'Empty pattern name saved as "{suffixed_name}"') elif suffixed_name != sanitized_name: - if dup_warn_filter is None or dup_warn_filter(pat.name): - logger.warning(f'Pattern name "{pat.name}" ({sanitized_name}) appears multiple times;\n' + if dup_warn_filter is None or dup_warn_filter(name): + logger.warning(f'Pattern name "{name}" ({sanitized_name}) appears multiple times;\n' + f' renaming to "{suffixed_name}"') # Encode into a byte-string and perform some final checks encoded_name = suffixed_name.encode('ASCII') if len(encoded_name) == 0: # Should never happen since zero-length names are replaced - raise PatternError(f'Zero-length name after sanitize+encode,\n originally "{pat.name}"') + raise PatternError(f'Zero-length name after sanitize+encode,\n originally "{name}"') if len(encoded_name) > max_name_length: raise PatternError(f'Pattern name "{encoded_name!r}" length > {max_name_length} after encode,\n' - + f' originally "{pat.name}"') + + f' originally "{name}"') + + new_names.append(suffixed_name) + return new_names - pat.name = suffixed_name - used_names.append(suffixed_name) diff --git a/masque/file/svg.py b/masque/file/svg.py index 58c9c6a..e6912f9 100644 --- a/masque/file/svg.py +++ b/masque/file/svg.py @@ -1,7 +1,7 @@ """ SVG file format readers and writers """ -from typing import Dict, Optional +from typing import Dict, Optional, Mapping import warnings import numpy @@ -13,7 +13,8 @@ from .. import Pattern def writefile( - pattern: Pattern, + library: Mapping[str, Pattern], + top: str, filename: str, custom_attributes: bool = False, ) -> None: @@ -41,11 +42,12 @@ def writefile( custom_attributes: Whether to write non-standard `pattern_layer` and `pattern_dose` attributes to the SVG elements. 
""" + pattern = library[top] # Polygonize pattern pattern.polygonize() - bounds = pattern.get_bounds() + bounds = pattern.get_bounds(library=library) if bounds is None: bounds_min, bounds_max = numpy.array([[-1, -1], [1, 1]]) warnings.warn('Pattern had no bounds (empty?); setting arbitrary viewbox') @@ -59,15 +61,10 @@ def writefile( svg = svgwrite.Drawing(filename, profile='full', viewBox=viewbox_string, debug=(not custom_attributes)) - # Get a dict of id(pattern) -> pattern - patterns_by_id = {**(pattern.referenced_patterns_by_id()), id(pattern): pattern} # type: Dict[int, Optional[Pattern]] - # Now create a group for each row in sd_table (ie, each pattern + dose combination) # and add in any Boundary and Use elements - for pat in patterns_by_id.values(): - if pat is None: - continue - svg_group = svg.g(id=mangle_name(pat), fill='blue', stroke='red') + for name, pat in library.items(): + svg_group = svg.g(id=mangle_name(name), fill='blue', stroke='red') for shape in pat.shapes: for polygon in shape.to_polygons(): @@ -81,20 +78,24 @@ def writefile( svg_group.add(path) for subpat in pat.subpatterns: - if subpat.pattern is None: + if subpat.target is None: continue transform = f'scale({subpat.scale:g}) rotate({subpat.rotation:g}) translate({subpat.offset[0]:g},{subpat.offset[1]:g})' - use = svg.use(href='#' + mangle_name(subpat.pattern), transform=transform) + use = svg.use(href='#' + mangle_name(subpat.target), transform=transform) if custom_attributes: use['pattern_dose'] = subpat.dose svg_group.add(use) svg.defs.add(svg_group) - svg.add(svg.use(href='#' + mangle_name(pattern))) + svg.add(svg.use(href='#' + mangle_name(top))) svg.save() -def writefile_inverted(pattern: Pattern, filename: str): +def writefile_inverted( + library: Mapping[str, Pattern], + top: str, + filename: str, + ) -> None: """ Write an inverted Pattern to an SVG file, by first calling `.polygonize()` and `.flatten()` on it to change the shapes into polygons, then drawing a bounding @@ -110,10 +111,12 @@ def writefile_inverted(pattern: Pattern, filename: str): pattern: Pattern to write to file. Modified by this function. filename: Filename to write to. """ + pattern = library[top] + # Polygonize and flatten pattern pattern.polygonize().flatten() - bounds = pattern.get_bounds() + bounds = pattern.get_bounds(library=library) if bounds is None: bounds_min, bounds_max = numpy.array([[-1, -1], [1, 1]]) warnings.warn('Pattern had no bounds (empty?); setting arbitrary viewbox') diff --git a/masque/file/utils.py b/masque/file/utils.py index 47e8b7d..2f75be4 100644 --- a/masque/file/utils.py +++ b/masque/file/utils.py @@ -1,7 +1,7 @@ """ Helper functions for file reading and writing """ -from typing import Set, Tuple, List +from typing import Set, Tuple, List, Iterable, Mapping import re import copy import pathlib @@ -10,19 +10,22 @@ from .. import Pattern, PatternError from ..shapes import Polygon, Path -def mangle_name(pattern: Pattern, dose_multiplier: float = 1.0) -> str: +def mangle_name(name: str, dose_multiplier: float = 1.0) -> str: """ - Create a name using `pattern.name`, `id(pattern)`, and the dose multiplier. + Create a new name using `name` and the `dose_multiplier`. Args: - pattern: Pattern whose name we want to mangle. + name: Name we want to mangle. dose_multiplier: Dose multiplier to mangle with. Returns: Mangled name. 
""" + if dose_multiplier == 1: + full_name = name + else: + full_name = f'{name}_dm{dose_multiplier}' expression = re.compile(r'[^A-Za-z0-9_\?\$]') - full_name = '{}_{}_{}'.format(pattern.name, dose_multiplier, id(pattern)) sanitized_name = expression.sub('_', full_name) return sanitized_name @@ -51,25 +54,30 @@ def clean_pattern_vertices(pat: Pattern) -> Pattern: return pat -def make_dose_table(patterns: List[Pattern], dose_multiplier: float = 1.0) -> Set[Tuple[int, float]]: +def make_dose_table( + top_names: Iterable[str], + library: Mapping[str, Pattern], + dose_multiplier: float = 1.0, + ) -> Set[Tuple[int, float]]: """ - Create a set containing `(id(pat), written_dose)` for each pattern (including subpatterns) + Create a set containing `(name, written_dose)` for each pattern (including subpatterns) Args: + top_names: Names of all topcells pattern: Source Patterns. dose_multiplier: Multiplier for all written_dose entries. Returns: - `{(id(subpat.pattern), written_dose), ...}` + `{(name, written_dose), ...}` """ - dose_table = {(id(pattern), dose_multiplier) for pattern in patterns} - for pattern in patterns: + dose_table = {(top_name, dose_multiplier) for top_name in top_names} + for name, pattern in library.items(): for subpat in pattern.subpatterns: - if subpat.pattern is None: + if subpat.target is None: continue - subpat_dose_entry = (id(subpat.pattern), subpat.dose * dose_multiplier) + subpat_dose_entry = (subpat.target, subpat.dose * dose_multiplier) if subpat_dose_entry not in dose_table: - subpat_dose_table = make_dose_table([subpat.pattern], subpat.dose * dose_multiplier) + subpat_dose_table = make_dose_table(subpat.target, library, subpat.dose * dose_multiplier) dose_table = dose_table.union(subpat_dose_table) return dose_table @@ -96,7 +104,7 @@ def dtype2dose(pattern: Pattern) -> Pattern: def dose2dtype( - patterns: List[Pattern], + library: List[Pattern], ) -> Tuple[List[Pattern], List[float]]: """ For each shape in each pattern, set shape.layer to the tuple @@ -119,21 +127,16 @@ def dose2dtype( dose_list: A list of doses, providing a mapping between datatype (int, list index) and dose (float, list entry). 
""" - # Get a dict of id(pattern) -> pattern - patterns_by_id = {id(pattern): pattern for pattern in patterns} - for pattern in patterns: - for i, p in pattern.referenced_patterns_by_id().items(): - patterns_by_id[i] = p - + logger.warning('TODO: dose2dtype() needs to be tested!') # Get a table of (id(pat), written_dose) for each pattern and subpattern - sd_table = make_dose_table(patterns) + sd_table = make_dose_table(library.find_topcells(), library) # Figure out all the unique doses necessary to write this pattern # This means going through each row in sd_table and adding the dose values needed to write # that subpattern at that dose level dose_vals = set() - for pat_id, pat_dose in sd_table: - pat = patterns_by_id[pat_id] + for name, pat_dose in sd_table: + pat = library[name] for shape in pat.shapes: dose_vals.add(shape.dose * pat_dose) @@ -144,21 +147,22 @@ def dose2dtype( # Create a new pattern for each non-1-dose entry in the dose table # and update the shapes to reflect their new dose - new_pats = {} # (id, dose) -> new_pattern mapping - for pat_id, pat_dose in sd_table: + new_names = {} # {(old name, dose): new name} mapping + new_lib = {} # {new_name: new_pattern} mapping + for name, pat_dose in sd_table: + mangled_name = mangle_name(name, pat_dose) + new_names[(name, pat_dose)] = mangled_name + + old_pat = library[name] + if pat_dose == 1: - new_pats[(pat_id, pat_dose)] = patterns_by_id[pat_id] + new_lib[mangled_name] = old_pat continue - old_pat = patterns_by_id[pat_id] - pat = old_pat.copy() # keep old subpatterns - pat.shapes = copy.deepcopy(old_pat.shapes) - pat.labels = copy.deepcopy(old_pat.labels) + pat = old_pat.deepcopy() - encoded_name = mangle_name(pat, pat_dose) if len(encoded_name) == 0: - raise PatternError('Zero-length name after mangle+encode, originally "{}"'.format(pat.name)) - pat.name = encoded_name + raise PatternError('Zero-length name after mangle+encode, originally "{name}"'.format(pat.name)) for shape in pat.shapes: data_type = dose_vals_list.index(shape.dose * pat_dose) @@ -169,15 +173,9 @@ def dose2dtype( else: raise PatternError(f'Invalid layer for gdsii: {shape.layer}') - new_pats[(pat_id, pat_dose)] = pat + new_lib[mangled_name] = pat - # Go back through all the dose-specific patterns and fix up their subpattern entries - for (pat_id, pat_dose), pat in new_pats.items(): - for subpat in pat.subpatterns: - dose_mult = subpat.dose * pat_dose - subpat.pattern = new_pats[(id(subpat.pattern), dose_mult)] - - return patterns, dose_vals_list + return new_lib, dose_vals_list def is_gzipped(path: pathlib.Path) -> bool: diff --git a/masque/label.py b/masque/label.py index fe7d9ab..567af8a 100644 --- a/masque/label.py +++ b/masque/label.py @@ -6,14 +6,14 @@ from numpy.typing import ArrayLike, NDArray from .repetition import Repetition from .utils import rotation_matrix_2d, layer_t, AutoSlots, annotations_t -from .traits import PositionableImpl, LayerableImpl, Copyable, Pivotable, LockableImpl, RepeatableImpl +from .traits import PositionableImpl, LayerableImpl, Copyable, Pivotable, RepeatableImpl from .traits import AnnotatableImpl L = TypeVar('L', bound='Label') -class Label(PositionableImpl, LayerableImpl, LockableImpl, RepeatableImpl, AnnotatableImpl, +class Label(PositionableImpl, LayerableImpl, RepeatableImpl, AnnotatableImpl, Pivotable, Copyable, metaclass=AutoSlots): """ A text annotation with a position and layer (but no size; it is not drawn) @@ -49,32 +49,28 @@ class Label(PositionableImpl, LayerableImpl, LockableImpl, RepeatableImpl, Annot layer: 
layer_t = 0, repetition: Optional[Repetition] = None, annotations: Optional[annotations_t] = None, - locked: bool = False, identifier: Tuple = (), ) -> None: - LockableImpl.unlock(self) self.identifier = identifier self.string = string self.offset = numpy.array(offset, dtype=float, copy=True) self.layer = layer self.repetition = repetition self.annotations = annotations if annotations is not None else {} - self.set_locked(locked) def __copy__(self: L) -> L: - return type(self)(string=self.string, - offset=self.offset.copy(), - layer=self.layer, - repetition=self.repetition, - locked=self.locked, - identifier=self.identifier) + return type(self)( + string=self.string, + offset=self.offset.copy(), + layer=self.layer, + repetition=self.repetition, + identifier=self.identifier, + ) def __deepcopy__(self: L, memo: Dict = None) -> L: memo = {} if memo is None else memo new = copy.copy(self) - LockableImpl.unlock(new) new._offset = self._offset.copy() - new.set_locked(self.locked) return new def rotate_around(self: L, pivot: ArrayLike, rotation: float) -> L: @@ -106,17 +102,3 @@ class Label(PositionableImpl, LayerableImpl, LockableImpl, RepeatableImpl, Annot Bounds [[xmin, xmax], [ymin, ymax]] """ return numpy.array([self.offset, self.offset]) - - def lock(self: L) -> L: - PositionableImpl._lock(self) - LockableImpl.lock(self) - return self - - def unlock(self: L) -> L: - LockableImpl.unlock(self) - PositionableImpl._unlock(self) - return self - - def __repr__(self) -> str: - locked = ' L' if self.locked else '' - return f'