WIP: make libraries and names first-class!

This commit is contained in:
jan 2022-07-07 11:27:29 -07:00
parent fff20b3da9
commit 42c3a2b1e1
34 changed files with 1785 additions and 1817 deletions

View File

@ -0,0 +1,28 @@
import numpy
from pyclipper import (
Pyclipper, PT_CLIP, PT_SUBJECT, CT_UNION, CT_INTERSECTION, PFT_NONZERO,
scale_to_clipper, scale_from_clipper,
)
p = Pyclipper()
p.AddPaths([
[(-10, -10), (-10, 10), (-9, 10), (-9, -10)],
[(-10, 10), (10, 10), (10, 9), (-10, 9)],
[(10, 10), (10, -10), (9, -10), (9, 10)],
[(10, -10), (-10, -10), (-10, -9), (10, -9)],
], PT_SUBJECT, closed=True)
p.Execute2?
p.Execute?
p.Execute(PT_UNION, PT_NONZERO, PT_NONZERO)
p.Execute(CT_UNION, PT_NONZERO, PT_NONZERO)
p.Execute(CT_UNION, PFT_NONZERO, PFT_NONZERO)
p = Pyclipper()
p.AddPaths([
[(-10, -10), (-10, 10), (-9, 10), (-9, -10)],
[(-10, 10), (10, 10), (10, 9), (-10, 9)],
[(10, 10), (10, -10), (9, -10), (9, 10)],
[(10, -10), (-10, -10), (-10, -9), (10, -9)],
], PT_SUBJECT, closed=True)
r = p.Execute2(CT_UNION, PFT_NONZERO, PFT_NONZERO)
r
r.Childs
%history -f nested_poly_test.py
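# Takeaways from the session above (comments added for clarity): the first two
# `Execute` attempts fail with NameError, since only the CT_*/PFT_* constants
# were imported; the working form is Execute(CT_UNION, PFT_NONZERO, PFT_NONZERO).
# `Execute2` instead returns a PolyTree, and `.Childs` exposes the contour
# nesting (for the square ring above, the outer contour nests its inner hole).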

examples/pcgen.py Normal file
View File

@ -0,0 +1,298 @@
"""
Routines for creating normalized 2D lattices and common photonic crystal
cavity designs.
"""
from typing import Sequence, Tuple
import numpy # type: ignore
def triangular_lattice(dims: Tuple[int, int],
asymmetric: bool = False,
origin: str = 'center',
) -> numpy.ndarray:
"""
Return an ndarray of `[[x0, y0], [x1, y1], ...]` denoting lattice sites for
a triangular lattice in 2D.
Args:
dims: Number of lattice sites in the [x, y] directions.
asymmetric: If true, each row will contain the same number of
x-coord lattice sites. If false, every other row will be
one site shorter (to make the structure symmetric).
origin: If 'corner', the least-(x,y) lattice site is placed at (0, 0).
If 'center', the center of the lattice (not necessarily a
lattice site) is placed at (0, 0).
Returns:
`[[x0, y0], [x1, y1], ...]` denoting lattice sites.
"""
sx, sy = numpy.meshgrid(numpy.arange(dims[0], dtype=float),
numpy.arange(dims[1], dtype=float), indexing='ij')
sx[sy % 2 == 1] += 0.5
sy *= numpy.sqrt(3) / 2
if not asymmetric:
which = sx != sx.max()
sx = sx[which]
sy = sy[which]
xy = numpy.column_stack((sx.flat, sy.flat))
if origin == 'center':
xy -= (xy.max(axis=0) - xy.min(axis=0)) / 2
elif origin == 'corner':
pass
else:
raise Exception(f'Invalid value for `origin`: {origin}')
return xy[xy[:, 0].argsort(), :]
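# Illustrative check (added; not in the original file): a symmetric 3x3 request
# keeps 3 holes on the even rows and drops the last hole of the shifted middle
# row, for 8 sites total:
#
#   >>> triangular_lattice((3, 3)).shape
#   (8, 2)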
def square_lattice(dims: Tuple[int, int]) -> numpy.ndarray:
"""
Return an ndarray of `[[x0, y0], [x1, y1], ...]` denoting lattice sites for
a square lattice in 2D. The lattice will be centered around (0, 0).
Args:
dims: Number of lattice sites in the [x, y] directions.
Returns:
`[[x0, y0], [x1, y1], ...]` denoting lattice sites.
"""
xs, ys = numpy.meshgrid(numpy.arange(dims[0], dtype=float),
numpy.arange(dims[1], dtype=float), indexing='xy')
xs -= dims[0] / 2
ys -= dims[1] / 2
xy = numpy.vstack((xs.flatten(), ys.flatten())).T
return xy[xy[:, 0].argsort(), ]
# ### Photonic crystal functions ###
def nanobeam_holes(a_defect: float,
num_defect_holes: int,
num_mirror_holes: int
) -> numpy.ndarray:
"""
Returns a list of `[[x0, r0], [x1, r1], ...]` of nanobeam hole positions and radii.
Creates a region in which the lattice constant and radius are progressively
(linearly) altered over num_defect_holes holes until they reach the value
specified by a_defect, then symmetrically returned to a lattice constant and
radius of 1, which is repeated num_mirror_holes times on each side.
Args:
a_defect: Minimum lattice constant for the defect, as a fraction of the
mirror lattice constant (i.e., for no defect, a_defect = 1).
num_defect_holes: How many holes form the defect (per-side)
num_mirror_holes: How many holes form the mirror (per-side)
Returns:
Ndarray `[[x0, r0], [x1, r1], ...]` of nanobeam hole positions and radii.
"""
a_values = numpy.linspace(a_defect, 1, num_defect_holes, endpoint=False)
xs = a_values.cumsum() - (a_values[0] / 2) # Later mirroring makes center distance 2x as long
mirror_xs = numpy.arange(1, num_mirror_holes + 1, dtype=float) + xs[-1]
mirror_rs = numpy.ones_like(mirror_xs)
return numpy.vstack((numpy.hstack((-mirror_xs[::-1], -xs[::-1], xs, mirror_xs)),
numpy.hstack((mirror_rs[::-1], a_values[::-1], a_values, mirror_rs)))).T
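# Illustrative check (added; not in the original file): 3 defect holes and
# 2 mirror holes per side give 10 (x, r) rows, mirrored about x = 0, with
# radii tapering from 1 in the mirrors down to a_defect at the center:
#
#   >>> nanobeam_holes(0.9, 3, 2).shape
#   (10, 2)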
def waveguide(length: int, num_mirror: int) -> numpy.ndarray:
"""
Line defect waveguide in a triangular lattice.
Args:
length: waveguide length (number of holes in x direction)
num_mirror: Mirror length (number of holes per side; total size is
`2 * n + 1` holes).
Returns:
`[[x0, y0], [x1, y1], ...]` for all the holes
"""
p = triangular_lattice([length, 2 * num_mirror + 1])
p_wg = p[p[:, 1] != 0, :]
return p_wg
def wgbend(num_mirror: int) -> numpy.ndarray:
"""
Line defect waveguide bend in a triangular lattice.
Args:
num_mirror: Mirror length (number of holes per side; total size is
approximately `2 * n + 1` holes).
Returns:
`[[x0, y0], [x1, y1], ...]` for all the holes
"""
p = triangular_lattice([2 * num_mirror, 2 * num_mirror + 1])
left_horiz = (p[:, 1] == 0) & (p[:, 0] <= 0)
p = p[~left_horiz, :]
right_diag = numpy.isclose(p[:, 1], p[:, 0] * numpy.sqrt(3)) & (p[:, 0] >= 0)
p = p[~right_diag, :]
return p
def y_splitter(num_mirror: int) -> numpy.ndarray:
"""
Line defect waveguide y-splitter in a triangular lattice.
Args:
num_mirror: Mirror length (number of holes per side; total size is
approximately `2 * n + 1` holes).
Returns:
`[[x0, y0], [x1, y1], ...]` for all the holes
"""
p = triangular_lattice([2 * num_mirror, 2 * num_mirror + 1])
left_horiz = (p[:, 1] == 0) & (p[:, 0] <= 0)
p = p[~left_horiz, :]
right_diag_up = numpy.isclose(p[:, 1], p[:, 0] * numpy.sqrt(3)) & (p[:, 0] >= 0)
p = p[~right_diag_up, :]
right_diag_dn = numpy.isclose(p[:, 1], -p[:, 0] * numpy.sqrt(3)) & (p[:, 0] >= 0)
p = p[~right_diag_dn, :]
return p
def ln_defect(mirror_dims: Tuple[int, int],
defect_length: int,
) -> numpy.ndarray:
"""
N-hole defect in a triangular lattice.
Args:
mirror_dims: [x, y] mirror lengths (number of holes). Total number of holes
is 2 * n + 1 in each direction.
defect_length: Length of defect. Should be an odd number.
Returns:
`[[x0, y0], [x1, y1], ...]` for all the holes
"""
if defect_length % 2 != 1:
raise Exception('defect_length must be odd!')
p = triangular_lattice([2 * d + 1 for d in mirror_dims])
half_length = numpy.floor(defect_length / 2)
hole_nums = numpy.arange(-half_length, half_length + 1)
holes_to_keep = numpy.in1d(p[:, 0], hole_nums, invert=True)
return p[numpy.logical_or(holes_to_keep, p[:, 1] != 0), ]
def ln_shift_defect(mirror_dims: Tuple[int, int],
defect_length: int,
shifts_a: Sequence[float] = (0.15, 0, 0.075),
shifts_r: Sequence[float] = (1, 1, 1)
) -> numpy.ndarray:
"""
N-hole defect with shifted holes (intended to give the mode a gaussian profile
in real- and k-space so as to improve both Q and confinement). Holes along the
defect line are shifted and altered according to the shifts_* parameters.
Args:
mirror_dims: [x, y] mirror lengths (number of holes). Total number of holes
is `2 * n + 1` in each direction.
defect_length: Length of defect. Should be an odd number.
shifts_a: Percentage of a to shift (1st, 2nd, 3rd,...) holes along the defect line
shifts_r: Factor to multiply the radius by. Should match length of shifts_a
Returns:
`[[x0, y0, r0], [x1, y1, r1], ...]` for all the holes
"""
if not hasattr(shifts_a, "__len__") and shifts_a is not None:
shifts_a = [shifts_a]
if not hasattr(shifts_r, "__len__") and shifts_r is not None:
shifts_r = [shifts_r]
xy = ln_defect(mirror_dims, defect_length)
# Add column for radius
xyr = numpy.hstack((xy, numpy.ones((xy.shape[0], 1))))
# Shift holes
# Expand shifts as necessary
n_shifted = max(len(shifts_a), len(shifts_r))
tmp_a = numpy.array(shifts_a)
shifts_a = numpy.ones((n_shifted, ))
shifts_a[:len(tmp_a)] = tmp_a
tmp_r = numpy.array(shifts_r)
shifts_r = numpy.ones((n_shifted, ))
shifts_r[:len(tmp_r)] = tmp_r
x_removed = numpy.floor(defect_length / 2)
for ind in range(n_shifted):
for sign in (-1, 1):
x_val = sign * (x_removed + ind + 1)
which = numpy.logical_and(xyr[:, 0] == x_val, xyr[:, 1] == 0)
xyr[which, ] = (x_val + numpy.sign(x_val) * shifts_a[ind], 0, shifts_r[ind])
return xyr
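# Illustrative check (added; not in the original file): a radius column is
# appended to each (x, y) site, and the default shifts push the three holes
# flanking the defect outward by 15%, 0%, and 7.5% of the lattice constant:
#
#   >>> ln_shift_defect((10, 10), 3).shape[1]
#   3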
def r6_defect(mirror_dims: Tuple[int, int]) -> numpy.ndarray:
"""
R6 defect in a triangular lattice.
Args:
mirror_dims: [x, y] mirror lengths (number of holes). Total number of holes
is 2 * n + 1 in each direction.
Returns:
`[[x0, y0], [x1, y1], ...]` specifying hole centers.
"""
xy = triangular_lattice([2 * d + 1 for d in mirror_dims])
rem_holes_plus = numpy.array([[1, 0],
[0.5, +numpy.sqrt(3)/2],
[0.5, -numpy.sqrt(3)/2]])
rem_holes = numpy.vstack((rem_holes_plus, -rem_holes_plus))
for rem_xy in rem_holes:
xy = xy[(xy != rem_xy).any(axis=1), ]
return xy
def l3_shift_perturbed_defect(
mirror_dims: Tuple[int, int],
perturbed_radius: float = 1.1,
shifts_a: Sequence[float] = (),
shifts_r: Sequence[float] = ()
) -> numpy.ndarray:
"""
3-hole defect with perturbed hole sizes intended to form an upwards-directed
beam. Can also include shifted holes along the defect line, intended
to give the mode a more gaussian profile to improve Q.
Args:
mirror_dims: [x, y] mirror lengths (number of holes). Total number of holes
is 2 * n + 1 in each direction.
perturbed_radius: Amount to perturb the radius of the holes used for beam-forming
shifts_a: Percentage of a to shift (1st, 2nd, 3rd,...) holes along the defect line
shifts_r: Factor to multiply the radius by. Should match length of shifts_a
Returns:
`[[x0, y0, r0], [x1, y1, r1], ...]` for all the holes
"""
xyr = ln_shift_defect(mirror_dims, 3, shifts_a, shifts_r)
abs_x, abs_y = (numpy.fabs(xyr[:, i]) for i in (0, 1))
# Sorted unique xs and ys
# Ignore row y=0 because it might have shifted holes
xs = numpy.unique(abs_x[abs_x != 0])
ys = numpy.unique(abs_y)
# which holes should be perturbed? (xs[[3, 7]], ys[1]) and (xs[[2, 6]], ys[2])
perturbed_holes = [(xs[a], ys[b]) for a, b in ((3, 1), (7, 1), (2, 2), (6, 2))]
for row in xyr:
if (numpy.fabs(row[0]), numpy.fabs(row[1])) in perturbed_holes:  # compare (|x|, |y|); a generator here would be exhausted after one pass
row[2] = perturbed_radius
return xyr
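# Minimal usage sketch (added for illustration; not part of the original file):
if __name__ == '__main__':
    lattice = triangular_lattice((5, 5))
    wg = waveguide(length=10, num_mirror=4)
    cavity = l3_shift_perturbed_defect((8, 8))
    print(f'{lattice.shape[0]} lattice sites, {wg.shape[0]} waveguide holes, '
          f'{cavity.shape[0]} cavity holes (x, y, r)')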

examples/phc.py Normal file
View File

@ -0,0 +1,178 @@
from typing import Tuple, Sequence
import numpy # type: ignore
from numpy import pi
from masque import layer_t, Pattern, SubPattern, Label
from masque.shapes import Polygon, Circle
from masque.builder import Device, Port
from masque.library import Library, DeviceLibrary
from masque.file.klamath import writefile
import pcgen
HOLE_SCALE: float = 1000
''' Radius for the 'hole' cell. Should be significantly bigger than
1 (minimum database unit) in order to have enough precision to
reasonably represent a polygonized circle (for GDS)
'''
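# Note (added for clarity): the hole is drawn oversized -- radius
# HOLE_SCALE * 0.35 = 350 database units -- and each instance is scaled by
# r * lattice_constant / HOLE_SCALE, so the as-placed radius works out to
# 0.35 * r * lattice_constant (about 179 units for r=1, a=512).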
def hole(layer: layer_t,
radius: float = HOLE_SCALE * 0.35,
) -> Pattern:
"""
Generate a pattern containing a single circular hole.
Args:
layer: Layer to draw the circle on.
radius: Circle radius.
Returns:
Pattern, named `'hole'`
"""
pat = Pattern('hole', shapes=[
Circle(radius=radius, offset=(0, 0), layer=layer, dose=1.0)
])
return pat
def perturbed_l3(lattice_constant: float,
hole: Pattern,
trench_dose: float = 1.0,
trench_layer: layer_t = (1, 0),
shifts_a: Sequence[float] = (0.15, 0, 0.075),
shifts_r: Sequence[float] = (1.0, 1.0, 1.0),
xy_size: Tuple[int, int] = (10, 10),
perturbed_radius: float = 1.1,
trench_width: float = 1200,
) -> Device:
"""
Generate a `Device` representing a perturbed L3 cavity.
Args:
lattice_constant: Distance between nearest neighbor holes
hole: `Pattern` object containing a single hole
trench_dose: Dose for the trenches. Default 1.0. (Hole dose is 1.0.)
trench_layer: Layer for the trenches, default `(1, 0)`.
shifts_a: passed to `pcgen.l3_shift`; specifies lattice constant
(1 - multiplicative factor) for shifting holes adjacent to
the defect (same row). Default `(0.15, 0, 0.075)` for first,
second, third holes.
shifts_r: passed to `pcgen.l3_shift`; specifies radius for perturbing
holes adjacent to the defect (same row). Default 1.0 for all holes.
Provided sequence should have same length as `shifts_a`.
xy_size: `(x, y)` number of mirror periods in each direction; total size is
`2 * n + 1` holes in each direction. Default (10, 10).
perturbed_radius: radius of holes perturbed to form an upwards-directed beam
(multiplicative factor). Default 1.1.
trench_width: Width of the undercut trenches. Default 1200.
Returns:
`Device` object representing the L3 design.
"""
xyr = pcgen.l3_shift_perturbed_defect(mirror_dims=xy_size,
perturbed_radius=perturbed_radius,
shifts_a=shifts_a,
shifts_r=shifts_r)
pat = Pattern(f'L3p-a{lattice_constant:g}rp{perturbed_radius:g}')
pat.subpatterns += [SubPattern(hole, offset=(lattice_constant * x,
lattice_constant * y), scale=r * lattice_constant / HOLE_SCALE)
for x, y, r in xyr]
min_xy, max_xy = pat.get_bounds()
trench_dx = max_xy[0] - min_xy[0]
pat.shapes += [
Polygon.rect(ymin=max_xy[1], xmin=min_xy[0], lx=trench_dx, ly=trench_width,
layer=trench_layer, dose=trench_dose),
Polygon.rect(ymax=min_xy[1], xmin=min_xy[0], lx=trench_dx, ly=trench_width,
layer=trench_layer, dose=trench_dose),
]
ports = {
'input': Port((-lattice_constant * xy_size[0], 0), rotation=0, ptype=1),
'output': Port((lattice_constant * xy_size[0], 0), rotation=pi, ptype=1),
}
return Device(pat, ports)
def waveguide(lattice_constant: float,
hole: Pattern,
length: int,
mirror_periods: int,
) -> Device:
xy = pcgen.waveguide(length=length + 2, num_mirror=mirror_periods)
pat = Pattern(f'_wg-a{lattice_constant:g}l{length}')
pat.subpatterns += [SubPattern(hole, offset=(lattice_constant * x,
lattice_constant * y), scale=lattice_constant / HOLE_SCALE)
for x, y in xy]
ports = {
'left': Port((-lattice_constant * length / 2, 0), rotation=0, ptype=1),
'right': Port((lattice_constant * length / 2, 0), rotation=pi, ptype=1),
}
return Device(pat, ports)
def bend(lattice_constant: float,
hole: Pattern,
mirror_periods: int,
) -> Device:
xy = pcgen.wgbend(num_mirror=mirror_periods)
pat_half = Pattern(f'_wgbend_half-a{lattice_constant:g}l{mirror_periods}')
pat_half.subpatterns += [SubPattern(hole, offset=(lattice_constant * x,
lattice_constant * y), scale=lattice_constant / HOLE_SCALE)
for x, y in xy]
pat = Pattern(f'_wgbend-a{lattice_constant:g}l{mirror_periods}')
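# Compose the full bend from two copies of the half-pattern: the second copy
# is mirrored and rotated by -2*pi/3 so the halves join along the bend's
# symmetry plane (comment added for clarity).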
pat.addsp(pat_half, offset=(0, 0), rotation=0, mirrored=(False, False))
pat.addsp(pat_half, offset=(0, 0), rotation=-2 * pi / 3, mirrored=(True, False))
ports = {
'left': Port((-lattice_constant * mirror_periods, 0), rotation=0, ptype=1),
'right': Port((lattice_constant * mirror_periods / 2,
lattice_constant * mirror_periods * numpy.sqrt(3) / 2), rotation=pi * 4 / 3, ptype=1),
}
return Device(pat, ports)
def label_ports(device: Device, layer: layer_t = (3, 0)) -> Device:
for name, port in device.ports.items():
angle_deg = numpy.rad2deg(port.rotation)
device.pattern.labels += [
Label(string=f'{name} (angle {angle_deg:g})', layer=layer, offset=port.offset)
]
return device
def main():
hole_layer = (1, 2)
a = 512
hole_pat = hole(layer=hole_layer)
wg0 = label_ports(waveguide(lattice_constant=a, hole=hole_pat, length=10, mirror_periods=5))
wg1 = label_ports(waveguide(lattice_constant=a, hole=hole_pat, length=5, mirror_periods=5))
bend0 = label_ports(bend(lattice_constant=a, hole=hole_pat, mirror_periods=5))
l3cav = label_ports(perturbed_l3(lattice_constant=a, hole=hole_pat, xy_size=(4, 10)))
dev = Device(name='my_bend', ports={})
dev.place(wg0, offset=(0, 0), port_map={'left': 'in', 'right': 'signal'})
dev.plug(wg0, {'signal': 'left'})
dev.plug(bend0, {'signal': 'left'})
dev.plug(wg1, {'signal': 'left'})
dev.plug(bend0, {'signal': 'right'})
dev.plug(wg0, {'signal': 'left'})
dev.plug(l3cav, {'signal': 'input'})
dev.plug(wg0, {'signal': 'left'})
writefile(dev.pattern, 'phc.gds', 1e-9, 1e-3)
dev.pattern.visualize()
if __name__ == '__main__':
main()

View File

@ -24,17 +24,17 @@
metaclass is used to auto-generate slots based on superclass type annotations.
- File I/O submodules are imported by `masque.file` to avoid creating hard dependencies on
external file-format reader/writers
- Pattern locking/unlocking is quite slow for large hierarchies.
"""
from .error import PatternError, PatternLockedError
from .error import PatternError
from .shapes import Shape
from .label import Label
from .subpattern import SubPattern
from .pattern import Pattern
from .utils import layer_t, annotations_t
from .library import Library, DeviceLibrary
from .library import Library, PatternGenerator
from .builder import DeviceLibrary, LibDeviceLibrary, Device, Port
__author__ = 'Jan Petykiewicz'

View File

@ -1,3 +1,4 @@
from .devices import Port, Device
from .utils import ell
from .tools import Tool
from .device_library import DeviceLibrary, LibDeviceLibrary

View File

@ -79,16 +79,6 @@ class DeviceLibrary:
def __repr__(self) -> str:
return '<DeviceLibrary with keys ' + repr(list(self.generators.keys())) + '>'
def set_const(self, const: Device) -> None:
"""
Convenience function to avoid having to manually wrap
already-generated Device objects into callables.
Args:
const: Pre-generated device object
"""
self.generators[const.pattern.name] = lambda: const
def add(
self: D,
other: D,
@ -175,7 +165,6 @@ class DeviceLibrary:
def build_dev() -> Device:
dev = fn()
dev.pattern = dev2pat(dev)
dev.pattern.rename(prefix + name)
return dev
self[prefix + name] = build_dev
@ -200,8 +189,8 @@ class DeviceLibrary:
def build_wrapped_dev() -> Device:
old_dev = self[old_name]
wrapper = Pattern(name=name)
wrapper.addsp(old_dev.pattern)
wrapper = Pattern()
wrapper.addsp(old_name)
return Device(wrapper, old_dev.ports)
self[name] = build_wrapped_dev
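Under the name-first scheme a wrapper is just an empty `Pattern` holding a single by-name reference; nothing is copied, and the target is resolved against the library later. A minimal sketch of the idea (cell name hypothetical):

wrapper = Pattern()
wrapper.addsp('ring_resonator')       # store only the target's name
device = Device(wrapper, old_ports)   # ports carried over from the wrapped device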

View File

@ -125,7 +125,7 @@ class Device(Copyable, Mirrorable):
- `Device(pattern, ports={'A': port_a, 'C': port_c})` uses an existing
pattern and defines some ports.
- `Device(name='my_dev_name', ports=None)` makes a new empty pattern with
- `Device(ports=None)` makes a new empty pattern with
default ports ('A' and 'B', in opposite directions, at (0, 0)).
- `my_device.build('my_layout')` makes a new pattern and instantiates
@ -182,7 +182,6 @@ class Device(Copyable, Mirrorable):
ports: Optional[Dict[str, Port]] = None,
*,
tools: Union[None, Tool, Dict[Optional[str], Tool]] = None,
name: Optional[str] = None,
) -> None:
"""
If `ports` is `None`, two default ports ('A' and 'B') are created.
@ -190,14 +189,7 @@ class Device(Copyable, Mirrorable):
(attached devices will be placed to the left) and 'B' has rotation
pi (attached devices will be placed to the right).
"""
if pattern is not None:
if name is not None:
raise DeviceError('Only one of `pattern` and `name` may be specified')
self.pattern = pattern
else:
if name is None:
raise DeviceError('Must specify either `pattern` or `name`')
self.pattern = Pattern(name=name)
self.pattern = pattern or Pattern()
if ports is None:
self.ports = {
@ -336,25 +328,22 @@ class Device(Copyable, Mirrorable):
return self
def build(self, name: str) -> 'Device':
def build(self) -> 'Device':
"""
Begin building a new device around an instance of the current device
(rather than modifying the current device).
Args:
name: A name for the new device
Returns:
The new `Device` object.
"""
pat = Pattern(name)
# TODO lib: this needs a name for self, rather than for the built thing
pat = Pattern()
pat.addsp(self.pattern)
new = Device(pat, ports=self.ports, tools=self.tools)
return new
def as_interface(
self,
name: str,
in_prefix: str = 'in_',
out_prefix: str = '',
port_map: Optional[Union[Dict[str, str], Sequence[str]]] = None
@ -380,7 +369,6 @@ class Device(Copyable, Mirrorable):
current device.
Args:
name: Name for the new device
in_prefix: Prepended to port names for newly-created ports with
reversed directions compared to the current device.
out_prefix: Prepended to port names for ports which are directly
@ -424,12 +412,13 @@ class Device(Copyable, Mirrorable):
if duplicates:
raise DeviceError(f'Duplicate keys after prefixing, try a different prefix: {duplicates}')
new = Device(name=name, ports={**ports_in, **ports_out}, tools=self.tools)
new = Device(ports={**ports_in, **ports_out}, tools=self.tools)
return new
def plug(
self: D,
other: O,
library: Mapping[str, 'Device'],
name: str,
map_in: Dict[str, str],
map_out: Optional[Dict[str, Optional[str]]] = None,
*,
@ -438,27 +427,29 @@ class Device(Copyable, Mirrorable):
set_rotation: Optional[bool] = None,
) -> D:
"""
Instantiate the device `other` into the current device, connecting
Instantiate a device `library[name]` into the current device, connecting
the ports specified by `map_in` and renaming the unconnected
ports specified by `map_out`.
Examples:
=========
- `my_device.plug(subdevice, {'A': 'C', 'B': 'B'}, map_out={'D': 'myport'})`
instantiates `subdevice` into `my_device`, plugging ports 'A' and 'B'
- `my_device.plug(lib, 'subdevice', {'A': 'C', 'B': 'B'}, map_out={'D': 'myport'})`
instantiates `lib['subdevice']` into `my_device`, plugging ports 'A' and 'B'
of `my_device` into ports 'C' and 'B' of `subdevice`. The connected ports
are removed and any unconnected ports from `subdevice` are added to
`my_device`. Port 'D' of `subdevice` (unconnected) is renamed to 'myport'.
- `my_device.plug(wire, {'myport': 'A'})` places port 'A' of `wire` at 'myport'
of `my_device`. If `wire` has only two ports (e.g. 'A' and 'B'), no `map_out`,
argument is provided, and the `inherit_name` argument is not explicitly
set to `False`, the unconnected port of `wire` is automatically renamed to
'myport'. This allows easy extension of existing ports without changing
their names or having to provide `map_out` each time `plug` is called.
- `my_device.plug(lib, 'wire', {'myport': 'A'})` places port 'A' of `lib['wire']`
at 'myport' of `my_device`.
If `'wire'` has only two ports (e.g. 'A' and 'B'), no `map_out` argument is
provided, and the `inherit_name` argument is not explicitly set to `False`,
the unconnected port of `wire` is automatically renamed to 'myport'. This
allows easy extension of existing ports without changing their names or
having to provide `map_out` each time `plug` is called.
Args:
other: A device to instantiate into the current device.
library: A `DeviceLibrary` containing the device to be instantiated.
name: The name of the device to be instantiated (from `library`).
map_in: Dict of `{'self_port': 'other_port'}` mappings, specifying
port connections between the two devices.
map_out: Dict of `{'old_name': 'new_name'}` mappings, specifying
@ -513,13 +504,14 @@ class Device(Copyable, Mirrorable):
del self.ports[ki]
map_out[vi] = None
self.place(other, offset=translation, rotation=rotation, pivot=pivot,
self.place(library, name, offset=translation, rotation=rotation, pivot=pivot,
mirrored=mirrored, port_map=map_out, skip_port_check=True)
return self
def place(
self: D,
other: O,
library: Mapping[str, 'Device'],
name: str,
*,
offset: ArrayLike = (0, 0),
rotation: float = 0,
@ -529,7 +521,7 @@ class Device(Copyable, Mirrorable):
skip_port_check: bool = False,
) -> D:
"""
Instantiate the device `other` into the current device, adding its
Instantiate the device `library[name]` into the current device, adding its
ports to those of the current device (but not connecting any ports).
Mirroring is applied before rotation; translation (`offset`) is applied last.
@ -543,16 +535,17 @@ class Device(Copyable, Mirrorable):
rather than the port name on the original `pad` device.
Args:
other: A device to instantiate into the current device.
offset: Offset at which to place `other`. Default (0, 0).
rotation: Rotation applied to `other` before placement. Default 0.
library: A `DeviceLibrary` containing the device to be instantiated.
name: The name of the device to be instantiated (from `library`).
offset: Offset at which to place the instance. Default (0, 0).
rotation: Rotation applied to the instance before placement. Default 0.
pivot: Rotation is applied around this pivot point (default (0, 0)).
Rotation is applied prior to translation (`offset`).
mirrored: Whether `other` should be mirrored across the x and y axes.
mirrored: Whether the instance should be mirrored across the x and y axes.
Mirroring is applied before translation and rotation.
port_map: Dict of `{'old_name': 'new_name'}` mappings, specifying
new names for ports in `other`. New names can be `None`, which will
delete those ports.
new names for ports in the instantiated device. New names can be
`None`, which will delete those ports.
skip_port_check: Can be used to skip the internal call to `check_ports`,
in case it has already been performed elsewhere.
@ -561,7 +554,7 @@ class Device(Copyable, Mirrorable):
Raises:
`DeviceError` if any ports specified in `map_in` or `map_out` do not
exist in `self.ports` or `other_names`.
exist in `self.ports` or `library[name].ports`.
`DeviceError` if there are any duplicate names after `map_in` and `map_out`
are applied.
"""
@ -572,6 +565,8 @@ class Device(Copyable, Mirrorable):
if port_map is None:
port_map = {}
other = library[name]
if not skip_port_check:
self.check_ports(other.ports.keys(), map_in=None, map_out=port_map)
@ -589,7 +584,7 @@ class Device(Copyable, Mirrorable):
p.translate(offset)
self.ports[name] = p
sp = SubPattern(other.pattern, mirrored=mirrored)
sp = SubPattern(name, mirrored=mirrored) #TODO figure out how this should work?!
sp.rotate_around(pivot, rotation)
sp.translate(offset)
self.pattern.subpatterns.append(sp)
@ -748,19 +743,6 @@ class Device(Copyable, Mirrorable):
self._dead = True
return self
def rename(self: D, name: str) -> D:
"""
Renames the pattern and returns the device
Args:
name: The new name
Returns:
self
"""
self.pattern.name = name
return self
def __repr__(self) -> str:
s = f'<Device {self.pattern} ['
for name, port in self.ports.items():
@ -831,7 +813,7 @@ class Device(Copyable, Mirrorable):
return self.path(portspec, ccw, length, tool_port_names=tool_port_names, **kwargs)
def busL(
def mpath(
self: D,
portspec: Union[str, Sequence[str]],
ccw: Optional[bool],
@ -839,7 +821,6 @@ class Device(Copyable, Mirrorable):
spacing: Optional[Union[float, ArrayLike]] = None,
set_rotation: Optional[float] = None,
tool_port_names: Sequence[str] = ('A', 'B'),
container_name: str = '_busL',
force_container: bool = False,
**kwargs,
) -> D:
@ -873,7 +854,7 @@ class Device(Copyable, Mirrorable):
port_name = tuple(portspec)[0]
return self.path(port_name, ccw, extensions[port_name], tool_port_names=tool_port_names)
else:
dev = Device(name='', ports=ports, tools=self.tools).as_interface(container_name)
dev = Device(name='', ports=ports, tools=self.tools).as_interface()
for name, length in extensions.items():
dev.path(name, ccw, length, tool_port_names=tool_port_names)
return self.plug(dev, {sp: 'in_' + sp for sp in ports.keys()}) # TODO safe to use 'in_'?

View File

@ -11,13 +11,6 @@ class PatternError(MasqueError):
"""
pass
class PatternLockedError(PatternError):
"""
Exception raised when trying to modify a locked pattern
"""
def __init__(self):
PatternError.__init__(self, 'Tried to modify a locked Pattern, subpattern, or shape')
class LibraryError(MasqueError):
"""

View File

@ -1,7 +1,7 @@
"""
DXF file format readers and writers
"""
from typing import List, Any, Dict, Tuple, Callable, Union, Sequence, Iterable
from typing import List, Any, Dict, Tuple, Callable, Union, Sequence, Iterable, Mapping
import re
import io
import base64
@ -10,7 +10,7 @@ import logging
import pathlib
import gzip
import numpy # type: ignore
import numpy
import ezdxf # type: ignore
from .. import Pattern, SubPattern, PatternError, Label, Shape
@ -29,12 +29,13 @@ DEFAULT_LAYER = 'DEFAULT'
def write(
pattern: Pattern,
top_name: str,
library: Mapping[str, Pattern],
stream: io.TextIOBase,
*,
modify_originals: bool = False,
dxf_version='AC1024',
disambiguate_func: Callable[[Iterable[Pattern]], None] = None,
disambiguate_func: Callable[[Iterable[str]], List[str]] = None,
) -> None:
"""
Write a `Pattern` to a DXF file, by first calling `.polygonize()` to change the shapes
@ -60,10 +61,12 @@ def write(
array with rotated instances must be manhattan _after_ having a compensating rotation applied.
Args:
patterns: A Pattern or list of patterns to write to the stream.
top_name: Name of the top-level pattern to write.
library: A {name: Pattern} mapping of patterns. Only `top_name` and patterns referenced
by it are written.
stream: Stream object to write to.
modify_originals: If `True`, the original pattern is modified as part of the writing
process. Otherwise, a copy is made and `deepunlock()`-ed.
process. Otherwise, a copy is made.
Default `False`.
disambiguate_func: Function which takes a list of pattern names and returns a list of
names altered to be valid and unique. Default is `disambiguate_pattern_names`.
@ -75,11 +78,14 @@ def write(
assert(disambiguate_func is not None)
if not modify_originals:
pattern = pattern.deepcopy().deepunlock()
library = library.deepcopy()
# Get a dict of id(pattern) -> pattern
patterns_by_id = pattern.referenced_patterns_by_id()
disambiguate_func(patterns_by_id.values())
pattern = library[top_name]
old_names = list(library.keys())
new_names = disambiguate_func(old_names)
renamed_lib = {new_name: library[old_name]
for old_name, new_name in zip(old_names, new_names)}
# Create library
lib = ezdxf.new(dxf_version, setup=True)
@ -89,9 +95,9 @@ def write(
_subpatterns_to_refs(msp, pattern.subpatterns)
# Now create a block for each referenced pattern, and add in any shapes
for pat in patterns_by_id.values():
for name, pat in renamed_lib.items():
assert(pat is not None)
block = lib.blocks.new(name=pat.name)
block = lib.blocks.new(name=name)
_shapes_to_elements(block, pat.shapes)
_labels_to_texts(block, pat.labels)
@ -101,7 +107,8 @@ def write(
def writefile(
pattern: Pattern,
top_name: str,
library: Mapping[str, Pattern],
filename: Union[str, pathlib.Path],
*args,
**kwargs,
@ -112,7 +119,9 @@ def writefile(
Will automatically compress the file if it has a .gz suffix.
Args:
pattern: `Pattern` to save
top_name: Name of the top-level pattern to write.
library: A {name: Pattern} mapping of patterns. Only `top_name` and patterns referenced
by it are written.
filename: Filename to save to.
*args: passed to `dxf.write`
**kwargs: passed to `dxf.write`
@ -124,7 +133,7 @@ def writefile(
open_func = open
with open_func(path, mode='wt') as stream:
write(pattern, stream, *args, **kwargs)
write(top_name, library, stream, *args, **kwargs)
def readfile(
@ -156,7 +165,7 @@ def readfile(
def read(
stream: io.TextIOBase,
clean_vertices: bool = True,
) -> Tuple[Pattern, Dict[str, Any]]:
) -> Tuple[Dict[str, Pattern], Dict[str, Any]]:
"""
Read a DXF file and translate it into a dict of `Pattern` objects. DXF `Block`s are
translated into `Pattern` objects; `LWPolyline`s are translated into polygons, and `Insert`s
@ -176,26 +185,20 @@ def read(
lib = ezdxf.read(stream)
msp = lib.modelspace()
pat = _read_block(msp, clean_vertices)
patterns = [pat] + [_read_block(bb, clean_vertices) for bb in lib.blocks if bb.name != '*Model_Space']
# Create a dict of {pattern.name: pattern, ...}, then fix up all subpattern.pattern entries
# according to the subpattern.identifier (which is deleted after use).
patterns_dict = dict(((p.name, p) for p in patterns))
for p in patterns_dict.values():
for sp in p.subpatterns:
sp.pattern = patterns_dict[sp.identifier[0]]
del sp.identifier
npat = _read_block(msp, clean_vertices)
patterns_dict = dict([npat]
+ [_read_block(bb, clean_vertices) for bb in lib.blocks if bb.name != '*Model_Space'])
library_info = {
'layers': [ll.dxfattribs() for ll in lib.layers]
}
return pat, library_info
return patterns_dict, library_info
def _read_block(block, clean_vertices: bool) -> Pattern:
pat = Pattern(block.name)
def _read_block(block, clean_vertices: bool) -> Tuple[str, Pattern]:
name = block.name
pat = Pattern()
for element in block:
eltype = element.dxftype()
if eltype in ('POLYLINE', 'LWPOLYLINE'):
@ -258,12 +261,12 @@ def _read_block(block, clean_vertices: bool) -> Pattern:
offset = numpy.array(attr.get('insert', (0, 0, 0)))[:2]
args = {
'target': (attr.get('name', None),),
'offset': offset,
'scale': scale,
'mirrored': mirrored,
'rotation': rotation,
'pattern': None,
'identifier': (attr.get('name', None),),
}
if 'column_count' in attr:
@ -274,7 +277,7 @@ def _read_block(block, clean_vertices: bool) -> Pattern:
pat.subpatterns.append(SubPattern(**args))
else:
logger.warning(f'Ignoring DXF element {element.dxftype()} (not implemented).')
return pat
return name, pat
def _subpatterns_to_refs(
@ -282,9 +285,9 @@ def _subpatterns_to_refs(
subpatterns: List[SubPattern],
) -> None:
for subpat in subpatterns:
if subpat.pattern is None:
if subpat.target is None:
continue
encoded_name = subpat.pattern.name
encoded_name = subpat.target
rotation = (subpat.rotation * 180 / numpy.pi) % 360
attribs = {
@ -360,18 +363,24 @@ def _mlayer2dxf(layer: layer_t) -> str:
def disambiguate_pattern_names(
patterns: Iterable[Pattern],
names: Iterable[str],
max_name_length: int = 32,
suffix_length: int = 6,
dup_warn_filter: Callable[[str], bool] = None, # If returns False, don't warn about this name
) -> None:
used_names = []
for pat in patterns:
sanitized_name = re.compile(r'[^A-Za-z0-9_\?\$]').sub('_', pat.name)
) -> List[str]:
"""
Args:
names: List of pattern names to disambiguate
max_name_length: Names longer than this will be truncated
suffix_length: Names which get truncated are truncated by this many extra characters. This is to
leave room for a suffix if one is necessary.
"""
new_names = []
for name in names:
sanitized_name = re.compile(r'[^A-Za-z0-9_\?\$]').sub('_', name)
i = 0
suffixed_name = sanitized_name
while suffixed_name in used_names or suffixed_name == '':
while suffixed_name in new_names or suffixed_name == '':
suffix = base64.b64encode(struct.pack('>Q', i), b'$?').decode('ASCII')
suffixed_name = sanitized_name + '$' + suffix[:-1].lstrip('A')
@ -380,17 +389,16 @@ def disambiguate_pattern_names(
if sanitized_name == '':
logger.warning(f'Empty pattern name saved as "{suffixed_name}"')
elif suffixed_name != sanitized_name:
if dup_warn_filter is None or dup_warn_filter(pat.name):
logger.warning(f'Pattern name "{pat.name}" ({sanitized_name}) appears multiple times;\n'
if dup_warn_filter is None or dup_warn_filter(name):
logger.warning(f'Pattern name "{name}" ({sanitized_name}) appears multiple times;\n'
+ f' renaming to "{suffixed_name}"')
if len(suffixed_name) == 0:
# Should never happen since zero-length names are replaced
raise PatternError(f'Zero-length name after sanitize,\n originally "{pat.name}"')
raise PatternError(f'Zero-length name after sanitize,\n originally "{name}"')
if len(suffixed_name) > max_name_length:
raise PatternError(f'Pattern name "{suffixed_name!r}" length > {max_name_length} after encode,\n'
+ f' originally "{pat.name}"')
pat.name = suffixed_name
used_names.append(suffixed_name)
+ f' originally "{name}"')
new_names.append(suffixed_name)
return new_names
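For intuition, the renaming behaves roughly like this (names hypothetical; suffix shown for the first collision):

disambiguate_pattern_names(['my cell', 'my_cell'])
# -> ['my_cell', 'my_cell$']  (space sanitized to '_', duplicate suffixed)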

View File

@ -53,18 +53,22 @@ path_cap_map = {
}
def rint_cast(val: ArrayLike) -> NDArray[numpy.int32]:
return numpy.rint(val, dtype=numpy.int32, casting='unsafe')
def write(
patterns: Union[Pattern, Sequence[Pattern]],
library: Mapping[str, Pattern],
stream: BinaryIO,
meters_per_unit: float,
logical_units_per_unit: float = 1,
library_name: str = 'masque-klamath',
*,
modify_originals: bool = False,
disambiguate_func: Callable[[Iterable[Pattern]], None] = None,
disambiguate_func: Callable[[Iterable[str]], List[str]] = None,
) -> None:
"""
Convert a `Pattern` or list of patterns to a GDSII stream, and then mapping data as follows:
Convert a library to a GDSII stream, mapping data as follows:
Pattern -> GDSII structure
SubPattern -> GDSII SREF or AREF
Path -> GDSII path
@ -85,7 +89,7 @@ def write(
prior to calling this function.
Args:
patterns: A Pattern or list of patterns to convert.
library: A {name: Pattern} mapping of patterns to write.
meters_per_unit: Written into the GDSII file, meters per (database) length unit.
All distances are assumed to be an integer multiple of this unit, and are stored as such.
logical_units_per_unit: Written into the GDSII file. Allows the GDSII to specify a
@ -94,52 +98,48 @@ def write(
library_name: Library name written into the GDSII file.
Default 'masque-klamath'.
modify_originals: If `True`, the original pattern is modified as part of the writing
process. Otherwise, a copy is made and `deepunlock()`-ed.
process. Otherwise, a copy is made.
Default `False`.
disambiguate_func: Function which takes a list of patterns and alters them
to make their names valid and unique. Default is `disambiguate_pattern_names`, which
attempts to adhere to the GDSII standard as well as possible.
disambiguate_func: Function which takes a list of pattern names and returns a list of names
altered to be valid and unique. Default is `disambiguate_pattern_names`, which
attempts to adhere to the GDSII standard reasonably well.
WARNING: No additional error checking is performed on the results.
"""
if isinstance(patterns, Pattern):
patterns = [patterns]
if disambiguate_func is None:
disambiguate_func = disambiguate_pattern_names # type: ignore
assert(disambiguate_func is not None) # placate mypy
disambiguate_func = disambiguate_pattern_names
if not modify_originals:
patterns = [p.deepunlock() for p in copy.deepcopy(patterns)]
library = copy.deepcopy(library)
patterns = [p.wrap_repeated_shapes() for p in patterns]
for p in library.values():
library.add(p.wrap_repeated_shapes())
old_names = list(library.keys())
new_names = disambiguate_func(old_names)
renamed_lib = {new_name: library[old_name]
for old_name, new_name in zip(old_names, new_names)}
# Create library
header = klamath.library.FileHeader(name=library_name.encode('ASCII'),
user_units_per_db_unit=logical_units_per_unit,
meters_per_db_unit=meters_per_unit)
header = klamath.library.FileHeader(
name=library_name.encode('ASCII'),
user_units_per_db_unit=logical_units_per_unit,
meters_per_db_unit=meters_per_unit,
)
header.write(stream)
# Get a dict of id(pattern) -> pattern
patterns_by_id = {id(pattern): pattern for pattern in patterns}
for pattern in patterns:
for i, p in pattern.referenced_patterns_by_id().items():
patterns_by_id[i] = p
disambiguate_func(patterns_by_id.values())
# Now create a structure for each pattern, and add in any Boundary and SREF elements
for pat in patterns_by_id.values():
for name, pat in renamed_lib.items():
elements: List[klamath.elements.Element] = []
elements += _shapes_to_elements(pat.shapes)
elements += _labels_to_texts(pat.labels)
elements += _subpatterns_to_refs(pat.subpatterns)
klamath.library.write_struct(stream, name=pat.name.encode('ASCII'), elements=elements)
klamath.library.write_struct(stream, name=name.encode('ASCII'), elements=elements)
records.ENDLIB.write(stream, None)
def writefile(
patterns: Union[Sequence[Pattern], Pattern],
library: Mapping[str, Pattern],
filename: Union[str, pathlib.Path],
*args,
**kwargs,
@ -150,7 +150,7 @@ def writefile(
Will automatically compress the file if it has a .gz suffix.
Args:
patterns: `Pattern` or list of patterns to save
library: {name: Pattern} pairs to save.
filename: Filename to save to.
*args: passed to `write()`
**kwargs: passed to `write()`
@ -216,22 +216,14 @@ def read(
"""
library_info = _read_header(stream)
patterns = []
patterns_dict = {}
found_struct = records.BGNSTR.skip_past(stream)
while found_struct:
name = records.STRNAME.skip_and_read(stream)
pat = read_elements(stream, name=name.decode('ASCII'), raw_mode=raw_mode)
patterns.append(pat)
pat = read_elements(stream, raw_mode=raw_mode)
patterns_dict[name.decode('ASCII')] = pat
found_struct = records.BGNSTR.skip_past(stream)
# Create a dict of {pattern.name: pattern, ...}, then fix up all subpattern.pattern entries
# according to the subpattern.identifier (which is deleted after use).
patterns_dict = dict(((p.name, p) for p in patterns))
for p in patterns_dict.values():
for sp in p.subpatterns:
sp.pattern = patterns_dict[sp.identifier[0]]
del sp.identifier
return patterns_dict, library_info
@ -250,7 +242,6 @@ def _read_header(stream: BinaryIO) -> Dict[str, Any]:
def read_elements(
stream: BinaryIO,
name: str,
raw_mode: bool = True,
) -> Pattern:
"""
@ -265,7 +256,7 @@ def read_elements(
Returns:
A pattern containing the elements that were read.
"""
pat = Pattern(name)
pat = Pattern()
elements = klamath.library.read_elements(stream)
for element in elements:
@ -276,10 +267,12 @@ def read_elements(
path = _gpath_to_mpath(element, raw_mode)
pat.shapes.append(path)
elif isinstance(element, klamath.elements.Text):
label = Label(offset=element.xy.astype(float),
layer=element.layer,
string=element.string.decode('ASCII'),
annotations=_properties_to_annotations(element.properties))
label = Label(
offset=element.xy.astype(float),
layer=element.layer,
string=element.string.decode('ASCII'),
annotations=_properties_to_annotations(element.properties),
)
pat.labels.append(label)
elif isinstance(element, klamath.elements.Reference):
pat.subpatterns.append(_ref_to_subpat(element))
@ -304,8 +297,7 @@ def _mlayer2gds(mlayer: layer_t) -> Tuple[int, int]:
def _ref_to_subpat(ref: klamath.library.Reference) -> SubPattern:
"""
Helper function to create a SubPattern from an SREF or AREF. Sets subpat.pattern to None
and sets the instance .identifier to (struct_name,).
Helper function to create a SubPattern from an SREF or AREF. Sets subpat.target to struct_name.
"""
xy = ref.xy.astype(float)
offset = xy[0]
@ -317,14 +309,15 @@ def _ref_to_subpat(ref: klamath.library.Reference) -> SubPattern:
repetition = Grid(a_vector=a_vector, b_vector=b_vector,
a_count=a_count, b_count=b_count)
subpat = SubPattern(pattern=None,
offset=offset,
rotation=numpy.deg2rad(ref.angle_deg),
scale=ref.mag,
mirrored=(ref.invert_y, False),
annotations=_properties_to_annotations(ref.properties),
repetition=repetition)
subpat.identifier = (ref.struct_name.decode('ASCII'),)
subpat = SubPattern(
pattern=ref.struct_name.decode('ASCII'),
offset=offset,
rotation=numpy.deg2rad(ref.angle_deg),
scale=ref.mag,
mirrored=(ref.invert_y, False),
annotations=_properties_to_annotations(ref.properties),
repetition=repetition,
)
return subpat
@ -334,34 +327,36 @@ def _gpath_to_mpath(gpath: klamath.library.Path, raw_mode: bool) -> Path:
else:
raise PatternError(f'Unrecognized path type: {gpath.path_type}')
mpath = Path(vertices=gpath.xy.astype(float),
layer=gpath.layer,
width=gpath.width,
cap=cap,
offset=numpy.zeros(2),
annotations=_properties_to_annotations(gpath.properties),
raw=raw_mode,
)
mpath = Path(
vertices=gpath.xy.astype(float),
layer=gpath.layer,
width=gpath.width,
cap=cap,
offset=numpy.zeros(2),
annotations=_properties_to_annotations(gpath.properties),
raw=raw_mode,
)
if cap == Path.Cap.SquareCustom:
mpath.cap_extensions = gpath.extension
return mpath
def _boundary_to_polygon(boundary: klamath.library.Boundary, raw_mode: bool) -> Polygon:
return Polygon(vertices=boundary.xy[:-1].astype(float),
layer=boundary.layer,
offset=numpy.zeros(2),
annotations=_properties_to_annotations(boundary.properties),
raw=raw_mode,
)
return Polygon(
vertices=boundary.xy[:-1].astype(float),
layer=boundary.layer,
offset=numpy.zeros(2),
annotations=_properties_to_annotations(boundary.properties),
raw=raw_mode,
)
def _subpatterns_to_refs(subpatterns: List[SubPattern]) -> List[klamath.library.Reference]:
refs = []
for subpat in subpatterns:
if subpat.pattern is None:
if subpat.target is None:
continue
encoded_name = subpat.pattern.name.encode('ASCII')
encoded_name = subpat.target.encode('ASCII')
# Note: GDS mirrors first and rotates second
mirror_across_x, extra_angle = normalize_mirror(subpat.mirrored)
@ -377,32 +372,39 @@ def _subpatterns_to_refs(subpatterns: List[SubPattern]) -> List[klamath.library.
rep.a_vector * rep.a_count,
b_vector * b_count,
]
aref = klamath.library.Reference(struct_name=encoded_name,
xy=numpy.round(xy).astype(int),
colrow=(numpy.round(rep.a_count), numpy.round(rep.b_count)),
angle_deg=angle_deg,
invert_y=mirror_across_x,
mag=subpat.scale,
properties=properties)
aref = klamath.library.Reference(
struct_name=encoded_name,
xy=rint_cast(xy),
colrow=(numpy.rint(rep.a_count), numpy.rint(rep.b_count)),
angle_deg=angle_deg,
invert_y=mirror_across_x,
mag=subpat.scale,
properties=properties,
)
refs.append(aref)
elif rep is None:
ref = klamath.library.Reference(struct_name=encoded_name,
xy=numpy.round([subpat.offset]).astype(int),
colrow=None,
angle_deg=angle_deg,
invert_y=mirror_across_x,
mag=subpat.scale,
properties=properties)
ref = klamath.library.Reference(
struct_name=encoded_name,
xy=rint_cast([subpat.offset]),
colrow=None,
angle_deg=angle_deg,
invert_y=mirror_across_x,
mag=subpat.scale,
properties=properties,
)
refs.append(ref)
else:
new_srefs = [klamath.library.Reference(struct_name=encoded_name,
xy=numpy.round([subpat.offset + dd]).astype(int),
colrow=None,
angle_deg=angle_deg,
invert_y=mirror_across_x,
mag=subpat.scale,
properties=properties)
for dd in rep.displacements]
new_srefs = [
klamath.library.Reference(
struct_name=encoded_name,
xy=rint_cast([subpat.offset + dd]),
colrow=None,
angle_deg=angle_deg,
invert_y=mirror_across_x,
mag=subpat.scale,
properties=properties,
)
for dd in rep.displacements]
refs += new_srefs
return refs
@ -443,8 +445,8 @@ def _shapes_to_elements(
layer, data_type = _mlayer2gds(shape.layer)
properties = _annotations_to_properties(shape.annotations, 128)
if isinstance(shape, Path) and not polygonize_paths:
xy = numpy.round(shape.vertices + shape.offset).astype(int)
width = numpy.round(shape.width).astype(int)
xy = rint_cast(shape.vertices + shape.offset)
width = rint_cast(shape.width)
path_type = next(k for k, v in path_cap_map.items() if v == shape.cap) # reverse lookup
extension: Tuple[int, int]
@ -453,30 +455,36 @@ def _shapes_to_elements(
else:
extension = (0, 0)
path = klamath.elements.Path(layer=(layer, data_type),
xy=xy,
path_type=path_type,
width=width,
extension=extension,
properties=properties)
path = klamath.elements.Path(
layer=(layer, data_type),
xy=xy,
path_type=path_type,
width=width,
extension=extension,
properties=properties,
)
elements.append(path)
elif isinstance(shape, Polygon):
polygon = shape
xy_closed = numpy.empty((polygon.vertices.shape[0] + 1, 2), dtype=numpy.int32)
numpy.rint(polygon.vertices + polygon.offset, out=xy_closed[:-1], casting='unsafe')
xy_closed[-1] = xy_closed[0]
boundary = klamath.elements.Boundary(layer=(layer, data_type),
xy=xy_closed,
properties=properties)
boundary = klamath.elements.Boundary(
layer=(layer, data_type),
xy=xy_closed,
properties=properties,
)
elements.append(boundary)
else:
for polygon in shape.to_polygons():
xy_closed = numpy.empty((polygon.vertices.shape[0] + 1, 2), dtype=numpy.int32)
numpy.rint(polygon.vertices + polygon.offset, out=xy_closed[:-1], casting='unsafe')
xy_closed[-1] = xy_closed[0]
boundary = klamath.elements.Boundary(layer=(layer, data_type),
xy=xy_closed,
properties=properties)
boundary = klamath.elements.Boundary(
layer=(layer, data_type),
xy=xy_closed,
properties=properties,
)
elements.append(boundary)
return elements
@ -486,46 +494,44 @@ def _labels_to_texts(labels: List[Label]) -> List[klamath.elements.Text]:
for label in labels:
properties = _annotations_to_properties(label.annotations, 128)
layer, text_type = _mlayer2gds(label.layer)
xy = numpy.round([label.offset]).astype(int)
text = klamath.elements.Text(layer=(layer, text_type),
xy=xy,
string=label.string.encode('ASCII'),
properties=properties,
presentation=0, # TODO maybe set some of these?
angle_deg=0,
invert_y=False,
width=0,
path_type=0,
mag=1)
xy = rint_cast([label.offset])
text = klamath.elements.Text(
layer=(layer, text_type),
xy=xy,
string=label.string.encode('ASCII'),
properties=properties,
presentation=0, # TODO maybe set some of these?
angle_deg=0,
invert_y=False,
width=0,
path_type=0,
mag=1,
)
texts.append(text)
return texts
def disambiguate_pattern_names(
patterns: Sequence[Pattern],
names: Iterable[str],
max_name_length: int = 32,
suffix_length: int = 6,
dup_warn_filter: Optional[Callable[[str], bool]] = None,
) -> None:
) -> List[str]:
"""
Args:
patterns: List of patterns to disambiguate
names: List of pattern names to disambiguate
max_name_length: Names longer than this will be truncated
suffix_length: Names which get truncated are truncated by this many extra characters. This is to
leave room for a suffix if one is necessary.
dup_warn_filter: (optional) Function for suppressing warnings about cell names changing. Receives
the cell name and returns `False` if the warning should be suppressed and `True` if it should
be displayed. Default displays all warnings.
"""
used_names = []
for pat in set(patterns):
new_names = []
for name in names:
# Shorten names which already exceed max-length
if len(pat.name) > max_name_length:
shortened_name = pat.name[:max_name_length - suffix_length]
logger.warning(f'Pattern name "{pat.name}" is too long ({len(pat.name)}/{max_name_length} chars),\n'
if len(name) > max_name_length:
shortened_name = name[:max_name_length - suffix_length]
logger.warning(f'Pattern name "{name}" is too long ({len(name)}/{max_name_length} chars),\n'
+ f' shortening to "{shortened_name}" before generating suffix')
else:
shortened_name = pat.name
shortened_name = name
# Remove invalid characters
sanitized_name = re.compile(r'[^A-Za-z0-9_\?\$]').sub('_', shortened_name)
@ -533,7 +539,7 @@ def disambiguate_pattern_names(
# Add a suffix that makes the name unique
i = 0
suffixed_name = sanitized_name
while suffixed_name in used_names or suffixed_name == '':
while suffixed_name in new_names or suffixed_name == '':
suffix = base64.b64encode(struct.pack('>Q', i), b'$?').decode('ASCII')
suffixed_name = sanitized_name + '$' + suffix[:-1].lstrip('A')
@ -542,27 +548,25 @@ def disambiguate_pattern_names(
if sanitized_name == '':
logger.warning(f'Empty pattern name saved as "{suffixed_name}"')
elif suffixed_name != sanitized_name:
if dup_warn_filter is None or dup_warn_filter(pat.name):
logger.warning(f'Pattern name "{pat.name}" ({sanitized_name}) appears multiple times;\n'
if dup_warn_filter is None or dup_warn_filter(name):
logger.warning(f'Pattern name "{name}" ({sanitized_name}) appears multiple times;\n'
+ f' renaming to "{suffixed_name}"')
# Encode into a byte-string and perform some final checks
encoded_name = suffixed_name.encode('ASCII')
if len(encoded_name) == 0:
# Should never happen since zero-length names are replaced
raise PatternError(f'Zero-length name after sanitize+encode,\n originally "{pat.name}"')
raise PatternError(f'Zero-length name after sanitize+encode,\n originally "{name}"')
if len(encoded_name) > max_name_length:
raise PatternError(f'Pattern name "{encoded_name!r}" length > {max_name_length} after encode,\n'
+ f' originally "{pat.name}"')
+ f' originally "{name}"')
pat.name = suffixed_name
used_names.append(suffixed_name)
new_names.append(suffixed_name)
return new_names
def load_library(
stream: BinaryIO,
tag: str,
is_secondary: Optional[Callable[[str], bool]] = None,
*,
full_load: bool = False,
) -> Tuple[Library, Dict[str, Any]]:
@ -574,28 +578,17 @@ def load_library(
Args:
stream: Seekable stream. Position 0 should be the start of the file.
The caller should leave the stream open while the library
is still in use, since the library will need to access it
in order to read the structure contents.
tag: Unique identifier that will be used to identify this data source
is_secondary: Function which takes a structure name and returns
True if the structure should only be used as a subcell
and not appear in the main Library interface.
Default always returns False.
The caller should leave the stream open while the library
is still in use, since the library will need to access it
in order to read the structure contents.
full_load: If True, force all structures to be read immediately rather
than as-needed. Since data is read sequentially from the file,
this will be faster than using the resulting library's
`precache` method.
than as-needed. Since data is read sequentially from the file, this
will be faster than using the resulting library's `precache` method.
Returns:
Library object, allowing for deferred load of structures.
Additional library info (dict, same format as from `read`).
"""
if is_secondary is None:
def is_secondary(k: str) -> bool:
return False
assert(is_secondary is not None)
stream.seek(0)
lib = Library()
@ -603,7 +596,7 @@ def load_library(
# Full load approach (immediately load everything)
patterns, library_info = read(stream)
for name, pattern in patterns.items():
lib.set_const(name, tag, pattern, secondary=is_secondary(name))
lib[name] = lambda pattern=pattern: pattern  # bind the current pattern; a bare `lambda: pattern` would capture only the last one
return lib, library_info
# Normal approach (scan and defer load)
@ -613,19 +606,17 @@ def load_library(
for name_bytes, pos in structs.items():
name = name_bytes.decode('ASCII')
def mkstruct(pos: int = pos, name: str = name) -> Pattern:
def mkstruct(pos: int = pos) -> Pattern:
stream.seek(pos)
return read_elements(stream, name, raw_mode=True)
return read_elements(stream, raw_mode=True)
lib.set_value(name, tag, mkstruct, secondary=is_secondary(name))
lib[name] = mkstruct
return lib, library_info
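In the deferred path each cell is read only when first accessed. A usage sketch, assuming the `Library` resolves its stored callables on item access (cell name hypothetical):

with open('layout.gds', 'rb') as stream:
    lib, info = load_library(stream)
    pat = lib['top_cell']  # triggers mkstruct: seek to this struct and read it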
def load_libraryfile(
filename: Union[str, pathlib.Path],
tag: str,
is_secondary: Optional[Callable[[str], bool]] = None,
*,
use_mmap: bool = True,
full_load: bool = False,
@ -640,8 +631,6 @@ def load_libraryfile(
Args:
path: filename or path to read from
tag: Unique identifier for library, see `load_library`
is_secondary: Function specifying subcells, see `load_library`
use_mmap: If `True`, will attempt to memory-map the file instead
of buffering. In the case of gzipped files, the file
is decompressed into a python `bytes` object in memory
@ -667,4 +656,4 @@ def load_libraryfile(
stream = mmap.mmap(base_stream.fileno(), 0, access=mmap.ACCESS_READ)
else:
stream = io.BufferedReader(base_stream)
return load_library(stream, tag, is_secondary)
return load_library(stream, full_load=full_load)

View File

@ -11,7 +11,7 @@ Note that OASIS references follow the same convention as `masque`,
Scaling, rotation, and mirroring apply to individual instances, not grid
vectors or offsets.
"""
from typing import List, Any, Dict, Tuple, Callable, Union, Sequence, Iterable, Optional
from typing import List, Any, Dict, Tuple, Callable, Union, Sequence, Iterable, Mapping, Optional
import re
import io
import copy
@ -22,11 +22,12 @@ import pathlib
import gzip
import numpy
from numpy.typing import ArrayLike, NDArray
import fatamorgana
import fatamorgana.records as fatrec
from fatamorgana.basic import PathExtensionScheme, AString, NString, PropStringReference
from .utils import clean_pattern_vertices, is_gzipped
from .utils import is_gzipped
from .. import Pattern, SubPattern, PatternError, Label, Shape
from ..shapes import Polygon, Path, Circle
from ..repetition import Grid, Arbitrary, Repetition
@ -47,19 +48,22 @@ path_cap_map = {
#TODO implement more shape types?
def rint_cast(val: ArrayLike) -> NDArray[numpy.int64]:
return numpy.rint(val, dtype=numpy.int64, casting='unsafe')
def build(
patterns: Union[Pattern, Sequence[Pattern]],
library: Mapping[str, Pattern], # NOTE: Pattern here should be treated as immutable!
units_per_micron: int,
layer_map: Optional[Dict[str, Union[int, Tuple[int, int]]]] = None,
*,
modify_originals: bool = False,
disambiguate_func: Optional[Callable[[Iterable[Pattern]], None]] = None,
disambiguate_func: Optional[Callable[[Iterable[str]], List[str]]] = None,
annotations: Optional[annotations_t] = None,
) -> fatamorgana.OasisLayout:
"""
Convert a `Pattern` or list of patterns to an OASIS stream, writing patterns
as OASIS cells, subpatterns as Placement records, and other shapes and labels
mapped to equivalent record types (Polygon, Path, Circle, Text).
Convert a collection of {name: Pattern} pairs to an OASIS stream, writing patterns
as OASIS cells, subpatterns as Placement records, and mapping other shapes and labels
to equivalent record types (Polygon, Path, Circle, Text).
Other shape types may be converted to polygons if no equivalent
record type exists (or is not implemented here yet).
@ -75,7 +79,7 @@ def build(
prior to calling this function.
Args:
patterns: A Pattern or list of patterns to convert.
library: A {name: Pattern} mapping of patterns to write.
units_per_micron: Written into the OASIS file, number of grid steps per micrometer.
All distances are assumed to be an integer multiple of the grid step, and are stored as such.
layer_map: Dictionary which translates layer names into layer numbers. If this argument is
@ -86,11 +90,8 @@ def build(
into numbers, omit this argument, and manually generate the required
`fatamorgana.records.LayerName` entries.
Default is an empty dict (no names provided).
modify_originals: If `True`, the original pattern is modified as part of the writing
process. Otherwise, a copy is made and `deepunlock()`-ed.
Default `False`.
disambiguate_func: Function which takes a list of patterns and alters them
to make their names valid and unique. Default is `disambiguate_pattern_names`.
disambiguate_func: Function which takes a list of pattern names and returns a list of names
altered to be valid and unique. Default is `disambiguate_pattern_names`.
annotations: dictionary of key-value pairs which are saved as library-level properties
Returns:
@ -108,9 +109,6 @@ def build(
if annotations is None:
annotations = {}
if not modify_originals:
patterns = [p.deepunlock() for p in copy.deepcopy(patterns)]
# Create library
lib = fatamorgana.OasisLayout(unit=units_per_micron, validation=None)
lib.properties = annotations_to_properties(annotations)
@ -119,10 +117,12 @@ def build(
for name, layer_num in layer_map.items():
layer, data_type = _mlayer2oas(layer_num)
lib.layers += [
fatrec.LayerName(nstring=name,
layer_interval=(layer, layer),
type_interval=(data_type, data_type),
is_textlayer=tt)
fatrec.LayerName(
nstring=name,
layer_interval=(layer, layer),
type_interval=(data_type, data_type),
is_textlayer=tt,
)
for tt in (True, False)]
def layer2oas(mlayer: layer_t) -> Tuple[int, int]:
@ -132,17 +132,14 @@ def build(
else:
layer2oas = _mlayer2oas
# Get a dict of id(pattern) -> pattern
patterns_by_id = {id(pattern): pattern for pattern in patterns}
for pattern in patterns:
for i, p in pattern.referenced_patterns_by_id().items():
patterns_by_id[i] = p
disambiguate_func(patterns_by_id.values())
old_names = list(library.keys())
new_names = disambiguate_func(old_names)
renamed_lib = {new_name: library[old_name]
for old_name, new_name in zip(old_names, new_names)}
# Now create a structure for each pattern
for pat in patterns_by_id.values():
structure = fatamorgana.Cell(name=pat.name)
for name, pat in renamed_lib.items():
structure = fatamorgana.Cell(name=name)
lib.cells.append(structure)
structure.properties += annotations_to_properties(pat.annotations)
@ -229,7 +226,6 @@ def readfile(
def read(
stream: io.BufferedIOBase,
clean_vertices: bool = True,
) -> Tuple[Dict[str, Pattern], Dict[str, Any]]:
"""
Read an OASIS file and translate it into a dict of Pattern objects. OASIS cells are
@ -243,9 +239,6 @@ def read(
Args:
stream: Stream to read from.
clean_vertices: If `True`, remove any redundant vertices when loading polygons.
The cleaning process removes any polygons with zero area or <3 vertices.
Default `True`.
Returns:
- Dict of `pattern_name`:`Pattern`s generated from OASIS cells
@ -264,14 +257,14 @@ def read(
layer_map[str(layer_name.nstring)] = layer_name
library_info['layer_map'] = layer_map
patterns = []
patterns_dict = {}
for cell in lib.cells:
if isinstance(cell.name, int):
cell_name = lib.cellnames[cell.name].nstring.string
else:
cell_name = cell.name.string
pat = Pattern(name=cell_name)
pat = Pattern()
for element in cell.geometry:
if isinstance(element, fatrec.XElement):
logger.warning('Skipping XElement record')
@ -450,19 +443,7 @@ def read(
for placement in cell.placements:
pat.subpatterns.append(_placement_to_subpat(placement, lib))
if clean_vertices:
clean_pattern_vertices(pat)
patterns.append(pat)
# Create a dict of {pattern.name: pattern, ...}, then fix up all subpattern.pattern entries
# according to the subpattern.identifier (which is deleted after use).
patterns_dict = dict(((p.name, p) for p in patterns))
for p in patterns_dict.values():
for sp in p.subpatterns:
ident = sp.identifier[0]
name = ident if isinstance(ident, str) else lib.cellnames[ident].nstring.string
sp.pattern = patterns_dict[name]
del sp.identifier
patterns_dict[cell_name] = pat
return patterns_dict, library_info
@ -486,8 +467,7 @@ def _mlayer2oas(mlayer: layer_t) -> Tuple[int, int]:
def _placement_to_subpat(placement: fatrec.Placement, lib: fatamorgana.OasisLayout) -> SubPattern:
"""
Helper function to create a SubPattern from a placment. Sets subpat.pattern to None
and sets the instance .identifier to (struct_name,).
Helper function to create a SubPattern from a placement. Sets subpat.target to the placement name.
"""
assert(not isinstance(placement.repetition, fatamorgana.ReuseRepetition))
xy = numpy.array((placement.x, placement.y))
@ -499,14 +479,15 @@ def _placement_to_subpat(placement: fatrec.Placement, lib: fatamorgana.OasisLayo
rotation = 0
else:
rotation = numpy.deg2rad(float(placement.angle))
subpat = SubPattern(offset=xy,
pattern=None,
mirrored=(placement.flip, False),
rotation=rotation,
scale=float(mag),
identifier=(name,),
repetition=repetition_fata2masq(placement.repetition),
annotations=annotations)
subpat = SubPattern(
target=name,
offset=xy,
mirrored=(placement.flip, False),
rotation=rotation,
scale=float(mag),
repetition=repetition_fata2masq(placement.repetition),
annotations=annotations,
)
return subpat
@ -515,17 +496,17 @@ def _subpatterns_to_placements(
) -> List[fatrec.Placement]:
refs = []
for subpat in subpatterns:
if subpat.pattern is None:
if subpat.target is None:
continue
# Note: OASIS mirrors first and rotates second
mirror_across_x, extra_angle = normalize_mirror(subpat.mirrored)
frep, rep_offset = repetition_masq2fata(subpat.repetition)
offset = numpy.round(subpat.offset + rep_offset).astype(int)
offset = rint_cast(subpat.offset + rep_offset)
angle = numpy.rad2deg(subpat.rotation + extra_angle) % 360
ref = fatrec.Placement(
name=subpat.pattern.name,
name=subpat.target,
flip=mirror_across_x,
angle=angle,
magnification=subpat.scale,
@ -549,46 +530,51 @@ def _shapes_to_elements(
repetition, rep_offset = repetition_masq2fata(shape.repetition)
properties = annotations_to_properties(shape.annotations)
if isinstance(shape, Circle):
offset = numpy.round(shape.offset + rep_offset).astype(int)
radius = numpy.round(shape.radius).astype(int)
circle = fatrec.Circle(layer=layer,
datatype=datatype,
radius=radius,
x=offset[0],
y=offset[1],
properties=properties,
repetition=repetition)
offset = rint_cast(shape.offset + rep_offset)
radius = rint_cast(shape.radius)
circle = fatrec.Circle(
layer=layer,
datatype=datatype,
radius=radius,
x=offset[0],
y=offset[1],
properties=properties,
repetition=repetition,
)
elements.append(circle)
elif isinstance(shape, Path):
xy = numpy.round(shape.offset + shape.vertices[0] + rep_offset).astype(int)
deltas = numpy.round(numpy.diff(shape.vertices, axis=0)).astype(int)
half_width = numpy.round(shape.width / 2).astype(int)
xy = rint_cast(shape.offset + shape.vertices[0] + rep_offset)
deltas = rint_cast(numpy.diff(shape.vertices, axis=0))
half_width = rint_cast(shape.width / 2)
path_type = next(k for k, v in path_cap_map.items() if v == shape.cap) # reverse lookup
extension_start = (path_type, shape.cap_extensions[0] if shape.cap_extensions is not None else None)
extension_end = (path_type, shape.cap_extensions[1] if shape.cap_extensions is not None else None)
path = fatrec.Path(layer=layer,
datatype=datatype,
point_list=deltas,
half_width=half_width,
x=xy[0],
y=xy[1],
extension_start=extension_start, # TODO implement multiple cap types?
extension_end=extension_end,
properties=properties,
repetition=repetition,
)
path = fatrec.Path(
layer=layer,
datatype=datatype,
point_list=deltas,
half_width=half_width,
x=xy[0],
y=xy[1],
extension_start=extension_start, # TODO implement multiple cap types?
extension_end=extension_end,
properties=properties,
repetition=repetition,
)
elements.append(path)
else:
for polygon in shape.to_polygons():
xy = numpy.round(polygon.offset + polygon.vertices[0] + rep_offset).astype(int)
points = numpy.round(numpy.diff(polygon.vertices, axis=0)).astype(int)
elements.append(fatrec.Polygon(layer=layer,
datatype=datatype,
x=xy[0],
y=xy[1],
point_list=points,
properties=properties,
repetition=repetition))
xy = rint_cast(polygon.offset + polygon.vertices[0] + rep_offset)
points = rint_cast(numpy.diff(polygon.vertices, axis=0))
elements.append(fatrec.Polygon(
layer=layer,
datatype=datatype,
x=xy[0],
y=xy[1],
point_list=points,
properties=properties,
repetition=repetition,
))
return elements
@ -600,29 +586,31 @@ def _labels_to_texts(
for label in labels:
layer, datatype = layer2oas(label.layer)
repetition, rep_offset = repetition_masq2fata(label.repetition)
xy = numpy.round(label.offset + rep_offset).astype(int)
xy = rint_cast(label.offset + rep_offset)
properties = annotations_to_properties(label.annotations)
texts.append(fatrec.Text(layer=layer,
datatype=datatype,
x=xy[0],
y=xy[1],
string=label.string,
properties=properties,
repetition=repetition))
texts.append(fatrec.Text(
layer=layer,
datatype=datatype,
x=xy[0],
y=xy[1],
string=label.string,
properties=properties,
repetition=repetition,
))
return texts
def disambiguate_pattern_names(
patterns,
names: Iterable[str],
dup_warn_filter: Optional[Callable[[str], bool]] = None, # If returns False, don't warn about this name
) -> None:
used_names = []
for pat in patterns:
sanitized_name = re.compile(r'[^A-Za-z0-9_\?\$]').sub('_', pat.name)
) -> List[str]:
new_names = []
for name in names:
sanitized_name = re.compile(r'[^A-Za-z0-9_\?\$]').sub('_', name)
i = 0
suffixed_name = sanitized_name
while suffixed_name in used_names or suffixed_name == '':
while suffixed_name in new_names or suffixed_name == '':
suffix = base64.b64encode(struct.pack('>Q', i), b'$?').decode('ASCII')
suffixed_name = sanitized_name + '$' + suffix[:-1].lstrip('A')
@ -631,16 +619,16 @@ def disambiguate_pattern_names(
if sanitized_name == '':
logger.warning(f'Empty pattern name saved as "{suffixed_name}"')
elif suffixed_name != sanitized_name:
if dup_warn_filter is None or dup_warn_filter(pat.name):
logger.warning(f'Pattern name "{pat.name}" ({sanitized_name}) appears multiple times;\n'
if dup_warn_filter is None or dup_warn_filter(name):
logger.warning(f'Pattern name "{name}" ({sanitized_name}) appears multiple times;\n'
+ f' renaming to "{suffixed_name}"')
if len(suffixed_name) == 0:
# Should never happen since zero-length names are replaced
raise PatternError(f'Zero-length name after sanitize+encode,\n originally "{pat.name}"')
raise PatternError(f'Zero-length name after sanitize+encode,\n originally "{name}"')
pat.name = suffixed_name
used_names.append(suffixed_name)
new_names.append(suffixed_name)
return new_names
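For illustration (not part of the diff), a minimal sketch of how the new name-based disambiguation behaves; the input names here are hypothetical:

disambiguate_pattern_names(['cell', 'cell', 'cell*A'])
# -> ['cell', 'cell$', 'cell_A']
# The second 'cell' collides and receives a '$'-delimited base64 counter
# suffix (with 'A' padding stripped, the i=0 suffix is empty);
# '*' is outside [A-Za-z0-9_?$] and is replaced with '_'.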
def repetition_fata2masq(

View File

@ -18,7 +18,7 @@ Notes:
* GDS does not support library- or structure-level annotations
"""
from typing import List, Any, Dict, Tuple, Callable, Union, Iterable, Optional
from typing import Sequence
from typing import Sequence, Mapping
import re
import io
import copy
@ -59,13 +59,13 @@ def rint_cast(val: ArrayLike) -> NDArray[numpy.int32]:
def build(
patterns: Union[Pattern, Sequence[Pattern]],
library: Mapping[str, Pattern],
meters_per_unit: float,
logical_units_per_unit: float = 1,
library_name: str = 'masque-gdsii-write',
*,
modify_originals: bool = False,
disambiguate_func: Callable[[Iterable[Pattern]], None] = None,
disambiguate_func: Optional[Callable[[Iterable[str]], List[str]]] = None,
) -> gdsii.library.Library:
"""
Convert a `Pattern` or list of patterns to a GDSII stream, by first calling
@ -86,7 +86,7 @@ def build(
prior to calling this function.
Args:
patterns: A Pattern or list of patterns to convert.
library: A {name: Pattern} mapping of patterns to write.
meters_per_unit: Written into the GDSII file, meters per (database) length unit.
All distances are assumed to be an integer multiple of this unit, and are stored as such.
logical_units_per_unit: Written into the GDSII file. Allows the GDSII to specify a
@ -95,27 +95,29 @@ def build(
library_name: Library name written into the GDSII file.
Default 'masque-gdsii-write'.
modify_originals: If `True`, the original pattern is modified as part of the writing
process. Otherwise, a copy is made and `deepunlock()`-ed.
process. Otherwise, a copy is made.
Default `False`.
disambiguate_func: Function which takes a list of patterns and alters them
to make their names valid and unique. Default is `disambiguate_pattern_names`, which
attempts to adhere to the GDSII standard as well as possible.
disambiguate_func: Function which takes a list of pattern names and returns a list of names
altered to be valid and unique. Default is `disambiguate_pattern_names`, which
attempts to adhere to the GDSII standard reasonably well.
WARNING: No additional error checking is performed on the results.
Returns:
`gdsii.library.Library`
"""
if isinstance(patterns, Pattern):
patterns = [patterns]
if disambiguate_func is None:
disambiguate_func = disambiguate_pattern_names # type: ignore
assert(disambiguate_func is not None) # placate mypy
disambiguate_func = disambiguate_pattern_names
if not modify_originals:
patterns = [p.deepunlock() for p in copy.deepcopy(patterns)]
library = copy.deepcopy(library)
patterns = [p.wrap_repeated_shapes() for p in patterns]
for p in library.values():
p.wrap_repeated_shapes()
old_names = list(library.keys())
new_names = disambiguate_func(old_names)
renamed_lib = {new_name: library[old_name]
for old_name, new_name in zip(old_names, new_names)}
# Create library
lib = gdsii.library.Library(version=600,
@ -123,17 +125,9 @@ def build(
logical_unit=logical_units_per_unit,
physical_unit=meters_per_unit)
# Get a dict of id(pattern) -> pattern
patterns_by_id = {id(pattern): pattern for pattern in patterns}
for pattern in patterns:
for i, p in pattern.referenced_patterns_by_id().items():
patterns_by_id[i] = p
disambiguate_func(patterns_by_id.values())
# Now create a structure for each pattern, and add in any Boundary and SREF elements
for pat in patterns_by_id.values():
structure = gdsii.structure.Structure(name=pat.name.encode('ASCII'))
for name, pat in renamed_lib.items():
structure = gdsii.structure.Structure(name=name.encode('ASCII'))
lib.append(structure)
structure += _shapes_to_elements(pat.shapes)
@ -144,7 +138,7 @@ def build(
def write(
patterns: Union[Pattern, Sequence[Pattern]],
library: Mapping[str, Pattern],
stream: io.BufferedIOBase,
*args,
**kwargs,
@ -154,31 +148,31 @@ def write(
See `masque.file.gdsii.build()` for details.
Args:
patterns: A Pattern or list of patterns to write to file.
library: A {name: Pattern} mapping of patterns to write.
stream: Stream to write to.
*args: passed to `masque.file.gdsii.build()`
**kwargs: passed to `masque.file.gdsii.build()`
"""
lib = build(patterns, *args, **kwargs)
lib = build(library, *args, **kwargs)
lib.save(stream)
return
def writefile(
patterns: Union[Sequence[Pattern], Pattern],
library: Mapping[str, Pattern],
filename: Union[str, pathlib.Path],
*args,
**kwargs,
) -> None:
"""
Wrapper for `masque.file.gdsii.write()` that takes a filename or path instead of a stream.
Wrapper for `write()` that takes a filename or path instead of a stream.
Will automatically compress the file if it has a .gz suffix.
Args:
patterns: `Pattern` or list of patterns to save
library: {name: Pattern} pairs to save.
filename: Filename to save to.
*args: passed to `masque.file.gdsii.write`
**kwargs: passed to `masque.file.gdsii.write`
*args: passed to `write()`
**kwargs: passed to `write()`
"""
path = pathlib.Path(filename)
if path.suffix == '.gz':
@ -196,14 +190,14 @@ def readfile(
**kwargs,
) -> Tuple[Dict[str, Pattern], Dict[str, Any]]:
"""
Wrapper for `masque.file.gdsii.read()` that takes a filename or path instead of a stream.
Wrapper for `read()` that takes a filename or path instead of a stream.
Will automatically decompress gzipped files.
Args:
filename: Filename to save to.
*args: passed to `masque.file.gdsii.read`
**kwargs: passed to `masque.file.gdsii.read`
*args: passed to `read()`
**kwargs: passed to `read()`
"""
path = pathlib.Path(filename)
if is_gzipped(path):
@ -251,9 +245,10 @@ def read(
raw_mode = True # Whether to construct shapes in raw mode (less error checking)
patterns = []
patterns_dict = {}
for structure in lib:
pat = Pattern(name=structure.name.decode('ASCII'))
pat = Pattern()
name = structure.name.decode('ASCII')
for element in structure:
# Switch based on element type:
if isinstance(element, gdsii.elements.Boundary):
@ -275,15 +270,7 @@ def read(
if clean_vertices:
clean_pattern_vertices(pat)
patterns.append(pat)
# Create a dict of {pattern.name: pattern, ...}, then fix up all subpattern.pattern entries
# according to the subpattern.identifier (which is deleted after use).
patterns_dict = dict(((p.name, p) for p in patterns))
for p in patterns_dict.values():
for sp in p.subpatterns:
sp.pattern = patterns_dict[sp.identifier[0].decode('ASCII')]
del sp.identifier
patterns_dict[name] = pat
return patterns_dict, library_info
@ -309,8 +296,7 @@ def _ref_to_subpat(
gdsii.elements.ARef]
) -> SubPattern:
"""
Helper function to create a SubPattern from an SREF or AREF. Sets subpat.pattern to None
and sets the instance .identifier to (struct_name,).
Helper function to create a SubPattern from an SREF or AREF. Sets subpat.target to struct_name.
NOTE: "Absolute" means not affected by parent elements.
That's not currently supported by masque at all (and not planned).
@ -351,7 +337,6 @@ def _ref_to_subpat(
mirrored=(mirror_across_x, False),
annotations=_properties_to_annotations(element.properties),
repetition=repetition)
subpat.identifier = (element.struct_name,)
return subpat
@ -395,9 +380,9 @@ def _subpatterns_to_refs(
) -> List[Union[gdsii.elements.ARef, gdsii.elements.SRef]]:
refs = []
for subpat in subpatterns:
if subpat.pattern is None:
if subpat.target is None:
continue
encoded_name = subpat.pattern.name.encode('ASCII')
encoded_name = subpat.target.encode('ASCII')
# Note: GDS mirrors first and rotates second
mirror_across_x, extra_angle = normalize_mirror(subpat.mirrored)
@ -523,14 +508,14 @@ def _labels_to_texts(labels: List[Label]) -> List[gdsii.elements.Text]:
def disambiguate_pattern_names(
patterns: Sequence[Pattern],
names: Iterable[str],
max_name_length: int = 32,
suffix_length: int = 6,
dup_warn_filter: Optional[Callable[[str], bool]] = None,
) -> None:
) -> List[str]:
"""
Args:
patterns: List of patterns to disambiguate
names: List of pattern names to disambiguate
max_name_length: Names longer than this will be truncated
suffix_length: Names which get truncated are truncated by this many extra characters. This is to
leave room for a suffix if one is necessary.
@ -538,15 +523,15 @@ def disambiguate_pattern_names(
the cell name and returns `False` if the warning should be suppressed and `True` if it should
be displayed. Default displays all warnings.
"""
used_names = []
for pat in set(patterns):
new_names = []
for name in names:
# Shorten names which already exceed max-length
if len(pat.name) > max_name_length:
shortened_name = pat.name[:max_name_length - suffix_length]
logger.warning(f'Pattern name "{pat.name}" is too long ({len(pat.name)}/{max_name_length} chars),\n'
if len(name) > max_name_length:
shortened_name = name[:max_name_length - suffix_length]
logger.warning(f'Pattern name "{name}" is too long ({len(name)}/{max_name_length} chars),\n'
+ f' shortening to "{shortened_name}" before generating suffix')
else:
shortened_name = pat.name
shortened_name = name
# Remove invalid characters
sanitized_name = re.compile(r'[^A-Za-z0-9_\?\$]').sub('_', shortened_name)
@ -554,7 +539,7 @@ def disambiguate_pattern_names(
# Add a suffix that makes the name unique
i = 0
suffixed_name = sanitized_name
while suffixed_name in used_names or suffixed_name == '':
while suffixed_name in new_names or suffixed_name == '':
suffix = base64.b64encode(struct.pack('>Q', i), b'$?').decode('ASCII')
suffixed_name = sanitized_name + '$' + suffix[:-1].lstrip('A')
@ -563,18 +548,19 @@ def disambiguate_pattern_names(
if sanitized_name == '':
logger.warning(f'Empty pattern name saved as "{suffixed_name}"')
elif suffixed_name != sanitized_name:
if dup_warn_filter is None or dup_warn_filter(pat.name):
logger.warning(f'Pattern name "{pat.name}" ({sanitized_name}) appears multiple times;\n'
if dup_warn_filter is None or dup_warn_filter(name):
logger.warning(f'Pattern name "{name}" ({sanitized_name}) appears multiple times;\n'
+ f' renaming to "{suffixed_name}"')
# Encode into a byte-string and perform some final checks
encoded_name = suffixed_name.encode('ASCII')
if len(encoded_name) == 0:
# Should never happen since zero-length names are replaced
raise PatternError(f'Zero-length name after sanitize+encode,\n originally "{pat.name}"')
raise PatternError(f'Zero-length name after sanitize+encode,\n originally "{name}"')
if len(encoded_name) > max_name_length:
raise PatternError(f'Pattern name "{encoded_name!r}" length > {max_name_length} after encode,\n'
+ f' originally "{pat.name}"')
+ f' originally "{name}"')
new_names.append(suffixed_name)
return new_names
pat.name = suffixed_name
used_names.append(suffixed_name)

View File

@ -1,7 +1,7 @@
"""
SVG file format readers and writers
"""
from typing import Dict, Optional
from typing import Dict, Optional, Mapping
import warnings
import numpy
@ -13,7 +13,8 @@ from .. import Pattern
def writefile(
pattern: Pattern,
library: Mapping[str, Pattern],
top: str,
filename: str,
custom_attributes: bool = False,
) -> None:
@ -41,11 +42,12 @@ def writefile(
custom_attributes: Whether to write non-standard `pattern_layer` and
`pattern_dose` attributes to the SVG elements.
"""
pattern = library[top]
# Polygonize pattern
pattern.polygonize()
bounds = pattern.get_bounds()
bounds = pattern.get_bounds(library=library)
if bounds is None:
bounds_min, bounds_max = numpy.array([[-1, -1], [1, 1]])
warnings.warn('Pattern had no bounds (empty?); setting arbitrary viewbox')
@ -59,15 +61,10 @@ def writefile(
svg = svgwrite.Drawing(filename, profile='full', viewBox=viewbox_string,
debug=(not custom_attributes))
# Get a dict of id(pattern) -> pattern
patterns_by_id = {**(pattern.referenced_patterns_by_id()), id(pattern): pattern} # type: Dict[int, Optional[Pattern]]
# Now create a group for each row in sd_table (ie, each pattern + dose combination)
# and add in any Boundary and Use elements
for pat in patterns_by_id.values():
if pat is None:
continue
svg_group = svg.g(id=mangle_name(pat), fill='blue', stroke='red')
for name, pat in library.items():
svg_group = svg.g(id=mangle_name(name), fill='blue', stroke='red')
for shape in pat.shapes:
for polygon in shape.to_polygons():
@ -81,20 +78,24 @@ def writefile(
svg_group.add(path)
for subpat in pat.subpatterns:
if subpat.pattern is None:
if subpat.target is None:
continue
transform = f'scale({subpat.scale:g}) rotate({subpat.rotation:g}) translate({subpat.offset[0]:g},{subpat.offset[1]:g})'
use = svg.use(href='#' + mangle_name(subpat.pattern), transform=transform)
use = svg.use(href='#' + mangle_name(subpat.target), transform=transform)
if custom_attributes:
use['pattern_dose'] = subpat.dose
svg_group.add(use)
svg.defs.add(svg_group)
svg.add(svg.use(href='#' + mangle_name(pattern)))
svg.add(svg.use(href='#' + mangle_name(top)))
svg.save()
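A hedged usage sketch for the new library-based SVG entry point (the library contents and the names `mylib`, `'top_cell'`, and `'out.svg'` are hypothetical):

# mylib maps names to Patterns; 'top_cell' must be a key in it
writefile(mylib, 'top_cell', 'out.svg', custom_attributes=True)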
def writefile_inverted(pattern: Pattern, filename: str):
def writefile_inverted(
library: Mapping[str, Pattern],
top: str,
filename: str,
) -> None:
"""
Write an inverted Pattern to an SVG file, by first calling `.polygonize()` and
`.flatten()` on it to change the shapes into polygons, then drawing a bounding
@ -110,10 +111,12 @@ def writefile_inverted(pattern: Pattern, filename: str):
library: {name: Pattern} mapping; the `top` pattern is polygonized and flattened (i.e. modified) by this function.
top: Name of the pattern to write.
filename: Filename to write to.
"""
pattern = library[top]
# Polygonize and flatten pattern
pattern.polygonize().flatten()
bounds = pattern.get_bounds()
bounds = pattern.get_bounds(library=library)
if bounds is None:
bounds_min, bounds_max = numpy.array([[-1, -1], [1, 1]])
warnings.warn('Pattern had no bounds (empty?); setting arbitrary viewbox')

View File

@ -1,7 +1,7 @@
"""
Helper functions for file reading and writing
"""
from typing import Set, Tuple, List
from typing import Set, Dict, Tuple, List, Iterable, Mapping
import re
import copy
import pathlib
@ -10,19 +10,22 @@ from .. import Pattern, PatternError
from ..shapes import Polygon, Path
def mangle_name(pattern: Pattern, dose_multiplier: float = 1.0) -> str:
def mangle_name(name: str, dose_multiplier: float = 1.0) -> str:
"""
Create a name using `pattern.name`, `id(pattern)`, and the dose multiplier.
Create a new name using `name` and the `dose_multiplier`.
Args:
pattern: Pattern whose name we want to mangle.
name: Name we want to mangle.
dose_multiplier: Dose multiplier to mangle with.
Returns:
Mangled name.
"""
if dose_multiplier == 1:
full_name = name
else:
full_name = f'{name}_dm{dose_multiplier}'
expression = re.compile(r'[^A-Za-z0-9_\?\$]')
full_name = '{}_{}_{}'.format(pattern.name, dose_multiplier, id(pattern))
sanitized_name = expression.sub('_', full_name)
return sanitized_name
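Worked examples (not part of the diff) of the new name-only mangling; `'top_cell'` is a hypothetical cell name:

mangle_name('top_cell')        # -> 'top_cell'       (dose multiplier of 1 leaves the name alone)
mangle_name('top_cell', 0.5)   # -> 'top_cell_dm0_5' ('.' is invalid and becomes '_')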
@ -51,25 +54,30 @@ def clean_pattern_vertices(pat: Pattern) -> Pattern:
return pat
def make_dose_table(patterns: List[Pattern], dose_multiplier: float = 1.0) -> Set[Tuple[int, float]]:
def make_dose_table(
top_names: Iterable[str],
library: Mapping[str, Pattern],
dose_multiplier: float = 1.0,
) -> Set[Tuple[int, float]]:
"""
Create a set containing `(id(pat), written_dose)` for each pattern (including subpatterns)
Create a set containing `(name, written_dose)` for each pattern (including subpatterns)
Args:
top_names: Names of all topcells
library: {name: Pattern} mapping containing all patterns (including those referenced by `top_names`).
dose_multiplier: Multiplier for all written_dose entries.
Returns:
`{(id(subpat.pattern), written_dose), ...}`
`{(name, written_dose), ...}`
"""
dose_table = {(id(pattern), dose_multiplier) for pattern in patterns}
for pattern in patterns:
dose_table = {(top_name, dose_multiplier) for top_name in top_names}
for top_name in top_names:
pattern = library[top_name]
for subpat in pattern.subpatterns:
if subpat.pattern is None:
if subpat.target is None:
continue
subpat_dose_entry = (id(subpat.pattern), subpat.dose * dose_multiplier)
subpat_dose_entry = (subpat.target, subpat.dose * dose_multiplier)
if subpat_dose_entry not in dose_table:
subpat_dose_table = make_dose_table([subpat.pattern], subpat.dose * dose_multiplier)
subpat_dose_table = make_dose_table((subpat.target,), library, subpat.dose * dose_multiplier)
dose_table = dose_table.union(subpat_dose_table)
return dose_table
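A small sketch (not part of the diff) of the recursion, using stand-ins that provide only the attributes `make_dose_table()` touches; the names and doses are hypothetical:

from types import SimpleNamespace as NS

top = NS(subpatterns=[NS(target='via', dose=0.5)])   # 'top' places 'via' at half dose
via = NS(subpatterns=[])
make_dose_table(['top'], {'top': top, 'via': via})
# -> {('top', 1.0), ('via', 0.5)}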
@ -96,7 +104,7 @@ def dtype2dose(pattern: Pattern) -> Pattern:
def dose2dtype(
patterns: List[Pattern],
library: Mapping[str, Pattern],
) -> Tuple[Dict[str, Pattern], List[float]]:
"""
For each shape in each pattern, set shape.layer to the tuple
@ -119,21 +127,16 @@ def dose2dtype(
dose_list: A list of doses, providing a mapping between datatype (int, list index)
and dose (float, list entry).
"""
# Get a dict of id(pattern) -> pattern
patterns_by_id = {id(pattern): pattern for pattern in patterns}
for pattern in patterns:
for i, p in pattern.referenced_patterns_by_id().items():
patterns_by_id[i] = p
logger.warning('TODO: dose2dtype() needs to be tested!')
# Get a table of (name, written_dose) for each pattern and subpattern
sd_table = make_dose_table(patterns)
sd_table = make_dose_table(library.find_toplevel(), library)
# Figure out all the unique doses necessary to write this pattern
# This means going through each row in sd_table and adding the dose values needed to write
# that subpattern at that dose level
dose_vals = set()
for pat_id, pat_dose in sd_table:
pat = patterns_by_id[pat_id]
for name, pat_dose in sd_table:
pat = library[name]
for shape in pat.shapes:
dose_vals.add(shape.dose * pat_dose)
@ -144,21 +147,22 @@ def dose2dtype(
# Create a new pattern for each non-1-dose entry in the dose table
# and update the shapes to reflect their new dose
new_pats = {} # (id, dose) -> new_pattern mapping
for pat_id, pat_dose in sd_table:
new_names = {} # {(old name, dose): new name} mapping
new_lib = {} # {new_name: new_pattern} mapping
for name, pat_dose in sd_table:
mangled_name = mangle_name(name, pat_dose)
new_names[(name, pat_dose)] = mangled_name
old_pat = library[name]
if pat_dose == 1:
new_pats[(pat_id, pat_dose)] = patterns_by_id[pat_id]
new_lib[mangled_name] = old_pat
continue
old_pat = patterns_by_id[pat_id]
pat = old_pat.copy() # keep old subpatterns
pat.shapes = copy.deepcopy(old_pat.shapes)
pat.labels = copy.deepcopy(old_pat.labels)
pat = old_pat.deepcopy()
encoded_name = mangle_name(pat, pat_dose)
if len(encoded_name) == 0:
raise PatternError('Zero-length name after mangle+encode, originally "{}"'.format(pat.name))
pat.name = encoded_name
raise PatternError(f'Zero-length name after mangle+encode, originally "{name}"')
for shape in pat.shapes:
data_type = dose_vals_list.index(shape.dose * pat_dose)
@ -169,15 +173,9 @@ def dose2dtype(
else:
raise PatternError(f'Invalid layer for gdsii: {shape.layer}')
new_pats[(pat_id, pat_dose)] = pat
new_lib[mangled_name] = pat
# Go back through all the dose-specific patterns and fix up their subpattern entries
for (pat_id, pat_dose), pat in new_pats.items():
for subpat in pat.subpatterns:
dose_mult = subpat.dose * pat_dose
subpat.pattern = new_pats[(id(subpat.pattern), dose_mult)]
return patterns, dose_vals_list
return new_lib, dose_vals_list
def is_gzipped(path: pathlib.Path) -> bool:

View File

@ -6,14 +6,14 @@ from numpy.typing import ArrayLike, NDArray
from .repetition import Repetition
from .utils import rotation_matrix_2d, layer_t, AutoSlots, annotations_t
from .traits import PositionableImpl, LayerableImpl, Copyable, Pivotable, LockableImpl, RepeatableImpl
from .traits import PositionableImpl, LayerableImpl, Copyable, Pivotable, RepeatableImpl
from .traits import AnnotatableImpl
L = TypeVar('L', bound='Label')
class Label(PositionableImpl, LayerableImpl, LockableImpl, RepeatableImpl, AnnotatableImpl,
class Label(PositionableImpl, LayerableImpl, RepeatableImpl, AnnotatableImpl,
Pivotable, Copyable, metaclass=AutoSlots):
"""
A text annotation with a position and layer (but no size; it is not drawn)
@ -49,32 +49,28 @@ class Label(PositionableImpl, LayerableImpl, LockableImpl, RepeatableImpl, Annot
layer: layer_t = 0,
repetition: Optional[Repetition] = None,
annotations: Optional[annotations_t] = None,
locked: bool = False,
identifier: Tuple = (),
) -> None:
LockableImpl.unlock(self)
self.identifier = identifier
self.string = string
self.offset = numpy.array(offset, dtype=float, copy=True)
self.layer = layer
self.repetition = repetition
self.annotations = annotations if annotations is not None else {}
self.set_locked(locked)
def __copy__(self: L) -> L:
return type(self)(string=self.string,
offset=self.offset.copy(),
layer=self.layer,
repetition=self.repetition,
locked=self.locked,
identifier=self.identifier)
return type(self)(
string=self.string,
offset=self.offset.copy(),
layer=self.layer,
repetition=self.repetition,
identifier=self.identifier,
)
def __deepcopy__(self: L, memo: Optional[Dict] = None) -> L:
memo = {} if memo is None else memo
new = copy.copy(self)
LockableImpl.unlock(new)
new._offset = self._offset.copy()
new.set_locked(self.locked)
return new
def rotate_around(self: L, pivot: ArrayLike, rotation: float) -> L:
@ -106,17 +102,3 @@ class Label(PositionableImpl, LayerableImpl, LockableImpl, RepeatableImpl, Annot
Bounds [[xmin, xmax], [ymin, ymax]]
"""
return numpy.array([self.offset, self.offset])
def lock(self: L) -> L:
PositionableImpl._lock(self)
LockableImpl.lock(self)
return self
def unlock(self: L) -> L:
LockableImpl.unlock(self)
PositionableImpl._unlock(self)
return self
def __repr__(self) -> str:
locked = ' L' if self.locked else ''
return f'<Label "{self.string}" l{self.layer} o{self.offset}{locked}>'

594
masque/library.py Normal file
View File

@ -0,0 +1,594 @@
"""
Library class for managing unique name->pattern mappings and
deferred loading or creation.
"""
from typing import List, Dict, Callable, TypeVar, Type, TYPE_CHECKING
from typing import Any, Tuple, Union, Iterator, Mapping, MutableMapping, Set, Optional, Sequence
import logging
import copy
import base64
import struct
import re
from pprint import pformat
from collections import defaultdict
import numpy
from numpy.typing import ArrayLike, NDArray
from .error import LibraryError, PatternError
from .utils import rotation_matrix_2d, normalize_mirror
from .shapes import Shape, Polygon
from .label import Label
if TYPE_CHECKING:
from .pattern import Pattern
logger = logging.getLogger(__name__)
visitor_function_t = Callable[['Pattern', Tuple['Pattern'], Dict, NDArray[numpy.float64]], 'Pattern']
L = TypeVar('L', bound='Library')
class Library:
"""
This class is usually used to create a library of Patterns by mapping names to
functions which generate or load the relevant `Pattern` object as-needed.
The cache can be disabled by setting the `enable_cache` attribute to `False`.
"""
dict: Dict[str, Callable[[], 'Pattern']]
cache: Dict[str, 'Pattern']
enable_cache: bool = True
def __init__(self) -> None:
self.dict = {}
self.cache = {}
def __setitem__(self, key: str, value: Callable[[], 'Pattern']) -> None:
self.dict[key] = value
if key in self.cache:
del self.cache[key]
def __delitem__(self, key: str) -> None:
del self.dict[key]
if key in self.cache:
del self.cache[key]
def __getitem__(self, key: str) -> 'Pattern':
logger.debug(f'loading {key}')
if self.enable_cache and key in self.cache:
logger.debug(f'found {key} in cache')
return self.cache[key]
func = self.dict[key]
pat = func()
self.cache[key] = pat
return pat
def __iter__(self) -> Iterator[str]:
return iter(self.keys())
def __contains__(self, key: str) -> bool:
return key in self.dict
def keys(self) -> Iterator[str]:
return iter(self.dict.keys())
def values(self) -> Iterator['Pattern']:
return iter(self[key] for key in self.keys())
def items(self) -> Iterator[Tuple[str, 'Pattern']]:
return iter((key, self[key]) for key in self.keys())
def __repr__(self) -> str:
return '<Library with keys ' + repr(list(self.dict.keys())) + '>'
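A minimal usage sketch (not part of the diff) showing the deferred-load behavior; it assumes `Pattern` is importable from the package root, as elsewhere in this commit, and `'cell_a'` is a hypothetical name:

from masque import Pattern
from masque.library import Library

lib = Library()
lib['cell_a'] = lambda: Pattern()   # stored as a generator; not called yet
pat = lib['cell_a']                 # generator runs here and the result is cached
assert lib['cell_a'] is pat         # second access is served from the cache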
def precache(self: L) -> L:
"""
Force all patterns into the cache
Returns:
self
"""
for key in self.dict:
_ = self[key]
return self
def add(
self: L,
other: L,
use_ours: Callable[[str], bool] = lambda name: False,
use_theirs: Callable[[str], bool] = lambda name: False,
) -> L:
"""
Add keys from another library into this one.
Args:
other: The library to insert keys from
use_ours: Decision function for name conflicts, called with cell name.
Should return `True` if the value from `self` should be used.
use_theirs: Decision function for name conflicts. Same format as `use_ours`.
Should return `True` if the value from `other` should be used.
`use_ours` takes priority over `use_theirs`.
Returns:
self
"""
duplicates = set(self.keys()) & set(other.keys())
keep_ours = set(name for name in duplicates if use_ours(name))
keep_theirs = set(name for name in duplicates - keep_ours if use_theirs(name))
conflicts = duplicates - keep_ours - keep_theirs
if conflicts:
raise LibraryError('Unresolved duplicate keys encountered in library merge: ' + pformat(conflicts))
for key in set(other.keys()) - keep_ours:
self.dict[key] = other.dict[key]
if key in other.cache:
self.cache[key] = other.cache[key]
return self
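A sketch (not part of the diff) of conflict resolution during a merge; the cell name `'shared'` is hypothetical:

lib_a = Library()
lib_a['shared'] = lambda: Pattern()
lib_b = Library()
lib_b['shared'] = lambda: Pattern()

# Keep lib_a's version of any conflicting cell; without a decision
# function, an unresolved duplicate raises LibraryError.
lib_a.add(lib_b, use_ours=lambda name: True)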
def clear_cache(self: L) -> L:
"""
Clear the cache of this library.
This is usually used before modifying or deleting cells, e.g. when merging
with another library.
Returns:
self
"""
self.cache.clear()
return self
def referenced_patterns(
self,
tops: Union[str, Sequence[str]],
skip: Optional[Set[Optional[str]]] = None,
) -> Set[Optional[str]]:
"""
Get the set of all pattern names referenced by `tops`. Recursively traverses into any subpatterns.
Args:
tops: Name(s) of the top pattern(s) to check.
skip: Memo; set of patterns which have already been traversed.
Returns:
Set of all referenced pattern names
"""
if skip is None:
skip = set([None])
if isinstance(tops, str):
tops = (tops,)
# Get referenced patterns for all tops
targets = set()
for top in set(tops):
targets |= self[top].referenced_patterns()
# Perform recursive lookups, but only once for each name
for target in targets - skip:
assert(target is not None)
skip.add(target)
targets |= self.referenced_patterns(target, skip)
return targets
def subtree(
self: L,
tops: Union[str, Sequence[str]],
) -> L:
"""
Return a new `Library`, containing only the specified patterns and the patterns they
reference (recursively).
Args:
tops: Name(s) of patterns to keep
Returns:
A `Library` containing only `tops` and the patterns they reference.
"""
if isinstance(tops, str):
tops = (tops,)
keep: Set[str] = self.referenced_patterns(tops) - set((None,)) # type: ignore
keep |= set(tops)
new = type(self)()
for key in keep:
new.dict[key] = self.dict[key]
if key in self.cache:
new.cache[key] = self.cache[key]
return new
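For example (assuming the reference-accumulation fix above, and a hypothetical library where 'top' references 'mid', 'mid' references 'leaf', and 'unused' is unreferenced):

sub = lib.subtree('top')
sorted(sub.keys())   # -> ['leaf', 'mid', 'top']; 'unused' is dropped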
def dfs(
self: L,
top: str,
visit_before: visitor_function_t = None,
visit_after: visitor_function_t = None,
transform: Union[ArrayLike, bool, None] = False,
memo: Optional[Dict] = None,
hierarchy: Tuple[str, ...] = (),
) -> L:
"""
Convenience function.
Performs a depth-first traversal of a pattern and its subpatterns.
At each pattern in the tree, the following sequence is called:
```
current_pattern = visit_before(current_pattern, **visit_args)
for sp in current_pattern.subpatterns:
self.dfs(sp.target, visit_before, visit_after, updated_transform,
memo, hierarchy + (top,))
current_pattern = visit_after(current_pattern, **visit_args)
```
where `visit_args` are
`hierarchy`: (top_name, L1_name, L2_name, ..., parent_name)
tuple of all parent-and-higher pattern names
`transform`: numpy.ndarray containing cumulative
[x_offset, y_offset, rotation (rad), mirror_x (0 or 1)]
for the instance being visited
`memo`: Arbitrary dict (not altered except by `visit_before()` and `visit_after()`)
Args:
top: Name of the pattern to start at (root node of the tree).
visit_before: Function to call before traversing subpatterns.
Should accept a `Pattern` and `**visit_args`, and return the (possibly modified)
pattern. Default `None` (not called).
visit_after: Function to call after traversing subpatterns.
Should accept a `Pattern` and `**visit_args`, and return the (possibly modified)
pattern. Default `None` (not called).
transform: Initial value for `visit_args['transform']`.
Can be `False`, in which case the transform is not calculated.
`True` or `None` is interpreted as `[0, 0, 0, 0]`.
memo: Arbitrary dict for use by `visit_*()` functions. Default `None` (empty dict).
hierarchy: Tuple of pattern names specifying the hierarchy above the current pattern.
Appended to the start of the generated `visit_args['hierarchy']`.
Default is an empty tuple.
Returns:
self
"""
if memo is None:
memo = {}
if transform is None or transform is True:
transform = numpy.zeros(4)
elif transform is not False:
transform = numpy.array(transform)
if top in hierarchy:
raise PatternError('.dfs() called on pattern with circular reference')
pat = self[top]
if visit_before is not None:
pat = visit_before(pat, hierarchy=hierarchy, memo=memo, transform=transform) # type: ignore
for subpattern in pat.subpatterns:
if transform is not False:
sign = numpy.ones(2)
if transform[3]:
sign[1] = -1
xy = numpy.dot(rotation_matrix_2d(transform[2]), subpattern.offset * sign)
mirror_x, angle = normalize_mirror(subpattern.mirrored)
angle += subpattern.rotation
sp_transform = transform + (xy[0], xy[1], angle, mirror_x)
sp_transform[3] %= 2
else:
sp_transform = False
if subpattern.target is None:
continue
self.dfs(
top=subpattern.target,
visit_before=visit_before,
visit_after=visit_after,
transform=sp_transform,
memo=memo,
hierarchy=hierarchy + (top,),
)
if visit_after is not None:
pat = visit_after(pat, hierarchy=hierarchy, memo=memo, transform=transform) # type: ignore
self[top] = lambda: pat
return self
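A sketch (not part of the diff) of a `visit_before` hook; `dfs()` passes `hierarchy`, `memo`, and `transform` as keyword arguments, and the cell name `'top'` is hypothetical:

def count_shapes(pat, *, hierarchy, memo, transform):
    # Tally shapes, once per placement in the traversal tree
    memo['count'] = memo.get('count', 0) + len(pat.shapes)
    return pat

memo: dict = {}
lib.dfs('top', visit_before=count_shapes, memo=memo)
print(memo['count'])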
def polygonize(
self: L,
poly_num_points: Optional[int] = None,
poly_max_arclen: Optional[float] = None,
) -> L:
"""
Calls `.polygonize(...)` on each pattern in this library.
Arguments are passed on to `shape.to_polygons(...)`.
Args:
poly_num_points: Number of points to use for each polygon. Can be overridden by
`poly_max_arclen` if that results in more points. Optional, defaults to shapes'
internal defaults.
poly_max_arclen: Maximum arclength which can be approximated by a single line
segment. Optional, defaults to shapes' internal defaults.
Returns:
self
"""
for pat in self.values():
pat.polygonize(poly_num_points, poly_max_arclen)
return self
def manhattanize(
self: L,
grid_x: ArrayLike,
grid_y: ArrayLike,
) -> L:
"""
Calls `.manhattanize(grid_x, grid_y)` on each pattern in this library.
Args:
grid_x: List of allowed x-coordinates for the Manhattanized polygon edges.
grid_y: List of allowed y-coordinates for the Manhattanized polygon edges.
Returns:
self
"""
for pat in self.values():
pat.manhattanize(grid_x, grid_y)
return self
def subpatternize(
self: L,
norm_value: int = int(1e6),
exclude_types: Tuple[Type, ...] = (Polygon,),
label2name: Optional[Callable[[Tuple], str]] = None,
threshold: int = 2,
) -> L:
"""
Iterates through all `Pattern`s. Within each `Pattern`, it iterates
over all shapes, calling `.normalized_form(norm_value)` on them to retrieve a scale-,
offset-, dose-, and rotation-independent form. Each shape whose normalized form appears
more than once is removed and re-added using subpattern objects referencing a newly-created
`Pattern` containing only the normalized form of the shape.
Note:
The default norm_value was chosen to give a reasonable precision when using
integer values for coordinates.
Args:
norm_value: Passed to `shape.normalized_form(norm_value)`. Default `1e6` (see function
note)
exclude_types: Shape types passed in this argument are always left untouched, for
speed or convenience. Default: `(shapes.Polygon,)`
label2name: Given a label tuple as returned by `shape.normalized_form(...)`, pick
a name for the generated pattern. Default `self.get_name('_shape')`.
threshold: Only replace shapes with subpatterns if there will be at least this many
instances.
Returns:
self
"""
# This currently simplifies globally (same shape in different patterns is
# merged into the same subpattern target).
if exclude_types is None:
exclude_types = ()
if label2name is None:
label2name = lambda label: self.get_name('_shape')
shape_counts: MutableMapping[Tuple, int] = defaultdict(int)
shape_funcs = {}
### First pass ###
# Using the label tuple from `.normalized_form()` as a key, check how many of each shape
# are present and store the shape function for each one
for pat in tuple(self.values()):
for i, shape in enumerate(pat.shapes):
if not any(isinstance(shape, t) for t in exclude_types):
label, _values, func = shape.normalized_form(norm_value)
shape_funcs[label] = func
shape_counts[label] += 1
shape_pats = {}
for label, count in shape_counts.items():
if count < threshold:
continue
shape_func = shape_funcs[label]
shape_pat = Pattern(shapes=[shape_func()])
shape_pats[label] = shape_pat
### Second pass ###
for pat in tuple(self.values()):
# Store `[(index_in_shapes, values_from_normalized_form), ...]` for all shapes which
# are to be replaced.
# The `values` are `(offset, scale, rotation, mirror_x, dose)`.
shape_table: MutableMapping[Tuple, List] = defaultdict(list)
for i, shape in enumerate(pat.shapes):
if any(isinstance(shape, t) for t in exclude_types):
continue
label, values, _func = shape.normalized_form(norm_value)
if label not in shape_pats:
continue
shape_table[label].append((i, values))
# For repeated shapes, create a `Pattern` holding a normalized shape object,
# and add `pat.subpatterns` entries for each occurrence in pat. Also, note down that
# we should delete the `pat.shapes` entries for which we made SubPatterns.
shapes_to_remove = []
for label in shape_table:
target = label2name(label)
for i, values in shape_table[label]:
offset, scale, rotation, mirror_x, dose = values
pat.addsp(target=target, offset=offset, scale=scale,
rotation=rotation, dose=dose, mirrored=(mirror_x, False))
shapes_to_remove.append(i)
# Remove any shapes for which we have created subpatterns.
for i in sorted(shapes_to_remove, reverse=True):
del pat.shapes[i]
for ll, pp in shape_pats.items():
self[label2name(ll)] = lambda pp=pp: pp # bind pp now; a bare lambda would capture the loop variable
return self
def wrap_repeated_shapes(
self: L,
name_func: Optional[Callable[['Pattern', Union[Shape, Label]], str]] = None,
) -> L:
"""
Wraps all shapes and labels with a non-`None` `repetition` attribute
into a `SubPattern`/`Pattern` combination, and applies the `repetition`
to each `SubPattern` instead of its contained shape.
Args:
name_func: Function f(this_pattern, shape) which generates a name for the
wrapping pattern. Default is `self.get_name('_rep')`.
Returns:
self
"""
if name_func is None:
name_func = lambda _pat, _shape: self.get_name('_rep')
for pat in tuple(self.values()):
new_shapes = []
for shape in pat.shapes:
if shape.repetition is None:
new_shapes.append(shape)
continue
name = name_func(pat, shape)
self[name] = lambda shape=shape: Pattern(shapes=[shape]) # bind shape now (late-binding closure fix)
pat.addsp(name, repetition=shape.repetition)
shape.repetition = None
pat.shapes = new_shapes
new_labels = []
for label in pat.labels:
if label.repetition is None:
new_labels.append(label)
continue
name = name_func(pat, label)
self[name] = lambda label=label: Pattern(labels=[label]) # bind label now (late-binding closure fix)
pat.addsp(name, repetition=label.repetition)
label.repetition = None
pat.labels = new_labels
return self
def flatten(
self: L,
tops: Union[str, Sequence[str]],
) -> Dict[str, 'Pattern']:
"""
Removes all subpatterns and adds equivalent shapes.
Also flattens all subpatterns.
Args:
tops: The pattern(s) to flatten.
Returns:
{name: flat_pattern} mapping for all flattened patterns.
"""
if isinstance(tops, str):
tops = (tops,)
flattened: Dict[str, Optional[Pattern]] = {}
def flatten_single(name) -> None:
flattened[name] = None
pat = self[name].deepcopy()
for subpat in pat.subpatterns:
target = subpat.target
if target is None:
continue
if target not in flattened:
flatten_single(target)
if flattened[target] is None:
raise PatternError(f'Circular reference in {name} to {target}')
p = subpat.as_pattern(pattern=flattened[target])
pat.append(p)
pat.subpatterns.clear()
flattened[name] = pat
for top in tops:
flatten_single(top)
assert(None not in flattened.values())
return flattened # type: ignore
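Usage sketch (not part of the diff; 'top' is a hypothetical cell name):

flat = lib.flatten('top')
flat['top'].subpatterns   # -> []; the hierarchy under 'top' is folded into shapes/labels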
def get_name(
self,
name: str = '__',
sanitize: bool = True,
max_length: int = 32,
quiet: bool = False,
) -> str:
"""
Find a unique name for the pattern.
This function may be overridden in a subclass or monkey-patched to fit the caller's requirements.
Args:
name: Preferred name for the pattern. Default '__'.
sanitize: Allows only alphanumeric characters and _?$. Replaces invalid characters with underscores.
max_length: Names longer than this will be truncated.
quiet: If `True`, suppress log messages.
Returns:
Unique name for this library.
"""
if sanitize:
# Remove invalid characters
sanitized_name = re.compile(r'[^A-Za-z0-9_\?\$]').sub('_', name)
else:
sanitized_name = name
ii = 0
suffixed_name = sanitized_name
while suffixed_name in self or suffixed_name == '':
suffix = base64.b64encode(struct.pack('>Q', ii), b'$?').decode('ASCII')
suffixed_name = sanitized_name + '$' + suffix[:-1].lstrip('A')
ii += 1
if len(suffixed_name) > max_length:
if name == '':
raise LibraryError(f'No valid pattern names remaining within the specified {max_length=}')
cropped_name = self.get_name(sanitized_name[:-1], sanitize=sanitize, max_length=max_length, quiet=True)
else:
cropped_name = suffixed_name
if not quiet:
logger.info(f'Requested name "{name}" changed to "{cropped_name}"')
return cropped_name
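Worked examples derived from the code above ('my cell' is a hypothetical requested name):

lib.get_name('my cell')   # -> 'my_cell' if that name is free (space sanitized to '_')
# If taken, a '$'-delimited base64 counter is appended:
# 'my_cell$', then 'my_cell$E', then 'my_cell$I', ...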
def find_toplevel(self) -> List[str]:
"""
Return the list of all patterns that are not referenced by any other pattern in the library.
Returns:
A list of the names of all patterns that are not referenced by any other pattern in the library.
"""
names = set(self.keys())
not_toplevel: Set[Optional[str]] = set()
for name in names:
not_toplevel |= set(sp.target for sp in self[name].subpatterns)
toplevel = list(names - not_toplevel)
return toplevel
def __deepcopy__(self, memo: Optional[Dict] = None) -> 'Library':
raise LibraryError('Libraries cannot be deepcopied (deepcopy doesn\'t descend into closures)')

View File

@ -1,2 +0,0 @@
from .library import Library, PatternGenerator
from .device_library import DeviceLibrary, LibDeviceLibrary

View File

@ -1,355 +0,0 @@
"""
Library class for managing unique name->pattern mappings and
deferred loading or creation.
"""
from typing import Dict, Callable, TypeVar, TYPE_CHECKING
from typing import Any, Tuple, Union, Iterator
import logging
from pprint import pformat
from dataclasses import dataclass
import copy
from ..error import LibraryError
if TYPE_CHECKING:
from ..pattern import Pattern
logger = logging.getLogger(__name__)
@dataclass
class PatternGenerator:
__slots__ = ('tag', 'gen')
tag: str
""" Unique identifier for the source """
gen: Callable[[], 'Pattern']
""" Function which generates a pattern when called """
L = TypeVar('L', bound='Library')
class Library:
"""
This class is usually used to create a library of Patterns by mapping names to
functions which generate or load the relevant `Pattern` object as-needed.
Generated/loaded patterns can have "symbolic" references, where a SubPattern
object `sp` has a `None`-valued `sp.pattern` attribute, in which case the
Library expects `sp.identifier[0]` to contain a string which specifies the
referenced pattern's name.
Patterns can either be "primary" (default) or "secondary". Both get the
same deferred-load behavior, but "secondary" patterns may have conflicting
names and are not accessible through basic []-indexing. They are only used
to fill symbolic references in cases where there is no "primary" pattern
available, and only if both the referencing and referenced pattern-generators'
`tag` values match (i.e., only if they came from the same source).
Primary patterns can be turned into secondary patterns with the `demote`
method, `promote` performs the reverse (secondary -> primary) operation.
The `set_const` and `set_value` methods provide an easy way to transparently
construct PatternGenerator objects and directly create "secondary"
patterns.
The cache can be disabled by setting the `enable_cache` attribute to `False`.
"""
primary: Dict[str, PatternGenerator]
secondary: Dict[Tuple[str, str], PatternGenerator]
cache: Dict[Union[str, Tuple[str, str]], 'Pattern']
enable_cache: bool = True
def __init__(self) -> None:
self.primary = {}
self.secondary = {}
self.cache = {}
def __setitem__(self, key: str, value: PatternGenerator) -> None:
self.primary[key] = value
if key in self.cache:
logger.warning(f'Replaced library item "{key}" & existing cache entry.'
' Previously-generated Pattern will *not* be updated!')
del self.cache[key]
def __delitem__(self, key: str) -> None:
if isinstance(key, str):
del self.primary[key]
elif isinstance(key, tuple):
del self.secondary[key]
if key in self.cache:
logger.warning(f'Deleting library item "{key}" & existing cache entry.'
' Previously-generated Pattern may remain in the wild!')
del self.cache[key]
def __getitem__(self, key: str) -> 'Pattern':
return self.get_primary(key)
def __iter__(self) -> Iterator[str]:
return iter(self.keys())
def __contains__(self, key: str) -> bool:
return key in self.primary
def get_primary(self, key: str) -> 'Pattern':
if self.enable_cache and key in self.cache:
logger.debug(f'found {key} in cache')
return self.cache[key]
logger.debug(f'loading {key}')
pg = self.primary[key]
pat = pg.gen()
self.resolve_subpatterns(pat, pg.tag)
self.cache[key] = pat
return pat
def get_secondary(self, key: str, tag: str) -> 'Pattern':
logger.debug(f'get_secondary({key}, {tag})')
key2 = (key, tag)
if self.enable_cache and key2 in self.cache:
return self.cache[key2]
pg = self.secondary[key2]
pat = pg.gen()
self.resolve_subpatterns(pat, pg.tag)
self.cache[key2] = pat
return pat
def set_secondary(self, key: str, tag: str, value: PatternGenerator) -> None:
self.secondary[(key, tag)] = value
if (key, tag) in self.cache:
logger.warning(f'Replaced library item "{key}" & existing cache entry.'
' Previously-generated Pattern will *not* be updated!')
del self.cache[(key, tag)]
def resolve_subpatterns(self, pat: 'Pattern', tag: str) -> 'Pattern':
logger.debug(f'Resolving subpatterns in {pat.name}')
for sp in pat.subpatterns:
if sp.pattern is not None:
continue
key = sp.identifier[0]
if key in self.primary:
sp.pattern = self.get_primary(key)
continue
if (key, tag) in self.secondary:
sp.pattern = self.get_secondary(key, tag)
continue
raise LibraryError(f'Broken reference to {key} (tag {tag})')
return pat
def keys(self) -> Iterator[str]:
return iter(self.primary.keys())
def values(self) -> Iterator['Pattern']:
return iter(self[key] for key in self.keys())
def items(self) -> Iterator[Tuple[str, 'Pattern']]:
return iter((key, self[key]) for key in self.keys())
def __repr__(self) -> str:
return '<Library with keys ' + repr(list(self.primary.keys())) + '>'
def set_const(
self,
key: str,
tag: Any,
const: 'Pattern',
secondary: bool = False,
) -> None:
"""
Convenience function to avoid having to manually wrap
constant values into callables.
Args:
key: Lookup key, usually the cell/pattern name
tag: Unique tag for the source, used to disambiguate secondary patterns
const: Pattern object to return
secondary: If True, this pattern is not accessible for normal lookup, and is
only used as a sub-component of other patterns if no non-secondary
equivalent is available.
"""
pg = PatternGenerator(tag=tag, gen=lambda: const)
if secondary:
self.secondary[(key, tag)] = pg
else:
self.primary[key] = pg
def set_value(
self,
key: str,
tag: str,
value: Callable[[], 'Pattern'],
secondary: bool = False,
) -> None:
"""
Convenience function to automatically build a PatternGenerator.
Args:
key: Lookup key, usually the cell/pattern name
tag: Unique tag for the source, used to disambiguate secondary patterns
value: Callable which takes no arguments and generates the `Pattern` object
secondary: If True, this pattern is not accessible for normal lookup, and is
only used as a sub-component of other patterns if no non-secondary
equivalent is available.
"""
pg = PatternGenerator(tag=tag, gen=value)
if secondary:
self.secondary[(key, tag)] = pg
else:
self.primary[key] = pg
def precache(self: L) -> L:
"""
Force all patterns into the cache
Returns:
self
"""
for key in self.primary:
_ = self.get_primary(key)
for key2 in self.secondary:
_ = self.get_secondary(*key2)
return self
def add(
self: L,
other: L,
use_ours: Callable[[Union[str, Tuple[str, str]]], bool] = lambda name: False,
use_theirs: Callable[[Union[str, Tuple[str, str]]], bool] = lambda name: False,
) -> L:
"""
Add keys from another library into this one.
Args:
other: The library to insert keys from
use_ours: Decision function for name conflicts.
May be called with cell names and (name, tag) tuples for primary or
secondary cells, respectively.
Should return `True` if the value from `self` should be used.
use_theirs: Decision function for name conflicts. Same format as `use_ours`.
Should return `True` if the value from `other` should be used.
`use_ours` takes priority over `use_theirs`.
Returns:
self
"""
duplicates1 = set(self.primary.keys()) & set(other.primary.keys())
duplicates2 = set(self.secondary.keys()) & set(other.secondary.keys())
keep_ours1 = set(name for name in duplicates1 if use_ours(name))
keep_ours2 = set(name for name in duplicates2 if use_ours(name))
keep_theirs1 = set(name for name in duplicates1 - keep_ours1 if use_theirs(name))
keep_theirs2 = set(name for name in duplicates2 - keep_ours2 if use_theirs(name))
conflicts1 = duplicates1 - keep_ours1 - keep_theirs1
conflicts2 = duplicates2 - keep_ours2 - keep_theirs2
if conflicts1:
raise LibraryError('Unresolved duplicate keys encountered in library merge: ' + pformat(conflicts1))
if conflicts2:
raise LibraryError('Unresolved duplicate secondary keys encountered in library merge: ' + pformat(conflicts2))
for key1 in set(other.primary.keys()) - keep_ours1:
self[key1] = other.primary[key1]
if key1 in other.cache:
self.cache[key1] = other.cache[key1]
for key2 in set(other.secondary.keys()) - keep_ours2:
self.set_secondary(*key2, other.secondary[key2])
if key2 in other.cache:
self.cache[key2] = other.cache[key2]
return self
def demote(self, key: str) -> None:
"""
Turn a primary pattern into a secondary one.
It will no longer be accessible through [] indexing and will only be used to
when referenced by other patterns from the same source, and only if no primary
pattern with the same name exists.
Args:
key: Lookup key, usually the cell/pattern name
"""
pg = self.primary[key]
key2 = (key, pg.tag)
self.secondary[key2] = pg
if key in self.cache:
self.cache[key2] = self.cache[key]
del self[key]
def promote(self, key: str, tag: str) -> None:
"""
Turn a secondary pattern into a primary one.
It will become accessible through [] indexing and will be used to satisfy any
reference to a pattern with its key, regardless of tag.
Args:
key: Lookup key, usually the cell/pattern name
tag: Unique tag for identifying the pattern's source, used to disambiguate
secondary patterns
"""
if key in self.primary:
raise LibraryError(f'Promoting ({key}, {tag}), but {key} already exists in primary!')
key2 = (key, tag)
pg = self.secondary[key2]
self.primary[key] = pg
if key2 in self.cache:
self.cache[key] = self.cache[key2]
del self.secondary[key2]
del self.cache[key2]
def copy(self, preserve_cache: bool = False) -> 'Library':
"""
Create a copy of this `Library`.
A shallow copy is made of the contained dicts.
Note that you should probably clear the cache (with `clear_cache()`) after copying.
Returns:
A copy of self
"""
new = Library()
new.primary.update(self.primary)
new.secondary.update(self.secondary)
new.cache.update(self.cache)
return new
def clear_cache(self: L) -> L:
"""
Clear the cache of this library.
This is usually used before modifying or deleting cells, e.g. when merging
with another library.
Returns:
self
"""
self.cache = {}
return self
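
Typical usage sketch:

    scratch = lib.copy()                     # shallow-copied dicts, empty cache
    backup = lib.copy(preserve_cache=True)   # carry the pattern cache along
    backup.clear_cache()                     # drop it again before editing cells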
r"""
# Add a filter for names which aren't added
- Registration:
- scanned files (tag=filename, gen_fn[stream, {name: pos}])
- generator functions (tag='fn?', gen_fn[params])
- merge decision function (based on tag and cell name, can be "neither") ??? neither=keep both, load using same tag!
- Load process:
- file:
- read single cell
- check subpat identifiers, and load stuff recursively based on those. If not present, load from same file??
- function:
- generate cell
- traverse and check if we should load any subcells from elsewhere. replace if so.
* should fn generate subcells at all, or register those separately and have us control flow? maybe ask us and generate itself if not present?
- Scan all GDS files, save name -> (file, position). Keep the streams handy.
- Merge all names. This requires subcell merge because we don't know hierarchy.
- possibly include a "neither" option during merge, to deal with subcells. Means: just use parent's file.
"""
View File

@ -3,9 +3,8 @@
"""
from typing import List, Callable, Tuple, Dict, Union, Set, Sequence, Optional, Type, overload, cast
from typing import MutableMapping, Iterable, TypeVar, Any
from typing import Mapping, MutableMapping, Iterable, TypeVar, Any
import copy
import pickle
from itertools import chain
from collections import defaultdict
@ -18,23 +17,20 @@ from .subpattern import SubPattern
from .shapes import Shape, Polygon
from .label import Label
from .utils import rotation_matrix_2d, normalize_mirror, AutoSlots, annotations_t
from .error import PatternError, PatternLockedError
from .traits import LockableImpl, AnnotatableImpl, Scalable, Mirrorable
from .error import PatternError
from .traits import AnnotatableImpl, Scalable, Mirrorable
from .traits import Rotatable, Positionable
visitor_function_t = Callable[['Pattern', Tuple['Pattern', ...], Dict, NDArray[numpy.float64]], 'Pattern']
P = TypeVar('P', bound='Pattern')
class Pattern(LockableImpl, AnnotatableImpl, Mirrorable, metaclass=AutoSlots):
class Pattern(AnnotatableImpl, Mirrorable, metaclass=AutoSlots):
"""
2D layout consisting of some set of shapes, labels, and references to other Pattern objects
(via SubPattern). Shapes are assumed to inherit from masque.shapes.Shape or provide equivalent functions.
"""
__slots__ = ('shapes', 'labels', 'subpatterns', 'name')
__slots__ = ('shapes', 'labels', 'subpatterns')
shapes: List[Shape]
""" List of all shapes in this Pattern.
@ -50,18 +46,13 @@ class Pattern(LockableImpl, AnnotatableImpl, Mirrorable, metaclass=AutoSlots):
(i.e. multiple instances of the same object).
"""
name: str
""" A name for this pattern """
def __init__(
self,
name: str = '',
*,
shapes: Sequence[Shape] = (),
labels: Sequence[Label] = (),
subpatterns: Sequence[SubPattern] = (),
annotations: Optional[annotations_t] = None,
locked: bool = False,
) -> None:
"""
Basic init; arguments get assigned to member variables.
@ -71,10 +62,7 @@ class Pattern(LockableImpl, AnnotatableImpl, Mirrorable, metaclass=AutoSlots):
shapes: Initial shapes in the Pattern
labels: Initial labels in the Pattern
subpatterns: Initial subpatterns in the Pattern
name: An identifier for the Pattern
locked: Whether to lock the pattern after construction
"""
LockableImpl.unlock(self)
if isinstance(shapes, list):
self.shapes = shapes
else:
@ -91,41 +79,25 @@ class Pattern(LockableImpl, AnnotatableImpl, Mirrorable, metaclass=AutoSlots):
self.subpatterns = list(subpatterns)
self.annotations = annotations if annotations is not None else {}
self.name = name
self.set_locked(locked)
def __copy__(self, memo: Dict = None) -> 'Pattern':
return Pattern(name=self.name,
shapes=copy.deepcopy(self.shapes),
labels=copy.deepcopy(self.labels),
subpatterns=[copy.copy(sp) for sp in self.subpatterns],
annotations=copy.deepcopy(self.annotations),
locked=self.locked)
return Pattern(
shapes=copy.deepcopy(self.shapes),
labels=copy.deepcopy(self.labels),
subpatterns=[copy.copy(sp) for sp in self.subpatterns],
annotations=copy.deepcopy(self.annotations),
)
def __deepcopy__(self, memo: Dict = None) -> 'Pattern':
memo = {} if memo is None else memo
new = Pattern(
name=self.name,
shapes=copy.deepcopy(self.shapes, memo),
labels=copy.deepcopy(self.labels, memo),
subpatterns=copy.deepcopy(self.subpatterns, memo),
annotations=copy.deepcopy(self.annotations, memo),
locked=self.locked)
)
return new
def rename(self: P, name: str) -> P:
"""
Chainable function for renaming the pattern.
Args:
name: The new name
Returns:
self
"""
self.name = name
return self
def append(self: P, other_pattern: P) -> P:
"""
Appends all shapes, labels and subpatterns from other_pattern to self's shapes,
@ -144,10 +116,9 @@ class Pattern(LockableImpl, AnnotatableImpl, Mirrorable, metaclass=AutoSlots):
def subset(
self,
shapes_func: Callable[[Shape], bool] = None,
labels_func: Callable[[Label], bool] = None,
subpatterns_func: Callable[[SubPattern], bool] = None,
recursive: bool = False,
shapes: Callable[[Shape], bool] = None,
labels: Callable[[Label], bool] = None,
subpatterns: Callable[[SubPattern], bool] = None,
) -> 'Pattern':
"""
Returns a Pattern containing only the entities (e.g. shapes) for which the
@ -155,169 +126,24 @@ class Pattern(LockableImpl, AnnotatableImpl, Mirrorable, metaclass=AutoSlots):
Self is _not_ altered, but shapes, labels, and subpatterns are _not_ copied.
Args:
shapes_func: Given a shape, returns a boolean denoting whether the shape is a member
shapes: Given a shape, returns a boolean denoting whether the shape is a member
of the subset. Default always returns False.
labels_func: Given a label, returns a boolean denoting whether the label is a member
labels: Given a label, returns a boolean denoting whether the label is a member
of the subset. Default always returns False.
subpatterns_func: Given a subpattern, returns a boolean denoting if it is a member
subpatterns: Given a subpattern, returns a boolean denoting if it is a member
of the subset. Default always returns False.
recursive: If True, also calls .subset() recursively on patterns referenced by this
pattern.
Returns:
A Pattern containing all the shapes and subpatterns for which the parameter
functions return True
"""
def do_subset(src: Optional['Pattern']) -> Optional['Pattern']:
if src is None:
return None
pat = Pattern(name=src.name)
if shapes_func is not None:
pat.shapes = [s for s in src.shapes if shapes_func(s)]
if labels_func is not None:
pat.labels = [s for s in src.labels if labels_func(s)]
if subpatterns_func is not None:
pat.subpatterns = [s for s in src.subpatterns if subpatterns_func(s)]
return pat
if recursive:
pat = self.apply(do_subset)
else:
pat = do_subset(self)
assert(pat is not None)
return pat
def apply(
self,
func: Callable[[Optional['Pattern']], Optional['Pattern']],
memo: Optional[Dict[int, Optional['Pattern']]] = None,
) -> Optional['Pattern']:
"""
Recursively apply func() to this pattern and any pattern it references.
func() is expected to take and return a Pattern.
func() is first applied to the pattern as a whole, then any referenced patterns.
It is only applied to any given pattern once, regardless of how many times it is
referenced.
Args:
func: Function which accepts a Pattern, and returns a pattern.
memo: Dictionary used to avoid re-running on multiply-referenced patterns.
Stores `{id(pattern): func(pattern)}` for patterns which have already been processed.
Default `None` (no already-processed patterns).
Returns:
The result of applying func() to this pattern and all subpatterns.
Raises:
PatternError if called on a pattern containing a circular reference.
"""
if memo is None:
memo = {}
pat_id = id(self)
if pat_id not in memo:
memo[pat_id] = None
pat = func(self)
if pat is not None:
for subpat in pat.subpatterns:
if subpat.pattern is None:
subpat.pattern = func(None)
else:
subpat.pattern = subpat.pattern.apply(func, memo)
memo[pat_id] = pat
elif memo[pat_id] is None:
raise PatternError('.apply() called on pattern with circular reference')
else:
pat = memo[pat_id]
return pat
def dfs(
self: P,
visit_before: visitor_function_t = None,
visit_after: visitor_function_t = None,
transform: Union[ArrayLike, bool, None] = False,
memo: Optional[Dict] = None,
hierarchy: Tuple[P, ...] = (),
) -> P:
"""
Convenience function.
Performs a depth-first traversal of this pattern and its subpatterns.
At each pattern in the tree, the following sequence is called:
```
current_pattern = visit_before(current_pattern, **visit_args)
for sp in current_pattern.subpatterns:
    sp.pattern = sp.pattern.dfs(visit_before, visit_after, updated_transform,
                                memo, (current_pattern,) + hierarchy)
current_pattern = visit_after(current_pattern, **visit_args)
```
where `visit_args` are
`hierarchy`: (top_pattern, L1_pattern, L2_pattern, ..., parent_pattern)
tuple of all parent-and-higher patterns
`transform`: numpy.ndarray containing cumulative
[x_offset, y_offset, rotation (rad), mirror_x (0 or 1)]
for the instance being visited
`memo`: Arbitrary dict (not altered except by visit_*())
Args:
visit_before: Function to call before traversing subpatterns.
Should accept a `Pattern` and `**visit_args`, and return the (possibly modified)
pattern. Default `None` (not called).
visit_after: Function to call after traversing subpatterns.
Should accept a Pattern and **visit_args, and return the (possibly modified)
pattern. Default `None` (not called).
transform: Initial value for `visit_args['transform']`.
Can be `False`, in which case the transform is not calculated.
`True` or `None` is interpreted as `[0, 0, 0, 0]`.
memo: Arbitrary dict for use by `visit_*()` functions. Default `None` (empty dict).
hierarchy: Tuple of patterns specifying the hierarchy above the current pattern.
Appended to the start of the generated `visit_args['hierarchy']`.
Default is an empty tuple.
Returns:
The result, including `visit_before(self, ...)` and `visit_after(self, ...)`.
Note that `self` may also be altered!
"""
if memo is None:
memo = {}
if transform is None or transform is True:
transform = numpy.zeros(4)
elif transform is not False:
transform = numpy.array(transform)
if self in hierarchy:
raise PatternError('.dfs() called on pattern with circular reference')
pat = self
if visit_before is not None:
pat = visit_before(pat, hierarchy=hierarchy, memo=memo, transform=transform) # type: ignore
for subpattern in self.subpatterns:
if transform is not False:
sign = numpy.ones(2)
if transform[3]:
sign[1] = -1
xy = numpy.dot(rotation_matrix_2d(transform[2]), subpattern.offset * sign)
mirror_x, angle = normalize_mirror(subpattern.mirrored)
angle += subpattern.rotation
sp_transform = transform + (xy[0], xy[1], angle, mirror_x)
sp_transform[3] %= 2
else:
sp_transform = False
if subpattern.pattern is not None:
result = subpattern.pattern.dfs(visit_before=visit_before,
visit_after=visit_after,
transform=sp_transform,
memo=memo,
hierarchy=hierarchy + (self,))
if result is not subpattern.pattern:
# skip assignment to avoid PatternLockedError unless modified
subpattern.pattern = result
if visit_after is not None:
pat = visit_after(pat, hierarchy=hierarchy, memo=memo, transform=transform) # type: ignore
pat = Pattern()
if shapes is not None:
pat.shapes = [s for s in self.shapes if shapes(s)]
if labels is not None:
pat.labels = [s for s in self.labels if labels(s)]
if subpatterns is not None:
pat.subpatterns = [s for s in self.subpatterns if subpatterns(s)]
return pat
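
A usage sketch for the new filter names, given some `Pattern` instance `pat` (assuming integer layer values; `layer_t` also allows tuples):

    metal = pat.subset(
        shapes=lambda s: s.layer == 1,     # keep layer-1 shapes only
        subpatterns=lambda sp: True,       # keep all references
    )                                      # labels: filter is None -> empty list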
def polygonize(
@ -326,8 +152,7 @@ class Pattern(LockableImpl, AnnotatableImpl, Mirrorable, metaclass=AutoSlots):
poly_max_arclen: Optional[float] = None,
) -> P:
"""
Calls `.to_polygons(...)` on all the shapes in this Pattern and any referenced patterns,
replacing them with the returned polygons.
Calls `.to_polygons(...)` on all the shapes in this Pattern, replacing them with the returned polygons.
Arguments are passed directly to `shape.to_polygons(...)`.
Args:
@ -341,12 +166,9 @@ class Pattern(LockableImpl, AnnotatableImpl, Mirrorable, metaclass=AutoSlots):
self
"""
old_shapes = self.shapes
self.shapes = list(chain.from_iterable(
(shape.to_polygons(poly_num_points, poly_max_arclen)
for shape in old_shapes)))
for subpat in self.subpatterns:
if subpat.pattern is not None:
subpat.pattern.polygonize(poly_num_points, poly_max_arclen)
self.shapes = list(chain.from_iterable((
shape.to_polygons(poly_num_points, poly_max_arclen)
for shape in old_shapes)))
return self
def manhattanize(
@ -355,7 +177,7 @@ class Pattern(LockableImpl, AnnotatableImpl, Mirrorable, metaclass=AutoSlots):
grid_y: ArrayLike,
) -> P:
"""
Calls `.polygonize()` and `.flatten()` on the pattern, then calls `.manhattanize()` on all the
Calls `.polygonize()` on the pattern, then calls `.manhattanize()` on all the
resulting shapes, replacing them with the returned Manhattan polygons.
Args:
@ -366,84 +188,13 @@ class Pattern(LockableImpl, AnnotatableImpl, Mirrorable, metaclass=AutoSlots):
self
"""
self.polygonize().flatten()
self.polygonize()
old_shapes = self.shapes
self.shapes = list(chain.from_iterable(
(shape.manhattanize(grid_x, grid_y) for shape in old_shapes)))
return self
def subpatternize(
self: P,
recursive: bool = True,
norm_value: int = int(1e6),
exclude_types: Tuple[Type] = (Polygon,)
) -> P:
"""
Iterates through this `Pattern` and all referenced `Pattern`s. Within each `Pattern`, it iterates
over all shapes, calling `.normalized_form(norm_value)` on them to retrieve a scale-,
offset-, dose-, and rotation-independent form. Each shape whose normalized form appears
more than once is removed and re-added using subpattern objects referencing a newly-created
`Pattern` containing only the normalized form of the shape.
Note:
The default norm_value was chosen to give a reasonable precision when converting
to GDSII, which uses integer values for pixel coordinates.
Args:
recursive: Whether to call recursively on self's subpatterns. Default `True`.
norm_value: Passed to `shape.normalized_form(norm_value)`. Default `1e6` (see function
note about GDSII)
exclude_types: Shape types passed in this argument are always left untouched, for
speed or convenience. Default: `(shapes.Polygon,)`
Returns:
self
"""
if exclude_types is None:
exclude_types = ()
if recursive:
for subpat in self.subpatterns:
if subpat.pattern is None:
continue
subpat.pattern.subpatternize(recursive=True,
norm_value=norm_value,
exclude_types=exclude_types)
# Create a dict which uses the label tuple from `.normalized_form()` as a key, and which
# stores `(function_to_create_normalized_shape, [(index_in_shapes, values), ...])`, where
# values are the `(offset, scale, rotation, dose)` values as calculated by `.normalized_form()`
shape_table: MutableMapping[Tuple, List] = defaultdict(lambda: [None, list()])
for i, shape in enumerate(self.shapes):
if not any((isinstance(shape, t) for t in exclude_types)):
label, values, func = shape.normalized_form(norm_value)
shape_table[label][0] = func
shape_table[label][1].append((i, values))
# Iterate over the normalized shapes in the table. If any normalized shape occurs more than
# once, create a `Pattern` holding a normalized shape object, and add `self.subpatterns`
# entries for each occurrence in self. Also, note down that we should delete the
# `self.shapes` entries for which we made SubPatterns.
shapes_to_remove = []
for label in shape_table:
if len(shape_table[label][1]) > 1:
shape = shape_table[label][0]()
pat = Pattern(shapes=[shape])
for i, values in shape_table[label][1]:
(offset, scale, rotation, mirror_x, dose) = values
self.addsp(pattern=pat, offset=offset, scale=scale,
rotation=rotation, dose=dose, mirrored=(mirror_x, False))
shapes_to_remove.append(i)
# Remove any shapes for which we have created subpatterns.
for i in sorted(shapes_to_remove, reverse=True):
del self.shapes[i]
return self
def as_polygons(self) -> List[NDArray[numpy.float64]]:
def as_polygons(self, library: Mapping[str, 'Pattern']) -> List[NDArray[numpy.float64]]:
"""
Represents the pattern as a list of polygons.
@ -454,95 +205,22 @@ class Pattern(LockableImpl, AnnotatableImpl, Mirrorable, metaclass=AutoSlots):
A list of `(Ni, 2)` `numpy.ndarray`s specifying vertices of the polygons. Each ndarray
is of the form `[[x0, y0], [x1, y1],...]`.
"""
pat = self.deepcopy().deepunlock().polygonize().flatten()
pat = self.deepcopy().polygonize().flatten(library=library)
return [shape.vertices + shape.offset for shape in pat.shapes] # type: ignore # mypy can't figure out that shapes are all Polygons now
@overload
def referenced_patterns_by_id(self) -> Dict[int, 'Pattern']:
pass
@overload
def referenced_patterns_by_id(self, include_none: bool) -> Dict[int, Optional['Pattern']]:
pass
def referenced_patterns_by_id(
self,
include_none: bool = False,
recursive: bool = True,
) -> Union[Dict[int, Optional['Pattern']],
Dict[int, 'Pattern']]:
def referenced_patterns(self) -> Set[Optional[str]]:
"""
Create a dictionary with `{id(pat): pat}` for all Pattern objects referenced by this
Pattern (by default, operates recursively on all referenced Patterns as well).
Args:
include_none: If `True`, references to `None` will be included. Default `False`.
recursive: If `True`, operates recursively on all referenced patterns. Default `True`.
Get all pattern names referenced by this pattern. Non-recursive.
Returns:
Dictionary with `{id(pat): pat}` for all referenced Pattern objects
A set of all pattern names referenced by this pattern.
"""
ids: Dict[int, Optional['Pattern']] = {}
for subpat in self.subpatterns:
pat = subpat.pattern
if id(pat) in ids:
continue
if include_none or pat is not None:
ids[id(pat)] = pat
if recursive and pat is not None:
ids.update(pat.referenced_patterns_by_id())
return ids
return set(sp.target for sp in self.subpatterns)
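
Since `referenced_patterns()` is now non-recursive and returns names, a full hierarchy walk becomes a small helper over the library mapping. A sketch (hypothetical helper, not part of this commit; `library` is a plain `{name: Pattern}` dict):

    def all_refs(library: Mapping[str, 'Pattern'], name: str,
                 seen: Optional[Set[str]] = None) -> Set[str]:
        # Collect every pattern name reachable from `name`, cycle-safe.
        seen = set() if seen is None else seen
        for target in library[name].referenced_patterns():
            if target is not None and target not in seen:
                seen.add(target)
                all_refs(library, target, seen)
        return seen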
def referenced_patterns_by_name(
def get_bounds(
self,
**kwargs: Any,
) -> List[Tuple[Optional[str], Optional['Pattern']]]:
"""
Create a list of `(pat.name, pat)` tuples for all Pattern objects referenced by this
Pattern (operates recursively on all referenced Patterns as well).
Note that names are not necessarily unique, so a list of tuples is returned
rather than a dict.
Args:
**kwargs: passed to `referenced_patterns_by_id()`.
Returns:
List of `(pat.name, pat)` tuples for all referenced Pattern objects
"""
pats_by_id = self.referenced_patterns_by_id(**kwargs)
pat_list: List[Tuple[Optional[str], Optional['Pattern']]]
pat_list = [(p.name if p is not None else None, p) for p in pats_by_id.values()]
return pat_list
def subpatterns_by_id(
self,
include_none: bool = False,
recursive: bool = True,
) -> Dict[int, List[SubPattern]]:
"""
Create a dictionary which maps `{id(referenced_pattern): [subpattern0, ...]}`
for all SubPattern objects referenced by this Pattern (by default, operates
recursively on all referenced Patterns as well).
Args:
include_none: If `True`, references to `None` will be included. Default `False`.
recursive: If `True`, operates recursively on all referenced patterns. Default `True`.
Returns:
Dictionary mapping each pattern id to a list of subpattern objects referencing the pattern.
"""
ids: Dict[int, List[SubPattern]] = defaultdict(list)
for subpat in self.subpatterns:
pat = subpat.pattern
if include_none or pat is not None:
ids[id(pat)].append(subpat)
if recursive and pat is not None:
ids.update(pat.subpatterns_by_id(include_none=include_none))
return dict(ids)
def get_bounds(self) -> Union[NDArray[numpy.float64], None]:
library: Optional[Mapping[str, 'Pattern']] = None,
) -> Optional[NDArray[numpy.float64]]:
"""
Return a `numpy.ndarray` containing `[[x_min, y_min], [x_max, y_max]]`, corresponding to the
extent of the Pattern's contents in each dimension.
@ -557,119 +235,42 @@ class Pattern(LockableImpl, AnnotatableImpl, Mirrorable, metaclass=AutoSlots):
min_bounds = numpy.array((+inf, +inf))
max_bounds = numpy.array((-inf, -inf))
for entry in chain(self.shapes, self.subpatterns, self.labels):
for entry in chain(self.shapes, self.labels):
bounds = entry.get_bounds()
if bounds is None:
continue
min_bounds = numpy.minimum(min_bounds, bounds[0, :])
max_bounds = numpy.maximum(max_bounds, bounds[1, :])
if self.subpatterns and (library is None):
raise PatternError('Must provide a library to get_bounds() to resolve subpatterns')
for entry in self.subpatterns:
bounds = entry.get_bounds(library=library)
if bounds is None:
continue
min_bounds = numpy.minimum(min_bounds, bounds[0, :])
max_bounds = numpy.maximum(max_bounds, bounds[1, :])
if (max_bounds < min_bounds).any():
return None
else:
return numpy.vstack((min_bounds, max_bounds))
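
With the new library argument, computing a bounding box for a cell that contains references looks like (with `library` a plain `{name: Pattern}` dict):

    bbox = library['top'].get_bounds(library=library)
    if bbox is not None:                       # None -> pattern is empty
        (x_min, y_min), (x_max, y_max) = bbox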
def get_bounds_nonempty(self) -> NDArray[numpy.float64]:
def get_bounds_nonempty(
self,
library: Optional[Mapping[str, 'Pattern']] = None,
) -> NDArray[numpy.float64]:
"""
Convenience wrapper for `get_bounds()` which asserts that the Pattern has non-None bounds.
Returns:
`[[x_min, y_min], [x_max, y_max]]`
"""
bounds = self.get_bounds()
bounds = self.get_bounds(library)
assert(bounds is not None)
return bounds
def flatten(self: P) -> P:
"""
Removes all subpatterns and adds equivalent shapes.
Also flattens all subpatterns.
Modifies patterns in-place.
Shape/label identifiers are changed to represent their original position in the
pattern hierarchy:
`(L1_sp_index (int), L2_sp_index (int), ..., sh_index (int), *original_shape_identifier)`
where the original shape can be accessed as e.g.
`self.subpatterns[L1_sp_index].pattern.subpatterns[L2_sp_index].shapes[L1_sh_index]`
Returns:
self
"""
def flatten_single(pat: P, flattened: Set[P]) -> P:
# Update identifiers so each shape has a unique one
for ss, shape in enumerate(pat.shapes):
shape.identifier = (ss,) + shape.identifier
for ll, label in enumerate(pat.labels):
label.identifier = (ll,) + label.identifier
for pp, subpat in enumerate(pat.subpatterns):
if subpat.pattern is None:
continue
if subpat.pattern not in flattened:
flatten_single(subpat.pattern, flattened)
flattened.add(subpat.pattern)
p = subpat.as_pattern()
for item in chain(p.shapes, p.labels):
item.identifier = (pp,) + item.identifier
pat.append(p)
pat.subpatterns = []
return pat
flatten_single(self, set())
return self
def wrap_repeated_shapes(
self: P,
name_func: Callable[['Pattern', Union[Shape, Label]], str] = lambda p, s: '_repetition',
recursive: bool = True,
) -> P:
"""
Wraps all shapes and labels with a non-`None` `repetition` attribute
into a `SubPattern`/`Pattern` combination, and applies the `repetition`
to each `SubPattern` instead of its contained shape.
Args:
name_func: Function f(this_pattern, shape) which generates a name for the
wrapping pattern. Default always returns '_repetition'.
recursive: If `True`, this function is also applied to all referenced patterns
recursively. Default `True`.
Returns:
self
"""
def do_wrap(pat: Optional[Pattern]) -> Optional[Pattern]:
if pat is None:
return pat
new_shapes = []
for shape in pat.shapes:
if shape.repetition is None:
new_shapes.append(shape)
continue
pat.addsp(Pattern(name_func(pat, shape), shapes=[shape]), repetition=shape.repetition)
shape.repetition = None
pat.shapes = new_shapes
new_labels = []
for label in pat.labels:
if label.repetition is None:
new_labels.append(label)
continue
pat.addsp(Pattern(name_func(pat, label), labels=[label]), repetition=label.repetition)
label.repetition = None
pat.labels = new_labels
return pat
if recursive:
self.apply(do_wrap)
else:
do_wrap(self)
return self
def translate_elements(self: P, offset: ArrayLike) -> P:
"""
Translates all shapes, label, and subpatterns by the given offset.
@ -872,98 +473,51 @@ class Pattern(LockableImpl, AnnotatableImpl, Mirrorable, metaclass=AutoSlots):
self.subpatterns.append(SubPattern(*args, **kwargs))
return self
def lock(self: P) -> P:
def flatten(
self: P,
library: Mapping[str, P],
) -> 'Pattern':
"""
Lock the pattern, raising an exception if it is modified.
Also see `deeplock()`.
Returns:
self
"""
if not self.locked:
self.shapes = tuple(self.shapes)
self.labels = tuple(self.labels)
self.subpatterns = tuple(self.subpatterns)
LockableImpl.lock(self)
return self
def unlock(self: P) -> P:
"""
Unlock the pattern
Returns:
self
"""
if self.locked:
LockableImpl.unlock(self)
self.shapes = list(self.shapes)
self.labels = list(self.labels)
self.subpatterns = list(self.subpatterns)
return self
def deeplock(self: P) -> P:
"""
Recursively lock the pattern, all referenced shapes, subpatterns, and labels.
Returns:
self
"""
self.lock()
for ss in chain(self.shapes, self.labels):
ss.lock() # type: ignore # mypy struggles with multiple inheritance :(
for sp in self.subpatterns:
sp.deeplock()
return self
def deepunlock(self: P) -> P:
"""
Recursively unlock the pattern, all referenced shapes, subpatterns, and labels.
This is dangerous unless you have just performed a deepcopy, since anything
you change will be changed everywhere it is referenced!
Return:
self
"""
self.unlock()
for ss in chain(self.shapes, self.labels):
ss.unlock() # type: ignore # mypy struggles with multiple inheritance :(
for sp in self.subpatterns:
sp.deepunlock()
return self
@staticmethod
def load(filename: str) -> 'Pattern':
"""
Load a Pattern from a file using pickle
Removes all subpatterns (recursively) and adds equivalent shapes.
Alters the current pattern in-place
Args:
filename: Filename to load from
Returns:
Loaded Pattern
"""
with open(filename, 'rb') as f:
pattern = pickle.load(f)
return pattern
def save(self, filename: str) -> 'Pattern':
"""
Save the Pattern to a file using pickle
Args:
filename: Filename to save to
library: Source for referenced patterns.
Returns:
self
"""
with open(filename, 'wb') as f:
pickle.dump(self, f, protocol=pickle.HIGHEST_PROTOCOL)
flattened: Dict[Optional[str], Optional[P]] = {}
def flatten_single(name: Optional[str]) -> None:
if name is None:
pat = self
else:
pat = library[name].deepcopy()
flattened[name] = None
for subpat in pat.subpatterns:
target = subpat.target
if target is None:
continue
if target not in flattened:
flatten_single(target)
if flattened[target] is None:
raise PatternError(f'Circular reference in {name} to {target}')
p = subpat.as_pattern(pattern=flattened[target])
pat.append(p)
pat.subpatterns.clear()
flattened[name] = pat
flatten_single(None)
return self
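
Usage sketch (again with `library` a plain `{name: Pattern}` dict):

    top = library['top'].deepcopy()    # flatten() alters the pattern in-place
    top.flatten(library=library)
    assert not top.subpatterns         # all references replaced by shapes/labels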
def visualize(
self,
self: P,
library: Optional[Mapping[str, P]] = None,
offset: ArrayLike = (0., 0.),
line_color: str = 'k',
fill_color: str = 'none',
@ -987,6 +541,9 @@ class Pattern(LockableImpl, AnnotatableImpl, Mirrorable, metaclass=AutoSlots):
from matplotlib import pyplot # type: ignore
import matplotlib.collections # type: ignore
if self.subpatterns and library is None:
raise PatternError('Must provide a library when visualizing a pattern with subpatterns')
offset = numpy.array(offset, dtype=float)
if not overdraw:
@ -1001,50 +558,27 @@ class Pattern(LockableImpl, AnnotatableImpl, Mirrorable, metaclass=AutoSlots):
for shape in self.shapes:
polygons += [offset + s.offset + s.vertices for s in shape.to_polygons()]
mpl_poly_collection = matplotlib.collections.PolyCollection(polygons,
facecolors=fill_color,
edgecolors=line_color)
mpl_poly_collection = matplotlib.collections.PolyCollection(
polygons,
facecolors=fill_color,
edgecolors=line_color,
)
axes.add_collection(mpl_poly_collection)
pyplot.axis('equal')
for subpat in self.subpatterns:
subpat.as_pattern().visualize(offset=offset, overdraw=True,
line_color=line_color, fill_color=fill_color)
subpat.as_pattern(library=library).visualize(
library=library,
offset=offset,
overdraw=True,
line_color=line_color,
fill_color=fill_color,
)
if not overdraw:
pyplot.xlabel('x')
pyplot.ylabel('y')
pyplot.show()
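
Matching sketch for the new signature (same `{name: Pattern}` dict assumption):

    library['top'].visualize(library=library)   # referenced cells are drawn recursively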
@staticmethod
def find_toplevel(patterns: Iterable['Pattern']) -> List['Pattern']:
"""
Given a list of Pattern objects, return those that are not referenced by
any other pattern.
Args:
patterns: A list of patterns to filter.
Returns:
A filtered list in which no pattern is referenced by any other pattern.
"""
def get_children(pat: Pattern, memo: Set) -> Set:
children = set(sp.pattern for sp in pat.subpatterns if sp.pattern is not None)
new_children = children - memo
memo |= new_children
for child_pat in new_children:
memo |= get_children(child_pat, memo)
return memo
patterns = set(patterns)
not_toplevel: Set['Pattern'] = set()
for pattern in patterns:
not_toplevel |= get_children(pattern, not_toplevel)
toplevel = list(patterns - not_toplevel)
return toplevel
def __repr__(self) -> str:
locked = ' L' if self.locked else ''
return (f'<Pattern "{self.name}": sh{len(self.shapes)} sp{len(self.subpatterns)} la{len(self.labels)}{locked}>')
return (f'<Pattern: sh{len(self.shapes)} sp{len(self.subpatterns)} la{len(self.labels)}>')
View File

@ -12,7 +12,7 @@ from numpy.typing import ArrayLike, NDArray
from .error import PatternError
from .utils import rotation_matrix_2d, AutoSlots
from .traits import LockableImpl, Copyable, Scalable, Rotatable, Mirrorable
from .traits import Copyable, Scalable, Rotatable, Mirrorable
class Repetition(Copyable, Rotatable, Mirrorable, Scalable, metaclass=ABCMeta):
@ -30,7 +30,7 @@ class Repetition(Copyable, Rotatable, Mirrorable, Scalable, metaclass=ABCMeta):
pass
class Grid(LockableImpl, Repetition, metaclass=AutoSlots):
class Grid(Repetition, metaclass=AutoSlots):
"""
`Grid` describes a 2D grid formed by two basis vectors and two 'counts' (sizes).
@ -67,7 +67,6 @@ class Grid(LockableImpl, Repetition, metaclass=AutoSlots):
a_count: int,
b_vector: Optional[ArrayLike] = None,
b_count: Optional[int] = 1,
locked: bool = False,
) -> None:
"""
Args:
@ -79,7 +78,6 @@ class Grid(LockableImpl, Repetition, metaclass=AutoSlots):
Can be omitted when specifying a 1D array.
b_count: Number of elements in the `b_vector` direction.
Should be omitted if `b_vector` was omitted.
locked: Whether the `Grid` is locked after initialization.
Raises:
PatternError if `b_*` inputs conflict with each other
@ -99,12 +97,10 @@ class Grid(LockableImpl, Repetition, metaclass=AutoSlots):
if b_count < 1:
raise PatternError(f'Repetition has too-small b_count: {b_count}')
object.__setattr__(self, 'locked', False)
self.a_vector = a_vector # type: ignore # setter handles type conversion
self.b_vector = b_vector # type: ignore # setter handles type conversion
self.a_count = a_count
self.b_count = b_count
self.locked = locked
@classmethod
def aligned(
@ -129,18 +125,17 @@ class Grid(LockableImpl, Repetition, metaclass=AutoSlots):
return cls(a_vector=(x, 0), b_vector=(0, y), a_count=x_count, b_count=y_count)
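
Sketch; the argument order (x, x_count, y, y_count) is inferred from the `cls(...)` call above:

    rep = Grid.aligned(10, 4, 15, 3)   # 4 columns at x-pitch 10, 3 rows at y-pitch 15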
def __copy__(self) -> 'Grid':
new = Grid(a_vector=self.a_vector.copy(),
b_vector=copy.copy(self.b_vector),
a_count=self.a_count,
b_count=self.b_count,
locked=self.locked)
new = Grid(
a_vector=self.a_vector.copy(),
b_vector=copy.copy(self.b_vector),
a_count=self.a_count,
b_count=self.b_count,
)
return new
def __deepcopy__(self, memo: Dict = None) -> 'Grid':
memo = {} if memo is None else memo
new = copy.copy(self)
LocakbleImpl.unlock(new)
new.locked = self.locked
return new
# a_vector property
@ -264,36 +259,9 @@ class Grid(LockableImpl, Repetition, metaclass=AutoSlots):
self.b_vector *= c
return self
def lock(self) -> 'Grid':
"""
Lock the `Grid`, disallowing changes.
Returns:
self
"""
self.a_vector.flags.writeable = False
if self.b_vector is not None:
self.b_vector.flags.writeable = False
LockableImpl.lock(self)
return self
def unlock(self) -> 'Grid':
"""
Unlock the `Grid`
Returns:
self
"""
self.a_vector.flags.writeable = True
if self.b_vector is not None:
self.b_vector.flags.writeable = True
LockableImpl.unlock(self)
return self
def __repr__(self) -> str:
locked = ' L' if self.locked else ''
bv = f', {self.b_vector}' if self.b_vector is not None else ''
return (f'<Grid {self.a_count}x{self.b_count} ({self.a_vector}{bv}){locked}>')
return (f'<Grid {self.a_count}x{self.b_count} ({self.a_vector}{bv})>')
def __eq__(self, other: Any) -> bool:
if not isinstance(other, type(self)):
@ -308,12 +276,10 @@ class Grid(LockableImpl, Repetition, metaclass=AutoSlots):
return False
if any(self.b_vector[ii] != other.b_vector[ii] for ii in range(2)):
return False
if self.locked != other.locked:
return False
return True
class Arbitrary(LockableImpl, Repetition, metaclass=AutoSlots):
class Arbitrary(Repetition, metaclass=AutoSlots):
"""
`Arbitrary` is a simple list of (absolute) displacements for instances.
@ -342,48 +308,19 @@ class Arbitrary(LockableImpl, Repetition, metaclass=AutoSlots):
def __init__(
self,
displacements: ArrayLike,
locked: bool = False,
) -> None:
"""
Args:
displacements: List of vectors (Nx2 ndarray) specifying displacements.
locked: Whether the object is locked after initialization.
"""
object.__setattr__(self, 'locked', False)
self.displacements = displacements
self.locked = locked
def lock(self) -> 'Arbitrary':
"""
Lock the object, disallowing changes.
Returns:
self
"""
self._displacements.flags.writeable = False
LockableImpl.lock(self)
return self
def unlock(self) -> 'Arbitrary':
"""
Unlock the object
Returns:
self
"""
self._displacements.flags.writeable = True
LockableImpl.unlock(self)
return self
def __repr__(self) -> str:
locked = ' L' if self.locked else ''
return (f'<Arbitrary {len(self.displacements)}pts {locked}>')
return (f'<Arbitrary {len(self.displacements)}pts>')
def __eq__(self, other: Any) -> bool:
if not isinstance(other, type(self)):
return False
if self.locked != other.locked:
return False
return numpy.array_equal(self.displacements, other.displacements)
def rotate(self, rotation: float) -> 'Arbitrary':
View File

@ -10,7 +10,6 @@ from . import Shape, Polygon, normalized_shape_tuple, DEFAULT_POLY_NUM_POINTS
from .. import PatternError
from ..repetition import Repetition
from ..utils import is_scalar, layer_t, AutoSlots, annotations_t
from ..traits import LockableImpl
class Arc(Shape, metaclass=AutoSlots):
@ -166,10 +165,8 @@ class Arc(Shape, metaclass=AutoSlots):
dose: float = 1.0,
repetition: Optional[Repetition] = None,
annotations: Optional[annotations_t] = None,
locked: bool = False,
raw: bool = False,
) -> None:
LockableImpl.unlock(self)
self.identifier = ()
if raw:
assert(isinstance(radii, numpy.ndarray))
@ -197,17 +194,14 @@ class Arc(Shape, metaclass=AutoSlots):
self.poly_num_points = poly_num_points
self.poly_max_arclen = poly_max_arclen
[self.mirror(a) for a, do in enumerate(mirrored) if do]
self.set_locked(locked)
def __deepcopy__(self, memo: Dict = None) -> 'Arc':
memo = {} if memo is None else memo
new = copy.copy(self)
Shape.unlock(new)
new._offset = self._offset.copy()
new._radii = self._radii.copy()
new._angles = self._angles.copy()
new._annotations = copy.deepcopy(self._annotations)
new.set_locked(self.locked)
return new
def to_polygons(
@ -429,21 +423,8 @@ class Arc(Shape, metaclass=AutoSlots):
a.append((a0, a1))
return numpy.array(a)
def lock(self) -> 'Arc':
self.radii.flags.writeable = False
self.angles.flags.writeable = False
Shape.lock(self)
return self
def unlock(self) -> 'Arc':
Shape.unlock(self)
self.radii.flags.writeable = True
self.angles.flags.writeable = True
return self
def __repr__(self) -> str:
angles = f'{numpy.rad2deg(self.angles)}'
rotation = f'{numpy.rad2deg(self.rotation):g}' if self.rotation != 0 else ''
dose = f' d{self.dose:g}' if self.dose != 1 else ''
locked = ' L' if self.locked else ''
return f'<Arc l{self.layer} o{self.offset} r{self.radii}{angles} w{self.width:g}{rotation}{dose}{locked}>'
return f'<Arc l{self.layer} o{self.offset} r{self.radii}{angles} w{self.width:g}{rotation}{dose}>'
View File

@ -9,7 +9,6 @@ from . import Shape, Polygon, normalized_shape_tuple, DEFAULT_POLY_NUM_POINTS
from .. import PatternError
from ..repetition import Repetition
from ..utils import is_scalar, layer_t, AutoSlots, annotations_t
from ..traits import LockableImpl
class Circle(Shape, metaclass=AutoSlots):
@ -54,10 +53,8 @@ class Circle(Shape, metaclass=AutoSlots):
dose: float = 1.0,
repetition: Optional[Repetition] = None,
annotations: Optional[annotations_t] = None,
locked: bool = False,
raw: bool = False,
) -> None:
LockableImpl.unlock(self)
self.identifier = ()
if raw:
assert(isinstance(offset, numpy.ndarray))
@ -76,15 +73,12 @@ class Circle(Shape, metaclass=AutoSlots):
self.dose = dose
self.poly_num_points = poly_num_points
self.poly_max_arclen = poly_max_arclen
self.set_locked(locked)
def __deepcopy__(self, memo: Dict = None) -> 'Circle':
memo = {} if memo is None else memo
new = copy.copy(self)
Shape.unlock(new)
new._offset = self._offset.copy()
new._annotations = copy.deepcopy(self._annotations)
new.set_locked(self.locked)
return new
def to_polygons(
@ -138,5 +132,4 @@ class Circle(Shape, metaclass=AutoSlots):
def __repr__(self) -> str:
dose = f' d{self.dose:g}' if self.dose != 1 else ''
locked = ' L' if self.locked else ''
return f'<Circle l{self.layer} o{self.offset} r{self.radius:g}{dose}{locked}>'
return f'<Circle l{self.layer} o{self.offset} r{self.radius:g}{dose}>'


@ -10,7 +10,6 @@ from . import Shape, Polygon, normalized_shape_tuple, DEFAULT_POLY_NUM_POINTS
from .. import PatternError
from ..repetition import Repetition
from ..utils import is_scalar, rotation_matrix_2d, layer_t, AutoSlots, annotations_t
from ..traits import LockableImpl
class Ellipse(Shape, metaclass=AutoSlots):
@ -101,10 +100,8 @@ class Ellipse(Shape, metaclass=AutoSlots):
dose: float = 1.0,
repetition: Optional[Repetition] = None,
annotations: Optional[annotations_t] = None,
locked: bool = False,
raw: bool = False,
) -> None:
LockableImpl.unlock(self)
self.identifier = ()
if raw:
assert(isinstance(radii, numpy.ndarray))
@ -127,16 +124,13 @@ class Ellipse(Shape, metaclass=AutoSlots):
[self.mirror(a) for a, do in enumerate(mirrored) if do]
self.poly_num_points = poly_num_points
self.poly_max_arclen = poly_max_arclen
self.set_locked(locked)
def __deepcopy__(self, memo: Dict = None) -> 'Ellipse':
memo = {} if memo is None else memo
new = copy.copy(self)
Shape.unlock(new)
new._offset = self._offset.copy()
new._radii = self._radii.copy()
new._annotations = copy.deepcopy(self._annotations)
new.set_locked(self.locked)
return new
def to_polygons(
@ -209,18 +203,7 @@ class Ellipse(Shape, metaclass=AutoSlots):
(self.offset, scale / norm_value, angle, False, self.dose),
lambda: Ellipse(radii=radii * norm_value, layer=self.layer))
def lock(self) -> 'Ellipse':
self.radii.flags.writeable = False
Shape.lock(self)
return self
def unlock(self) -> 'Ellipse':
Shape.unlock(self)
self.radii.flags.writeable = True
return self
def __repr__(self) -> str:
rotation = f' r{self.rotation*180/pi:g}' if self.rotation != 0 else ''
dose = f' d{self.dose:g}' if self.dose != 1 else ''
locked = ' L' if self.locked else ''
return f'<Ellipse l{self.layer} o{self.offset} r{self.radii}{rotation}{dose}{locked}>'
return f'<Ellipse l{self.layer} o{self.offset} r{self.radii}{rotation}{dose}>'
View File

@ -11,7 +11,6 @@ from .. import PatternError
from ..repetition import Repetition
from ..utils import is_scalar, rotation_matrix_2d, layer_t, AutoSlots
from ..utils import remove_colinear_vertices, remove_duplicate_vertices, annotations_t
from ..traits import LockableImpl
class PathCap(Enum):
@ -155,10 +154,8 @@ class Path(Shape, metaclass=AutoSlots):
dose: float = 1.0,
repetition: Optional[Repetition] = None,
annotations: Optional[annotations_t] = None,
locked: bool = False,
raw: bool = False,
) -> None:
LockableImpl.unlock(self)
self._cap_extensions = None # Since .cap setter might access it
self.identifier = ()
@ -187,18 +184,15 @@ class Path(Shape, metaclass=AutoSlots):
self.cap_extensions = cap_extensions
self.rotate(rotation)
[self.mirror(a) for a, do in enumerate(mirrored) if do]
self.set_locked(locked)
def __deepcopy__(self, memo: Dict = None) -> 'Path':
memo = {} if memo is None else memo
new = copy.copy(self)
Shape.unlock(new)
new._offset = self._offset.copy()
new._vertices = self._vertices.copy()
new._cap = copy.deepcopy(self._cap, memo)
new._cap_extensions = copy.deepcopy(self._cap_extensions, memo)
new._annotations = copy.deepcopy(self._annotations)
new.set_locked(self.locked)
return new
@staticmethod
@ -424,22 +418,7 @@ class Path(Shape, metaclass=AutoSlots):
extensions = numpy.zeros(2)
return extensions
def lock(self) -> 'Path':
self.vertices.flags.writeable = False
if self.cap_extensions is not None:
self.cap_extensions.flags.writeable = False
Shape.lock(self)
return self
def unlock(self) -> 'Path':
Shape.unlock(self)
self.vertices.flags.writeable = True
if self.cap_extensions is not None:
self.cap_extensions.flags.writeable = True
return self
def __repr__(self) -> str:
centroid = self.offset + self.vertices.mean(axis=0)
dose = f' d{self.dose:g}' if self.dose != 1 else ''
locked = ' L' if self.locked else ''
return f'<Path l{self.layer} centroid {centroid} v{len(self.vertices)} w{self.width} c{self.cap}{dose}{locked}>'
return f'<Path l{self.layer} centroid {centroid} v{len(self.vertices)} w{self.width} c{self.cap}{dose}>'
View File

@ -10,7 +10,6 @@ from .. import PatternError
from ..repetition import Repetition
from ..utils import is_scalar, rotation_matrix_2d, layer_t, AutoSlots
from ..utils import remove_colinear_vertices, remove_duplicate_vertices, annotations_t
from ..traits import LockableImpl
class Polygon(Shape, metaclass=AutoSlots):
@ -83,10 +82,8 @@ class Polygon(Shape, metaclass=AutoSlots):
dose: float = 1.0,
repetition: Optional[Repetition] = None,
annotations: Optional[annotations_t] = None,
locked: bool = False,
raw: bool = False,
) -> None:
LockableImpl.unlock(self)
self.identifier = ()
if raw:
assert(isinstance(vertices, numpy.ndarray))
@ -106,16 +103,13 @@ class Polygon(Shape, metaclass=AutoSlots):
self.dose = dose
self.rotate(rotation)
[self.mirror(a) for a, do in enumerate(mirrored) if do]
self.set_locked(locked)
def __deepcopy__(self, memo: Optional[Dict] = None) -> 'Polygon':
memo = {} if memo is None else memo
new = copy.copy(self)
Shape.unlock(new)
new._offset = self._offset.copy()
new._vertices = self._vertices.copy()
new._annotations = copy.deepcopy(self._annotations)
new.set_locked(self.locked)
return new
@staticmethod
@ -430,18 +424,7 @@ class Polygon(Shape, metaclass=AutoSlots):
self.vertices = remove_colinear_vertices(self.vertices, closed_path=True)
return self
def lock(self) -> 'Polygon':
self.vertices.flags.writeable = False
Shape.lock(self)
return self
def unlock(self) -> 'Polygon':
Shape.unlock(self)
self.vertices.flags.writeable = True
return self
def __repr__(self) -> str:
centroid = self.offset + self.vertices.mean(axis=0)
dose = f' d{self.dose:g}' if self.dose != 1 else ''
locked = ' L' if self.locked else ''
return f'<Polygon l{self.layer} centroid {centroid} v{len(self.vertices)}{dose}{locked}>'
return f'<Polygon l{self.layer} centroid {centroid} v{len(self.vertices)}{dose}>'
View File

@ -4,10 +4,11 @@ from abc import ABCMeta, abstractmethod
import numpy
from numpy.typing import NDArray, ArrayLike
from ..traits import (PositionableImpl, LayerableImpl, DoseableImpl,
Rotatable, Mirrorable, Copyable, Scalable,
PivotableImpl, LockableImpl, RepeatableImpl,
AnnotatableImpl)
from ..traits import (
PositionableImpl, LayerableImpl, DoseableImpl,
Rotatable, Mirrorable, Copyable, Scalable,
PivotableImpl, RepeatableImpl, AnnotatableImpl,
)
if TYPE_CHECKING:
from . import Polygon
@ -27,7 +28,7 @@ T = TypeVar('T', bound='Shape')
class Shape(PositionableImpl, LayerableImpl, DoseableImpl, Rotatable, Mirrorable, Copyable, Scalable,
PivotableImpl, RepeatableImpl, LockableImpl, AnnotatableImpl, metaclass=ABCMeta):
PivotableImpl, RepeatableImpl, AnnotatableImpl, metaclass=ABCMeta):
"""
Abstract class specifying functions common to all shapes.
"""
@ -303,13 +304,3 @@ class Shape(PositionableImpl, LayerableImpl, DoseableImpl, Rotatable, Mirrorable
dose=self.dose))
return manhattan_polygons
def lock(self: T) -> T:
PositionableImpl._lock(self)
LockableImpl.lock(self)
return self
def unlock(self: T) -> T:
LockableImpl.unlock(self)
PositionableImpl._unlock(self)
return self
View File

@ -11,7 +11,6 @@ from ..repetition import Repetition
from ..traits import RotatableImpl
from ..utils import is_scalar, get_bit, normalize_mirror, layer_t, AutoSlots
from ..utils import annotations_t
from ..traits import LockableImpl
# Loaded on use:
# from freetype import Face
@ -74,10 +73,8 @@ class Text(RotatableImpl, Shape, metaclass=AutoSlots):
dose: float = 1.0,
repetition: Optional[Repetition] = None,
annotations: Optional[annotations_t] = None,
locked: bool = False,
raw: bool = False,
) -> None:
LockableImpl.unlock(self)
self.identifier = ()
if raw:
assert(isinstance(offset, numpy.ndarray))
@ -102,16 +99,13 @@ class Text(RotatableImpl, Shape, metaclass=AutoSlots):
self.repetition = repetition
self.annotations = annotations if annotations is not None else {}
self.font_path = font_path
self.set_locked(locked)
def __deepcopy__(self, memo: Dict = None) -> 'Text':
memo = {} if memo is None else memo
new = copy.copy(self)
Shape.unlock(new)
new._offset = self._offset.copy()
new._mirrored = copy.deepcopy(self._mirrored, memo)
new._annotations = copy.deepcopy(self._annotations)
new.set_locked(self.locked)
return new
def to_polygons(
@ -259,19 +253,8 @@ def get_char_as_polygons(
return polygons, advance
def lock(self) -> 'Text':
self.mirrored.flags.writeable = False
Shape.lock(self)
return self
def unlock(self) -> 'Text':
Shape.unlock(self)
self.mirrored.flags.writeable = True
return self
def __repr__(self) -> str:
rotation = f'{self.rotation*180/pi:g}' if self.rotation != 0 else ''
dose = f' d{self.dose:g}' if self.dose != 1 else ''
locked = ' L' if self.locked else ''
mirrored = ' m{:d}{:d}'.format(*self.mirrored) if self.mirrored.any() else ''
return f'<TextShape "{self.string}" l{self.layer} o{self.offset} h{self.height:g}{rotation}{mirrored}{dose}{locked}>'
return f'<TextShape "{self.string}" l{self.layer} o{self.offset} h{self.height:g}{rotation}{mirrored}{dose}>'
View File

@ -4,7 +4,7 @@
"""
#TODO more top-level documentation
from typing import Dict, Tuple, Optional, Sequence, TYPE_CHECKING, Any, TypeVar
from typing import Dict, Tuple, Optional, Sequence, Mapping, TYPE_CHECKING, Any, TypeVar
import copy
import numpy
@ -14,9 +14,10 @@ from numpy.typing import NDArray, ArrayLike
from .error import PatternError
from .utils import is_scalar, AutoSlots, annotations_t
from .repetition import Repetition
from .traits import (PositionableImpl, DoseableImpl, RotatableImpl, ScalableImpl,
Mirrorable, PivotableImpl, Copyable, LockableImpl, RepeatableImpl,
AnnotatableImpl)
from .traits import (
PositionableImpl, DoseableImpl, RotatableImpl, ScalableImpl,
Mirrorable, PivotableImpl, Copyable, RepeatableImpl, AnnotatableImpl,
)
if TYPE_CHECKING:
@ -27,19 +28,16 @@ S = TypeVar('S', bound='SubPattern')
class SubPattern(PositionableImpl, DoseableImpl, RotatableImpl, ScalableImpl, Mirrorable,
PivotableImpl, Copyable, RepeatableImpl, LockableImpl, AnnotatableImpl,
PivotableImpl, Copyable, RepeatableImpl, AnnotatableImpl,
metaclass=AutoSlots):
"""
SubPattern provides basic support for nesting Pattern objects within each other, by adding
offset, rotation, scaling, and associated methods.
"""
__slots__ = ('_pattern',
'_mirrored',
'identifier',
)
__slots__ = ('_target', '_mirrored', 'identifier')
_pattern: Optional['Pattern']
""" The `Pattern` being instanced """
_target: Optional[str]
""" The name of the `Pattern` being instanced """
_mirrored: NDArray[numpy.bool_]
""" Whether to mirror the instance across the x and/or y axes. """
@ -49,7 +47,7 @@ class SubPattern(PositionableImpl, DoseableImpl, RotatableImpl, ScalableImpl, Mi
def __init__(
self,
pattern: Optional['Pattern'],
target: Optional[str],
*,
offset: ArrayLike = (0.0, 0.0),
rotation: float = 0.0,
@ -58,24 +56,21 @@ class SubPattern(PositionableImpl, DoseableImpl, RotatableImpl, ScalableImpl, Mi
scale: float = 1.0,
repetition: Optional[Repetition] = None,
annotations: Optional[annotations_t] = None,
locked: bool = False,
identifier: Tuple[Any, ...] = (),
) -> None:
"""
Args:
pattern: Pattern to reference.
target: Name of the Pattern to reference.
offset: (x, y) offset applied to the referenced pattern. Not affected by rotation etc.
rotation: Rotation (radians, counterclockwise) relative to the referenced pattern's (0, 0).
mirrored: Whether to mirror the referenced pattern across its x and y axes.
dose: Scaling factor applied to the dose.
scale: Scaling factor applied to the pattern's geometry.
repetition: TODO
locked: Whether the `SubPattern` is locked after initialization.
repetition: `Repetition` object, default `None`
identifier: Arbitrary tuple, used internally by some `masque` functions.
"""
LockableImpl.unlock(self)
self.identifier = identifier
self.pattern = pattern
self.target = target
self.offset = offset
self.rotation = rotation
self.dose = dose
@ -85,41 +80,37 @@ class SubPattern(PositionableImpl, DoseableImpl, RotatableImpl, ScalableImpl, Mi
self.mirrored = mirrored
self.repetition = repetition
self.annotations = annotations if annotations is not None else {}
self.set_locked(locked)
def __copy__(self) -> 'SubPattern':
new = SubPattern(pattern=self.pattern,
offset=self.offset.copy(),
rotation=self.rotation,
dose=self.dose,
scale=self.scale,
mirrored=self.mirrored.copy(),
repetition=copy.deepcopy(self.repetition),
annotations=copy.deepcopy(self.annotations),
locked=self.locked)
new = SubPattern(
target=self.target,
offset=self.offset.copy(),
rotation=self.rotation,
dose=self.dose,
scale=self.scale,
mirrored=self.mirrored.copy(),
repetition=copy.deepcopy(self.repetition),
annotations=copy.deepcopy(self.annotations),
)
return new
def __deepcopy__(self, memo: Dict = None) -> 'SubPattern':
memo = {} if memo is None else memo
new = copy.copy(self)
LockableImpl.unlock(new)
new.pattern = copy.deepcopy(self.pattern, memo)
new.repetition = copy.deepcopy(self.repetition, memo)
new.annotations = copy.deepcopy(self.annotations, memo)
new.set_locked(self.locked)
return new
# pattern property
# target property
@property
def pattern(self) -> Optional['Pattern']:
return self._pattern
def target(self) -> Optional[str]:
return self._target
@pattern.setter
def pattern(self, val: Optional['Pattern']) -> None:
from .pattern import Pattern
if val is not None and not isinstance(val, Pattern):
raise PatternError(f'Provided pattern {val} is not a Pattern object or None!')
self._pattern = val
@target.setter
def target(self, val: Optional[str]) -> None:
if val is not None and not isinstance(val, str):
raise PatternError(f'Provided target {val} is not a str or None!')
self._target = val
# Mirrored property
@property
@ -132,14 +123,31 @@ class SubPattern(PositionableImpl, DoseableImpl, RotatableImpl, ScalableImpl, Mi
raise PatternError('Mirrored must be a 2-element list of booleans')
self._mirrored = numpy.array(val, dtype=bool, copy=True)
def as_pattern(self) -> 'Pattern':
def as_pattern(
self,
*,
pattern: Optional['Pattern'] = None,
library: Optional[Mapping[str, 'Pattern']] = None,
) -> 'Pattern':
"""
Args:
pattern: Pattern object to transform
library: A str->Pattern mapping, used instead of `pattern`. Must contain
`self.target`.
Returns:
A copy of self.pattern which has been scaled, rotated, etc. according to this
`SubPattern`'s properties.
A copy of the referenced Pattern which has been scaled, rotated, etc.
according to this `SubPattern`'s properties.
"""
assert(self.pattern is not None)
pattern = self.pattern.deepcopy().deepunlock()
if pattern is None:
if library is None:
raise PatternError('as_pattern() must be given a pattern or library.')
assert(self.target is not None)
pattern = library[self.target]
pattern = pattern.deepcopy()
if self.scale != 1:
pattern.scale_by(self.scale)
if numpy.any(self.mirrored):
@ -152,7 +160,7 @@ class SubPattern(PositionableImpl, DoseableImpl, RotatableImpl, ScalableImpl, Mi
pattern.scale_element_doses(self.dose)
if self.repetition is not None:
combined = type(pattern)(name='__repetition__')
combined = type(pattern)()
for dd in self.repetition.displacements:
temp_pat = pattern.deepcopy()
temp_pat.translate_elements(dd)
@ -174,75 +182,33 @@ class SubPattern(PositionableImpl, DoseableImpl, RotatableImpl, ScalableImpl, Mi
self.repetition.mirror(axis)
return self
def get_bounds(self) -> Optional[NDArray[numpy.float64]]:
def get_bounds(
self,
*,
pattern: Optional['Pattern'] = None,
library: Optional[Mapping[str, 'Pattern']] = None,
) -> Optional[NDArray[numpy.float64]]:
"""
Return a `numpy.ndarray` containing `[[x_min, y_min], [x_max, y_max]]`, corresponding to the
extent of the `SubPattern` in each dimension.
Returns `None` if the contained `Pattern` is empty.
Args:
library: Name-to-Pattern mapping used to resolve `self.target`.
Returns:
`[[x_min, y_min], [x_max, y_max]]` or `None`
"""
if self.pattern is None:
if pattern is None and library is None:
raise PatternError('get_bounds() must be given a pattern or library.')
if pattern is None and self.target is None:
return None
return self.as_pattern().get_bounds()
def lock(self: S) -> S:
"""
Lock the SubPattern, disallowing changes
Returns:
self
"""
self.mirrored.flags.writeable = False
PositionableImpl._lock(self)
LockableImpl.lock(self)
return self
def unlock(self: S) -> S:
"""
Unlock the SubPattern
Returns:
self
"""
LockableImpl.unlock(self)
PositionableImpl._unlock(self)
self.mirrored.flags.writeable = True
return self
def deeplock(self: S) -> S:
"""
Recursively lock the SubPattern and its contained pattern
Returns:
self
"""
assert(self.pattern is not None)
self.lock()
self.pattern.deeplock()
return self
def deepunlock(self: S) -> S:
"""
Recursively unlock the SubPattern and its contained pattern
This is dangerous unless you have just performed a deepcopy, since
the subpattern and its components may be used in more than one place!
Returns:
self
"""
assert(self.pattern is not None)
self.unlock()
self.pattern.deepunlock()
return self
return self.as_pattern(pattern=pattern, library=library).get_bounds(library)
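
Sketch of the new call pattern (with `library` a plain `{name: Pattern}` dict; `numpy.pi` used just to get a 90-degree rotation):

    sp = SubPattern('unit_cell', offset=(100, 0), rotation=numpy.pi / 2)
    bbox = sp.get_bounds(library=library)      # resolves 'unit_cell' via the mapping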
def __repr__(self) -> str:
name = self.pattern.name if self.pattern is not None else None
name = f'"{self.target}"' if self.target is not None else None
rotation = f' r{self.rotation*180/pi:g}' if self.rotation != 0 else ''
scale = f' d{self.scale:g}' if self.scale != 1 else ''
mirrored = ' m{:d}{:d}'.format(*self.mirrored) if self.mirrored.any() else ''
dose = f' d{self.dose:g}' if self.dose != 1 else ''
locked = ' L' if self.locked else ''
return f'<SubPattern "{name}" at {self.offset}{rotation}{scale}{mirrored}{dose}{locked}>'
return f'<SubPattern {name} at {self.offset}{rotation}{scale}{mirrored}{dose}>'


@ -9,5 +9,4 @@ from .repeatable import Repeatable, RepeatableImpl
from .scalable import Scalable, ScalableImpl
from .mirrorable import Mirrorable
from .copyable import Copyable
from .lockable import Lockable, LockableImpl
from .annotatable import Annotatable, AnnotatableImpl
View File

@ -44,9 +44,6 @@ class AnnotatableImpl(Annotatable, metaclass=ABCMeta):
@property
def annotations(self) -> annotations_t:
return self._annotations
# # TODO: Find a way to make sure the subclass implements Lockable without dealing with diamond inheritance or this extra hasattr
# if hasattr(self, 'is_locked') and self.is_locked():
# return MappingProxyType(self._annotations)
@annotations.setter
def annotations(self, annotations: annotations_t):
View File

@ -120,23 +120,3 @@ class PositionableImpl(Positionable, metaclass=ABCMeta):
def translate(self: I, offset: ArrayLike) -> I:
self._offset += offset # type: ignore # NDArray += ArrayLike should be fine??
return self
def _lock(self: I) -> I:
"""
Lock the entity, disallowing further changes
Returns:
self
"""
self._offset.flags.writeable = False
return self
def _unlock(self: I) -> I:
"""
Unlock the entity
Returns:
self
"""
self._offset.flags.writeable = True
return self
View File

@ -5,6 +5,7 @@ from .types import layer_t, annotations_t
from .array import is_scalar
from .autoslots import AutoSlots
from .deferreddict import DeferredDict
from .bitwise import get_bit, set_bit
from .vertices import (
View File

@ -1,7 +1,7 @@
"""
2D bin-packing
"""
from typing import Tuple, List, Set, Sequence, Callable
from typing import Tuple, List, Set, Sequence, Callable, Mapping
import numpy
from numpy.typing import NDArray, ArrayLike
@ -11,16 +11,18 @@ from ..pattern import Pattern
from ..subpattern import SubPattern
def pack_patterns(patterns: Sequence[Pattern],
regions: numpy.ndarray,
spacing: Tuple[float, float],
presort: bool = True,
allow_rejects: bool = True,
packer: Callable = maxrects_bssf,
) -> Tuple[Pattern, List[Pattern]]:
def pack_patterns(
library: Mapping[str, Pattern],
patterns: Sequence[str],
regions: numpy.ndarray,
spacing: Tuple[float, float],
presort: bool = True,
allow_rejects: bool = True,
packer: Callable = maxrects_bssf,
) -> Tuple[Pattern, List[str]]:
half_spacing = numpy.array(spacing) / 2
bounds = [pp.get_bounds() for pp in patterns]
bounds = [library[pp].get_bounds(library) for pp in patterns]
sizes = [bb[1] - bb[0] + spacing if bb is not None else spacing for bb in bounds]
offsets = [half_spacing - bb[0] if bb is not None else (0, 0) for bb in bounds]