snapshot 2020-11-01 16:48:03.486536

Jan Petykiewicz 2020-11-01 16:48:03 -08:00
commit 75e1caec48
46 changed files with 3704 additions and 1725 deletions

29
.flake8 Normal file

@ -0,0 +1,29 @@
[flake8]
ignore =
# E501 line too long
E501,
# W391 newlines at EOF
W391,
# E241 multiple spaces after comma
E241,
# E302 expected 2 newlines
E302,
# W503 line break before binary operator (to be deprecated)
W503,
# E265 block comment should start with '# '
E265,
# E123 closing bracket does not match indentation of opening bracket's line
E123,
# E124 closing bracket does not match visual indentation
E124,
# E221 multiple spaces before operator
E221,
# E201 whitespace after '['
E201,
# E741 ambiguous variable name 'I'
E741,
per-file-ignores =
# F401 import without use
*/__init__.py: F401,

1
.gitignore vendored

@ -15,5 +15,6 @@ dist/
*.gds.gz *.gds.gz
*.svg *.svg
*.oas *.oas
*.oas.gz
*.dxf *.dxf
*.dxf.gz *.dxf.gz

README.md

@@ -7,23 +7,26 @@ with some vectorized element types (eg. circles, not just polygons), better supp
 E-beam doses, and the ability to output to multiple formats.
 - [Source repository](https://mpxd.net/code/jan/masque)
-- [PyPi](https://pypi.org/project/masque)
+- [PyPI](https://pypi.org/project/masque)
 ## Installation
 Requirements:
-* python >= 3.5 (written and tested with 3.6)
+* python >= 3.7 (written and tested with 3.8)
 * numpy
 * matplotlib (optional, used for `visualization` functions and `text`)
 * python-gdsii (optional, used for `gdsii` i/o)
+* klamath (optional, used for `gdsii` i/o and library management)
+* ezdxf (optional, used for `dxf` i/o)
+* fatamorgana (optional, used for `oasis` i/o)
 * svgwrite (optional, used for `svg` output)
 * freetype (optional, used for `text`)
 Install with pip:
 ```bash
-pip3 install 'masque[visualization,gdsii,svg,text]'
+pip3 install 'masque[visualization,gdsii,oasis,dxf,svg,text,klamath]'
 ```
 Alternatively, install from git
@@ -31,9 +34,19 @@ Alternatively, install from git
 pip3 install git+https://mpxd.net/code/jan/masque.git@release
 ```
+## Translation
+- `Pattern`: OASIS or GDS "Cell", DXF "Block"
+- `SubPattern`: GDS "AREF/SREF", OASIS "Placement"
+- `Shape`: OASIS or GDS "Geometry element", DXF "LWPolyline" or "Polyline"
+- `repetition`: OASIS "repetition". GDS "AREF" is a `SubPattern` combined with a `Grid` repetition.
+- `Label`: OASIS, GDS, DXF "Text".
+- `annotation`: OASIS or GDS "property"
 ## TODO
-* Polygon de-embedding
-* Construct from bitmap
-* Boolean operations on polygons (using pyclipper)
-* Output to OASIS (using fatamorgana)
+* Better interface for polygon operations (e.g. with `pyclipper`)
+    - de-embedding
+    - boolean ops
+* Construct polygons from bitmap using `skimage.find_contours`
+* Deal with shape repetitions for dxf, svg
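
To make this mapping concrete, here is a small illustrative sketch (not from the repository; filenames, layers, and counts are made up, and the unit arguments follow the example scripts in this commit) that builds one cell plus a gridded placement and writes it with the `klamath` and `oasis` backends:

```python
import numpy
import masque.file.klamath
import masque.file.oasis
from masque import Pattern, SubPattern, Label, shapes
from masque.repetition import Grid

# One masque `Pattern` corresponds to one GDS/OASIS cell (or DXF block).
cell = Pattern('unit_cell')
cell.shapes.append(shapes.Polygon(layer=(1, 0),        # (layer, datatype)
                                  vertices=numpy.array([[0, 0], [1, 0], [1, 1], [0, 1.0]])))
cell.labels.append(Label(string='unit cell', offset=(0.5, 0.5), layer=(1, 2)))

# A `SubPattern` with a `Grid` repetition corresponds to a GDS AREF
# (an OASIS placement plus repetition); annotations become properties.
top = Pattern('top')
top.subpatterns.append(SubPattern(cell, offset=(0, 0),
                                  repetition=Grid(a_vector=[2, 0], b_vector=[0, 2],
                                                  a_count=5, b_count=5),
                                  annotations={'1': ['example property']}))

masque.file.klamath.writefile((top, cell), 'translation.gds.gz', 1e-9, 1e-3)
# layer_map is assumed optional here since the layers above are already numeric.
masque.file.oasis.writefile((top, cell), 'translation.oas.gz', 1000)
```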


@@ -3,8 +3,7 @@
 import numpy
 import masque
-import masque.file.gdsii
-import masque.file.dxf
+import masque.file.klamath
 from masque import shapes
@@ -21,15 +20,11 @@ def main():
     pat.labels.append(masque.Label(string='grating centerline', offset=(1, 0), layer=(1, 2)))
     pat.scale_by(1000)
-    # pat.visualize()
+    pat.visualize()
     pat2 = pat.copy()
     pat2.name = 'grating2'
-    masque.file.gdsii.writefile((pat, pat2), 'out.gds.gz', 1e-9, 1e-3)
-    masque.file.dxf.writefile(pat, 'out.dxf.gz')
-    dxf, info = masque.file.dxf.readfile('out.dxf.gz')
-    masque.file.dxf.writefile(dxf, 'reout.dxf.gz')
+    masque.file.klamath.writefile((pat, pat2), 'out.gds.gz', 1e-9, 1e-3)
 if __name__ == '__main__':


@ -3,8 +3,11 @@ from numpy import pi
import masque import masque
import masque.file.gdsii import masque.file.gdsii
import masque.file.klamath
import masque.file.dxf import masque.file.dxf
from masque import shapes, Pattern, SubPattern, GridRepetition import masque.file.oasis
from masque import shapes, Pattern, SubPattern
from masque.repetition import Grid
from pprint import pprint from pprint import pprint
@ -15,17 +18,18 @@ def main():
pat.shapes.append(shapes.Arc( pat.shapes.append(shapes.Arc(
radii=(rmin, rmin), radii=(rmin, rmin),
width=0.1, width=0.1,
angles=(0*-numpy.pi/4, numpy.pi/4) angles=(0*-numpy.pi/4, numpy.pi/4),
annotations={'1': ['blah']},
)) ))
pat.scale_by(1000) pat.scale_by(1000)
pat.visualize() # pat.visualize()
pat2 = pat.copy() pat2 = pat.copy()
pat2.name = 'grating2' pat2.name = 'grating2'
pat3 = Pattern('sref_test') pat3 = Pattern('sref_test')
pat3.subpatterns = [ pat3.subpatterns = [
SubPattern(pat, offset=(1e5, 3e5)), SubPattern(pat, offset=(1e5, 3e5), annotations={'4': ['Hello I am the base subpattern']}),
SubPattern(pat, offset=(2e5, 3e5), rotation=pi/3), SubPattern(pat, offset=(2e5, 3e5), rotation=pi/3),
SubPattern(pat, offset=(3e5, 3e5), rotation=pi/2), SubPattern(pat, offset=(3e5, 3e5), rotation=pi/2),
SubPattern(pat, offset=(4e5, 3e5), rotation=pi), SubPattern(pat, offset=(4e5, 3e5), rotation=pi),
@ -51,45 +55,49 @@ def main():
pprint(pat3.subpatterns) pprint(pat3.subpatterns)
pprint(pat.shapes) pprint(pat.shapes)
args = { rep = Grid(a_vector=[1e4, 0],
'pattern': pat, b_vector=[0, 1.5e4],
'a_vector': [1e4, 0], a_count=3,
'b_vector': [0, 1.5e4], b_count=2,)
'a_count': 3,
'b_count': 2,
}
pat4 = Pattern('aref_test') pat4 = Pattern('aref_test')
pat4.subpatterns = [ pat4.subpatterns = [
GridRepetition(**args, offset=(1e5, 3e5)), SubPattern(pat, repetition=rep, offset=(1e5, 3e5)),
GridRepetition(**args, offset=(2e5, 3e5), rotation=pi/3), SubPattern(pat, repetition=rep, offset=(2e5, 3e5), rotation=pi/3),
GridRepetition(**args, offset=(3e5, 3e5), rotation=pi/2), SubPattern(pat, repetition=rep, offset=(3e5, 3e5), rotation=pi/2),
GridRepetition(**args, offset=(4e5, 3e5), rotation=pi), SubPattern(pat, repetition=rep, offset=(4e5, 3e5), rotation=pi),
GridRepetition(**args, offset=(5e5, 3e5), rotation=3*pi/2), SubPattern(pat, repetition=rep, offset=(5e5, 3e5), rotation=3*pi/2),
GridRepetition(**args, mirrored=(True, False), offset=(1e5, 4e5)), SubPattern(pat, repetition=rep, mirrored=(True, False), offset=(1e5, 4e5)),
GridRepetition(**args, mirrored=(True, False), offset=(2e5, 4e5), rotation=pi/3), SubPattern(pat, repetition=rep, mirrored=(True, False), offset=(2e5, 4e5), rotation=pi/3),
GridRepetition(**args, mirrored=(True, False), offset=(3e5, 4e5), rotation=pi/2), SubPattern(pat, repetition=rep, mirrored=(True, False), offset=(3e5, 4e5), rotation=pi/2),
GridRepetition(**args, mirrored=(True, False), offset=(4e5, 4e5), rotation=pi), SubPattern(pat, repetition=rep, mirrored=(True, False), offset=(4e5, 4e5), rotation=pi),
GridRepetition(**args, mirrored=(True, False), offset=(5e5, 4e5), rotation=3*pi/2), SubPattern(pat, repetition=rep, mirrored=(True, False), offset=(5e5, 4e5), rotation=3*pi/2),
GridRepetition(**args, mirrored=(False, True), offset=(1e5, 5e5)), SubPattern(pat, repetition=rep, mirrored=(False, True), offset=(1e5, 5e5)),
GridRepetition(**args, mirrored=(False, True), offset=(2e5, 5e5), rotation=pi/3), SubPattern(pat, repetition=rep, mirrored=(False, True), offset=(2e5, 5e5), rotation=pi/3),
GridRepetition(**args, mirrored=(False, True), offset=(3e5, 5e5), rotation=pi/2), SubPattern(pat, repetition=rep, mirrored=(False, True), offset=(3e5, 5e5), rotation=pi/2),
GridRepetition(**args, mirrored=(False, True), offset=(4e5, 5e5), rotation=pi), SubPattern(pat, repetition=rep, mirrored=(False, True), offset=(4e5, 5e5), rotation=pi),
GridRepetition(**args, mirrored=(False, True), offset=(5e5, 5e5), rotation=3*pi/2), SubPattern(pat, repetition=rep, mirrored=(False, True), offset=(5e5, 5e5), rotation=3*pi/2),
GridRepetition(**args, mirrored=(True, True), offset=(1e5, 6e5)), SubPattern(pat, repetition=rep, mirrored=(True, True), offset=(1e5, 6e5)),
GridRepetition(**args, mirrored=(True, True), offset=(2e5, 6e5), rotation=pi/3), SubPattern(pat, repetition=rep, mirrored=(True, True), offset=(2e5, 6e5), rotation=pi/3),
GridRepetition(**args, mirrored=(True, True), offset=(3e5, 6e5), rotation=pi/2), SubPattern(pat, repetition=rep, mirrored=(True, True), offset=(3e5, 6e5), rotation=pi/2),
GridRepetition(**args, mirrored=(True, True), offset=(4e5, 6e5), rotation=pi), SubPattern(pat, repetition=rep, mirrored=(True, True), offset=(4e5, 6e5), rotation=pi),
GridRepetition(**args, mirrored=(True, True), offset=(5e5, 6e5), rotation=3*pi/2), SubPattern(pat, repetition=rep, mirrored=(True, True), offset=(5e5, 6e5), rotation=3*pi/2),
] ]
masque.file.gdsii.writefile((pat, pat2, pat3, pat4), 'rep.gds.gz', 1e-9, 1e-3) folder = 'layouts/'
masque.file.klamath.writefile((pat, pat2, pat3, pat4), folder + 'rep.gds.gz', 1e-9, 1e-3)
cells = list(masque.file.gdsii.readfile('rep.gds.gz')[0].values()) cells = list(masque.file.klamath.readfile(folder + 'rep.gds.gz')[0].values())
masque.file.gdsii.writefile(cells, 'rerep.gds.gz', 1e-9, 1e-3) masque.file.klamath.writefile(cells, folder + 'rerep.gds.gz', 1e-9, 1e-3)
masque.file.dxf.writefile(pat4, 'rep.dxf.gz') masque.file.dxf.writefile(pat4, folder + 'rep.dxf.gz')
dxf, info = masque.file.dxf.readfile('rep.dxf.gz') dxf, info = masque.file.dxf.readfile(folder + 'rep.dxf.gz')
masque.file.dxf.writefile(dxf, 'rerep.dxf.gz') masque.file.dxf.writefile(dxf, folder + 'rerep.dxf.gz')
layer_map = {'base': (0,0), 'mylabel': (1,2)}
masque.file.oasis.writefile((pat, pat2, pat3, pat4), folder + 'rep.oas.gz', 1000, layer_map=layer_map)
oas, info = masque.file.oasis.readfile(folder + 'rep.oas.gz')
masque.file.oasis.writefile(list(oas.values()), folder + 'rerep.oas.gz', 1000, layer_map=layer_map)
print(info)
if __name__ == '__main__': if __name__ == '__main__':


@@ -1 +1 @@
-1.3
+2.2

masque/__init__.py

@@ -8,22 +8,13 @@
 `Pattern` is a basic object containing a 2D lithography mask, composed of a list of `Shape`
 objects, a list of `Label` objects, and a list of references to other `Patterns` (using
-`SubPattern` and `GridRepetition`).
+`SubPattern`).
 `SubPattern` provides basic support for nesting `Pattern` objects within each other, by adding
-offset, rotation, scaling, and other such properties to a Pattern reference.
-`GridRepetition` provides support for nesting regular arrays of `Pattern` objects.
+offset, rotation, scaling, repetition, and other such properties to a Pattern reference.
 Note that the methods for these classes try to avoid copying wherever possible, so unless
 otherwise noted, assume that arguments are stored by-reference.
-Dependencies:
-- `numpy`
-- `matplotlib` [Pattern.visualize(...)]
-- `python-gdsii` [masque.file.gdsii]
-- `svgwrite` [masque.file.svg]
 """
 import pathlib
@@ -31,9 +22,10 @@ import pathlib
 from .error import PatternError, PatternLockedError
 from .shapes import Shape
 from .label import Label
-from .subpattern import SubPattern, subpattern_t
-from .repetition import GridRepetition
+from .subpattern import SubPattern
 from .pattern import Pattern
+from .utils import layer_t, annotations_t
+from .library import Library
 __author__ = 'Jan Petykiewicz'
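
The by-reference note in the docstring above is worth illustrating; this is a sketch of the assumed behaviour (not code from the repository):

```python
import numpy
from masque import Pattern
from masque.shapes import Polygon

square = Polygon(layer=(0, 0), vertices=numpy.array([[0, 0], [1, 0], [1, 1], [0, 1.0]]))

pat_a = Pattern('a')
pat_b = Pattern('b')
pat_a.shapes.append(square)     # stored by reference, not copied
pat_b.shapes.append(square)     # the same Polygon object is now referenced twice

square.offset = (5, 0)          # moving the shape moves it in both patterns

pat_c = pat_a.copy()            # copy() is assumed shallow w.r.t. the shape objects themselves
pat_d = pat_a.deepcopy()        # deepcopy() when independent mutation is needed
```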

masque/error.py

@ -15,3 +15,12 @@ class PatternLockedError(PatternError):
""" """
def __init__(self): def __init__(self):
PatternError.__init__(self, 'Tried to modify a locked Pattern, subpattern, or shape') PatternError.__init__(self, 'Tried to modify a locked Pattern, subpattern, or shape')
class LibraryError(Exception):
"""
Exception raised by Library classes
"""
pass

masque/file/__init__.py

@ -1,3 +1,4 @@
""" """
Functions for reading from and writing to various file formats. Functions for reading from and writing to various file formats.
""" """

masque/file/dxf.py

@ -1,29 +1,27 @@
""" """
DXF file format readers and writers DXF file format readers and writers
""" """
from typing import List, Any, Dict, Tuple, Callable, Union, Sequence, Iterable, Optional from typing import List, Any, Dict, Tuple, Callable, Union, Sequence, Iterable
import re import re
import io import io
import copy
import base64 import base64
import struct import struct
import logging import logging
import pathlib import pathlib
import gzip import gzip
import numpy
from numpy import pi
import ezdxf import numpy # type: ignore
import ezdxf # type: ignore
from .utils import mangle_name, make_dose_table from .. import Pattern, SubPattern, PatternError, Label, Shape
from .. import Pattern, SubPattern, GridRepetition, PatternError, Label, Shape, subpattern_t
from ..shapes import Polygon, Path from ..shapes import Polygon, Path
from ..utils import rotation_matrix_2d, get_bit, set_bit, vector2, is_scalar, layer_t from ..repetition import Grid
from ..utils import remove_colinear_vertices, normalize_mirror from ..utils import rotation_matrix_2d, layer_t
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
logger.warning('DXF support is experimental and only slightly tested!') logger.warning('DXF support is experimental and only slightly tested!')
@ -32,9 +30,11 @@ DEFAULT_LAYER = 'DEFAULT'
def write(pattern: Pattern, def write(pattern: Pattern,
stream: io.TextIOBase, stream: io.TextIOBase,
*,
modify_originals: bool = False, modify_originals: bool = False,
dxf_version='AC1024', dxf_version='AC1024',
disambiguate_func: Callable[[Iterable[Pattern]], None] = None): disambiguate_func: Callable[[Iterable[Pattern]], None] = None,
) -> None:
""" """
Write a `Pattern` to a DXF file, by first calling `.polygonize()` to change the shapes Write a `Pattern` to a DXF file, by first calling `.polygonize()` to change the shapes
into polygons, and then writing patterns as DXF `Block`s, polygons as `LWPolyline`s, into polygons, and then writing patterns as DXF `Block`s, polygons as `LWPolyline`s,
@ -54,7 +54,7 @@ def write(pattern: Pattern,
If you want pattern polygonized with non-default arguments, just call `pattern.polygonize()` If you want pattern polygonized with non-default arguments, just call `pattern.polygonize()`
prior to calling this function. prior to calling this function.
Only `GridRepetition` objects with manhattan basis vectors are preserved as arrays. Since DXF Only `Grid` repetition objects with manhattan basis vectors are preserved as arrays. Since DXF
rotations apply to basis vectors while `masque`'s rotations do not, the basis vectors of an rotations apply to basis vectors while `masque`'s rotations do not, the basis vectors of an
array with rotated instances must be manhattan _after_ having a compensating rotation applied. array with rotated instances must be manhattan _after_ having a compensating rotation applied.
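
As an illustrative sketch of that constraint (filenames and geometry are made up): an axis-aligned grid survives as a single DXF array insert with row/column counts, while a skewed grid is broken apart into individual block references when written.

```python
import numpy
import masque.file.dxf
from masque import Pattern, SubPattern
from masque.shapes import Polygon
from masque.repetition import Grid

unit = Pattern('unit')
unit.shapes.append(Polygon(layer=0, vertices=numpy.array([[0, 0], [10, 0], [10, 10], [0, 10.0]])))

top = Pattern('top')
# Manhattan basis vectors: preserved as a DXF array (column/row counts and spacings).
top.subpatterns.append(SubPattern(unit, offset=(0, 0),
                                  repetition=Grid(a_vector=[20, 0], b_vector=[0, 30],
                                                  a_count=4, b_count=3)))
# Non-manhattan basis vectors: flattened into one block reference per grid element.
top.subpatterns.append(SubPattern(unit, offset=(200, 0),
                                  repetition=Grid(a_vector=[20, 5], b_vector=[0, 30],
                                                  a_count=4, b_count=3)))

masque.file.dxf.writefile(top, 'grids.dxf.gz')
pattern, info = masque.file.dxf.readfile('grids.dxf.gz')   # returns (Pattern, library_info)
```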
@ -71,6 +71,7 @@ def write(pattern: Pattern,
#TODO consider supporting DXF arcs? #TODO consider supporting DXF arcs?
if disambiguate_func is None: if disambiguate_func is None:
disambiguate_func = disambiguate_pattern_names disambiguate_func = disambiguate_pattern_names
assert(disambiguate_func is not None)
if not modify_originals: if not modify_originals:
pattern = pattern.deepcopy().deepunlock() pattern = pattern.deepcopy().deepunlock()
@ -102,7 +103,7 @@ def writefile(pattern: Pattern,
filename: Union[str, pathlib.Path], filename: Union[str, pathlib.Path],
*args, *args,
**kwargs, **kwargs,
): ) -> None:
""" """
Wrapper for `dxf.write()` that takes a filename or path instead of a stream. Wrapper for `dxf.write()` that takes a filename or path instead of a stream.
@ -121,14 +122,13 @@ def writefile(pattern: Pattern,
open_func = open open_func = open
with open_func(path, mode='wt') as stream: with open_func(path, mode='wt') as stream:
results = write(pattern, stream, *args, **kwargs) write(pattern, stream, *args, **kwargs)
return results
def readfile(filename: Union[str, pathlib.Path], def readfile(filename: Union[str, pathlib.Path],
*args, *args,
**kwargs, **kwargs,
) -> Tuple[Dict[str, Pattern], Dict[str, Any]]: ) -> Tuple[Pattern, Dict[str, Any]]:
""" """
Wrapper for `dxf.read()` that takes a filename or path instead of a stream. Wrapper for `dxf.read()` that takes a filename or path instead of a stream.
@ -152,7 +152,7 @@ def readfile(filename: Union[str, pathlib.Path],
def read(stream: io.TextIOBase, def read(stream: io.TextIOBase,
clean_vertices: bool = True, clean_vertices: bool = True,
) -> Tuple[Dict[str, Pattern], Dict[str, Any]]: ) -> Tuple[Pattern, Dict[str, Any]]:
""" """
Read a dxf file and translate it into a dict of `Pattern` objects. DXF `Block`s are Read a dxf file and translate it into a dict of `Pattern` objects. DXF `Block`s are
translated into `Pattern` objects; `LWPolyline`s are translated into polygons, and `Insert`s translated into `Pattern` objects; `LWPolyline`s are translated into polygons, and `Insert`s
@ -190,35 +190,36 @@ def read(stream: io.TextIOBase,
return pat, library_info return pat, library_info
def _read_block(block, clean_vertices): def _read_block(block, clean_vertices: bool) -> Pattern:
pat = Pattern(block.name) pat = Pattern(block.name)
for element in block: for element in block:
eltype = element.dxftype() eltype = element.dxftype()
if eltype in ('POLYLINE', 'LWPOLYLINE'): if eltype in ('POLYLINE', 'LWPOLYLINE'):
if eltype == 'LWPOLYLINE': if eltype == 'LWPOLYLINE':
points = numpy.array(element.lwpoints) points = numpy.array(tuple(element.lwpoints()))
else: else:
points = numpy.array(element.points) points = numpy.array(tuple(element.points()))
attr = element.dxfattribs() attr = element.dxfattribs()
args = {'layer': attr.get('layer', DEFAULT_LAYER), layer = attr.get('layer', DEFAULT_LAYER)
}
if points.shape[1] == 2: if points.shape[1] == 2:
shape = Polygon(**args) raise PatternError('Invalid or unimplemented polygon?')
#shape = Polygon(layer=layer)
elif points.shape[1] > 2: elif points.shape[1] > 2:
if (points[0, 2] != points[:, 2]).any(): if (points[0, 2] != points[:, 2]).any():
raise PatternError('PolyLine has non-constant width (not yet representable in masque!)') raise PatternError('PolyLine has non-constant width (not yet representable in masque!)')
elif points.shape[1] == 4 and (points[:, 3] != 0).any(): elif points.shape[1] == 4 and (points[:, 3] != 0).any():
raise PatternError('LWPolyLine has bulge (not yet representable in masque!)') raise PatternError('LWPolyLine has bulge (not yet representable in masque!)')
else:
width = points[0, 2]
if width == 0:
width = attr.get('const_width', 0)
if width == 0 and numpy.array_equal(points[0], points[-1]): width = points[0, 2]
shape = Polygon(**args, vertices=points[:-1, :2]) if width == 0:
else: width = attr.get('const_width', 0)
shape = Path(**args, width=width, vertices=points[:, :2])
shape: Union[Path, Polygon]
if width == 0 and numpy.array_equal(points[0], points[-1]):
shape = Polygon(layer=layer, vertices=points[:-1, :2])
else:
shape = Path(layer=layer, width=width, vertices=points[:, :2])
if clean_vertices: if clean_vertices:
try: try:
@ -233,10 +234,10 @@ def _read_block(block, clean_vertices):
'layer': element.dxfattribs().get('layer', DEFAULT_LAYER), 'layer': element.dxfattribs().get('layer', DEFAULT_LAYER),
} }
string = element.dxfattribs().get('text', '') string = element.dxfattribs().get('text', '')
height = element.dxfattribs().get('height', 0) # height = element.dxfattribs().get('height', 0)
if height != 0: # if height != 0:
logger.warning('Interpreting DXF TEXT as a label despite nonzero height. ' # logger.warning('Interpreting DXF TEXT as a label despite nonzero height. '
'This could be changed in the future by setting a font path in the masque DXF code.') # 'This could be changed in the future by setting a font path in the masque DXF code.')
pat.labels.append(Label(string=string, **args)) pat.labels.append(Label(string=string, **args))
# else: # else:
# pat.shapes.append(Text(string=string, height=height, font_path=????)) # pat.shapes.append(Text(string=string, height=height, font_path=????))
@ -248,7 +249,7 @@ def _read_block(block, clean_vertices):
logger.warning('Masque does not support per-axis scaling; using x-scaling only!') logger.warning('Masque does not support per-axis scaling; using x-scaling only!')
scale = abs(xscale) scale = abs(xscale)
mirrored = (yscale < 0, xscale < 0) mirrored = (yscale < 0, xscale < 0)
rotation = attr.get('rotation', 0) * pi/180 rotation = numpy.deg2rad(attr.get('rotation', 0))
offset = attr.get('insert', (0, 0, 0))[:2] offset = attr.get('insert', (0, 0, 0))[:2]
@ -262,20 +263,18 @@ def _read_block(block, clean_vertices):
} }
if 'column_count' in attr: if 'column_count' in attr:
args['a_vector'] = (attr['column_spacing'], 0) args['repetition'] = Grid(a_vector=(attr['column_spacing'], 0),
args['b_vector'] = (0, attr['row_spacing']) b_vector=(0, attr['row_spacing']),
args['a_count'] = attr['column_count'] a_count=attr['column_count'],
args['b_count'] = attr['row_count'] b_count=attr['row_count'])
pat.subpatterns.append(GridRepetition(**args)) pat.subpatterns.append(SubPattern(**args))
else:
pat.subpatterns.append(SubPattern(**args))
else: else:
logger.warning(f'Ignoring DXF element {element.dxftype()} (not implemented).') logger.warning(f'Ignoring DXF element {element.dxftype()} (not implemented).')
return pat return pat
def _subpatterns_to_refs(block: Union[ezdxf.layouts.BlockLayout, ezdxf.layouts.Modelspace], def _subpatterns_to_refs(block: Union[ezdxf.layouts.BlockLayout, ezdxf.layouts.Modelspace],
subpatterns: List[subpattern_t]): subpatterns: List[SubPattern]) -> None:
for subpat in subpatterns: for subpat in subpatterns:
if subpat.pattern is None: if subpat.pattern is None:
continue continue
@ -288,20 +287,23 @@ def _subpatterns_to_refs(block: Union[ezdxf.layouts.BlockLayout, ezdxf.layouts.M
'rotation': rotation, 'rotation': rotation,
} }
if isinstance(subpat, GridRepetition): rep = subpat.repetition
a = subpat.a_vector if rep is None:
b = subpat.b_vector if subpat.b_vector is not None else numpy.zeros(2) block.add_blockref(encoded_name, subpat.offset, dxfattribs=attribs)
elif isinstance(rep, Grid):
a = rep.a_vector
b = rep.b_vector if rep.b_vector is not None else numpy.zeros(2)
rotated_a = rotation_matrix_2d(-subpat.rotation) @ a rotated_a = rotation_matrix_2d(-subpat.rotation) @ a
rotated_b = rotation_matrix_2d(-subpat.rotation) @ b rotated_b = rotation_matrix_2d(-subpat.rotation) @ b
if rotated_a[1] == 0 and rotated_b[0] == 0: if rotated_a[1] == 0 and rotated_b[0] == 0:
attribs['column_count'] = subpat.a_count attribs['column_count'] = rep.a_count
attribs['row_count'] = subpat.b_count attribs['row_count'] = rep.b_count
attribs['column_spacing'] = rotated_a[0] attribs['column_spacing'] = rotated_a[0]
attribs['row_spacing'] = rotated_b[1] attribs['row_spacing'] = rotated_b[1]
block.add_blockref(encoded_name, subpat.offset, dxfattribs=attribs) block.add_blockref(encoded_name, subpat.offset, dxfattribs=attribs)
elif rotated_a[0] == 0 and rotated_b[1] == 0: elif rotated_a[0] == 0 and rotated_b[1] == 0:
attribs['column_count'] = subpat.b_count attribs['column_count'] = rep.b_count
attribs['row_count'] = subpat.a_count attribs['row_count'] = rep.a_count
attribs['column_spacing'] = rotated_b[0] attribs['column_spacing'] = rotated_b[0]
attribs['row_spacing'] = rotated_a[1] attribs['row_spacing'] = rotated_a[1]
block.add_blockref(encoded_name, subpat.offset, dxfattribs=attribs) block.add_blockref(encoded_name, subpat.offset, dxfattribs=attribs)
@ -309,11 +311,11 @@ def _subpatterns_to_refs(block: Union[ezdxf.layouts.BlockLayout, ezdxf.layouts.M
#NOTE: We could still do non-manhattan (but still orthogonal) grids by getting #NOTE: We could still do non-manhattan (but still orthogonal) grids by getting
# creative with counter-rotated nested patterns, but probably not worth it. # creative with counter-rotated nested patterns, but probably not worth it.
# Instead, just break appart the grid into individual elements: # Instead, just break appart the grid into individual elements:
for aa in numpy.arange(subpat.a_count): for dd in rep.displacements:
for bb in numpy.arange(subpat.b_count): block.add_blockref(encoded_name, subpat.offset + dd, dxfattribs=attribs)
block.add_blockref(encoded_name, subpat.offset + aa * a + bb * b, dxfattribs=attribs)
else: else:
block.add_blockref(encoded_name, subpat.offset, dxfattribs=attribs) for dd in rep.displacements:
block.add_blockref(encoded_name, subpat.offset + dd, dxfattribs=attribs)
def _shapes_to_elements(block: Union[ezdxf.layouts.BlockLayout, ezdxf.layouts.Modelspace], def _shapes_to_elements(block: Union[ezdxf.layouts.BlockLayout, ezdxf.layouts.Modelspace],
@ -330,7 +332,7 @@ def _shapes_to_elements(block: Union[ezdxf.layouts.BlockLayout, ezdxf.layouts.Mo
def _labels_to_texts(block: Union[ezdxf.layouts.BlockLayout, ezdxf.layouts.Modelspace], def _labels_to_texts(block: Union[ezdxf.layouts.BlockLayout, ezdxf.layouts.Modelspace],
labels: List[Label]): labels: List[Label]) -> None:
for label in labels: for label in labels:
attribs = {'layer': _mlayer2dxf(label.layer)} attribs = {'layer': _mlayer2dxf(label.layer)}
xy = label.offset xy = label.offset
@ -347,14 +349,14 @@ def _mlayer2dxf(layer: layer_t) -> str:
raise PatternError(f'Unknown layer type: {layer} ({type(layer)})') raise PatternError(f'Unknown layer type: {layer} ({type(layer)})')
def disambiguate_pattern_names(patterns, def disambiguate_pattern_names(patterns: Sequence[Pattern],
max_name_length: int = 32, max_name_length: int = 32,
suffix_length: int = 6, suffix_length: int = 6,
dup_warn_filter: Callable[[str,], bool] = None, # If returns False, don't warn about this name dup_warn_filter: Callable[[str], bool] = None, # If returns False, don't warn about this name
): ) -> None:
used_names = [] used_names = []
for pat in patterns: for pat in patterns:
sanitized_name = re.compile('[^A-Za-z0-9_\?\$]').sub('_', pat.name) sanitized_name = re.compile(r'[^A-Za-z0-9_\?\$]').sub('_', pat.name)
i = 0 i = 0
suffixed_name = sanitized_name suffixed_name = sanitized_name
@ -365,17 +367,18 @@ def disambiguate_pattern_names(patterns,
i += 1 i += 1
if sanitized_name == '': if sanitized_name == '':
logger.warning('Empty pattern name saved as "{}"'.format(suffixed_name)) logger.warning(f'Empty pattern name saved as "{suffixed_name}"')
elif suffixed_name != sanitized_name: elif suffixed_name != sanitized_name:
if dup_warn_filter is None or dup_warn_filter(pat.name): if dup_warn_filter is None or dup_warn_filter(pat.name):
logger.warning('Pattern name "{}" ({}) appears multiple times;\n renaming to "{}"'.format( logger.warning(f'Pattern name "{pat.name}" ({sanitized_name}) appears multiple times;\n'
pat.name, sanitized_name, suffixed_name)) + f' renaming to "{suffixed_name}"')
if len(suffixed_name) == 0: if len(suffixed_name) == 0:
# Should never happen since zero-length names are replaced # Should never happen since zero-length names are replaced
raise PatternError('Zero-length name after sanitize,\n originally "{}"'.format(pat.name)) raise PatternError(f'Zero-length name after sanitize,\n originally "{pat.name}"')
if len(suffixed_name) > max_name_length: if len(suffixed_name) > max_name_length:
raise PatternError('Pattern name "{!r}" length > {} after encode,\n originally "{}"'.format(suffixed_name, max_name_length, pat.name)) raise PatternError(f'Pattern name "{suffixed_name!r}" length > {max_name_length} after encode,\n'
+ f' originally "{pat.name}"')
pat.name = suffixed_name pat.name = suffixed_name
used_names.append(suffixed_name) used_names.append(suffixed_name)

masque/file/gdsii.py

@ -10,53 +10,59 @@ Note that GDSII references follow the same convention as `masque`,
Scaling, rotation, and mirroring apply to individual instances, not grid Scaling, rotation, and mirroring apply to individual instances, not grid
vectors or offsets. vectors or offsets.
Notes:
* absolute positioning is not supported
* PLEX is not supported
* ELFLAGS are not supported
* GDS does not support library- or structure-level annotations
""" """
from typing import List, Any, Dict, Tuple, Callable, Union, Sequence, Iterable, Optional from typing import List, Any, Dict, Tuple, Callable, Union, Iterable, Optional
from typing import Sequence
import re import re
import io import io
import copy import copy
import numpy
import base64 import base64
import struct import struct
import logging import logging
import pathlib import pathlib
import gzip import gzip
import numpy # type: ignore
# python-gdsii # python-gdsii
import gdsii.library import gdsii.library
import gdsii.structure import gdsii.structure
import gdsii.elements import gdsii.elements
from .utils import mangle_name, make_dose_table, dose2dtype, dtype2dose from .utils import clean_pattern_vertices, is_gzipped
from .. import Pattern, SubPattern, GridRepetition, PatternError, Label, Shape, subpattern_t from .. import Pattern, SubPattern, PatternError, Label, Shape
from ..shapes import Polygon, Path from ..shapes import Polygon, Path
from ..utils import rotation_matrix_2d, get_bit, set_bit, vector2, is_scalar, layer_t from ..repetition import Grid
from ..utils import remove_colinear_vertices, normalize_mirror from ..utils import get_bit, set_bit, layer_t, normalize_mirror, annotations_t
#TODO absolute positioning
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
path_cap_map = { path_cap_map = {
None: Path.Cap.Flush, None: Path.Cap.Flush,
0: Path.Cap.Flush, 0: Path.Cap.Flush,
1: Path.Cap.Circle, 1: Path.Cap.Circle,
2: Path.Cap.Square, 2: Path.Cap.Square,
4: Path.Cap.SquareCustom, 4: Path.Cap.SquareCustom,
} }
def write(patterns: Union[Pattern, List[Pattern]], def build(patterns: Union[Pattern, Sequence[Pattern]],
stream: io.BufferedIOBase,
meters_per_unit: float, meters_per_unit: float,
logical_units_per_unit: float = 1, logical_units_per_unit: float = 1,
library_name: str = 'masque-gdsii-write', library_name: str = 'masque-gdsii-write',
*,
modify_originals: bool = False, modify_originals: bool = False,
disambiguate_func: Callable[[Iterable[Pattern]], None] = None): disambiguate_func: Callable[[Iterable[Pattern]], None] = None,
) -> gdsii.library.Library:
""" """
Write a `Pattern` or list of patterns to a GDSII file, by first calling Convert a `Pattern` or list of patterns to a GDSII stream, by first calling
`.polygonize()` to change the shapes into polygons, and then writing patterns `.polygonize()` to change the shapes into polygons, and then writing patterns
as GDSII structures, polygons as boundary elements, and subpatterns as structure as GDSII structures, polygons as boundary elements, and subpatterns as structure
references (sref). references (sref).
@ -74,8 +80,7 @@ def write(patterns: Union[Pattern, List[Pattern]],
prior to calling this function. prior to calling this function.
Args: Args:
patterns: A Pattern or list of patterns to write to the stream. patterns: A Pattern or list of patterns to convert.
stream: Stream object to write to.
meters_per_unit: Written into the GDSII file, meters per (database) length unit. meters_per_unit: Written into the GDSII file, meters per (database) length unit.
All distances are assumed to be an integer multiple of this unit, and are stored as such. All distances are assumed to be an integer multiple of this unit, and are stored as such.
logical_units_per_unit: Written into the GDSII file. Allows the GDSII to specify a logical_units_per_unit: Written into the GDSII file. Allows the GDSII to specify a
@ -90,16 +95,22 @@ def write(patterns: Union[Pattern, List[Pattern]],
to make their names valid and unique. Default is `disambiguate_pattern_names`, which to make their names valid and unique. Default is `disambiguate_pattern_names`, which
attempts to adhere to the GDSII standard as well as possible. attempts to adhere to the GDSII standard as well as possible.
WARNING: No additional error checking is performed on the results. WARNING: No additional error checking is performed on the results.
Returns:
`gdsii.library.Library`
""" """
if isinstance(patterns, Pattern): if isinstance(patterns, Pattern):
patterns = [patterns] patterns = [patterns]
if disambiguate_func is None: if disambiguate_func is None:
disambiguate_func = disambiguate_pattern_names disambiguate_func = disambiguate_pattern_names # type: ignore
assert(disambiguate_func is not None) # placate mypy
if not modify_originals: if not modify_originals:
patterns = [p.deepunlock() for p in copy.deepcopy(patterns)] patterns = [p.deepunlock() for p in copy.deepcopy(patterns)]
patterns = [p.wrap_repeated_shapes() for p in patterns]
# Create library # Create library
lib = gdsii.library.Library(version=600, lib = gdsii.library.Library(version=600,
name=library_name.encode('ASCII'), name=library_name.encode('ASCII'),
@ -116,32 +127,49 @@ def write(patterns: Union[Pattern, List[Pattern]],
# Now create a structure for each pattern, and add in any Boundary and SREF elements # Now create a structure for each pattern, and add in any Boundary and SREF elements
for pat in patterns_by_id.values(): for pat in patterns_by_id.values():
structure = gdsii.structure.Structure(name=pat.name) structure = gdsii.structure.Structure(name=pat.name.encode('ASCII'))
lib.append(structure) lib.append(structure)
structure += _shapes_to_elements(pat.shapes) structure += _shapes_to_elements(pat.shapes)
structure += _labels_to_texts(pat.labels) structure += _labels_to_texts(pat.labels)
structure += _subpatterns_to_refs(pat.subpatterns) structure += _subpatterns_to_refs(pat.subpatterns)
return lib
def write(patterns: Union[Pattern, Sequence[Pattern]],
stream: io.BufferedIOBase,
*args,
**kwargs):
"""
Write a `Pattern` or list of patterns to a GDSII file.
See `masque.file.gdsii.build()` for details.
Args:
patterns: A Pattern or list of patterns to write to file.
stream: Stream to write to.
*args: passed to `masque.file.gdsii.build()`
**kwargs: passed to `masque.file.gdsii.build()`
"""
lib = build(patterns, *args, **kwargs)
lib.save(stream) lib.save(stream)
return return
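
Splitting `build()` out of `write()` means the `gdsii.library.Library` can be inspected or adjusted before it is serialized; a minimal sketch (filename is made up):

```python
import gzip
import numpy
import masque.file.gdsii
from masque import Pattern
from masque.shapes import Polygon

pat = Pattern('demo')
pat.shapes.append(Polygon(layer=(1, 0),
                          vertices=numpy.array([[0, 0], [10, 0], [10, 10], [0, 10.0]])))

# build() converts the patterns but does not serialize them...
lib = masque.file.gdsii.build(pat, meters_per_unit=1e-9, logical_units_per_unit=1e-3)
print([structure.name for structure in lib])      # e.g. [b'demo']

# ...so write() is effectively build() followed by lib.save(stream).
with gzip.open('demo.gds.gz', 'wb') as stream:
    lib.save(stream)
```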
def writefile(patterns: Union[Sequence[Pattern], Pattern],
def writefile(patterns: Union[List[Pattern], Pattern],
filename: Union[str, pathlib.Path], filename: Union[str, pathlib.Path],
*args, *args,
**kwargs, **kwargs,
): ):
""" """
Wrapper for `gdsii.write()` that takes a filename or path instead of a stream. Wrapper for `masque.file.gdsii.write()` that takes a filename or path instead of a stream.
Will automatically compress the file if it has a .gz suffix. Will automatically compress the file if it has a .gz suffix.
Args: Args:
patterns: `Pattern` or list of patterns to save patterns: `Pattern` or list of patterns to save
filename: Filename to save to. filename: Filename to save to.
*args: passed to `gdsii.write` *args: passed to `masque.file.gdsii.write`
**kwargs: passed to `gdsii.write` **kwargs: passed to `masque.file.gdsii.write`
""" """
path = pathlib.Path(filename) path = pathlib.Path(filename)
if path.suffix == '.gz': if path.suffix == '.gz':
@ -159,17 +187,17 @@ def readfile(filename: Union[str, pathlib.Path],
**kwargs, **kwargs,
) -> Tuple[Dict[str, Pattern], Dict[str, Any]]: ) -> Tuple[Dict[str, Pattern], Dict[str, Any]]:
""" """
Wrapper for `gdsii.read()` that takes a filename or path instead of a stream. Wrapper for `masque.file.gdsii.read()` that takes a filename or path instead of a stream.
Will automatically decompress files with a .gz suffix. Will automatically decompress gzipped files.
Args: Args:
filename: Filename to save to. filename: Filename to save to.
*args: passed to `gdsii.read` *args: passed to `masque.file.gdsii.read`
**kwargs: passed to `gdsii.read` **kwargs: passed to `masque.file.gdsii.read`
""" """
path = pathlib.Path(filename) path = pathlib.Path(filename)
if path.suffix == '.gz': if is_gzipped(path):
open_func: Callable = gzip.open open_func: Callable = gzip.open
else: else:
open_func = open open_func = open
@ -180,7 +208,6 @@ def readfile(filename: Union[str, pathlib.Path],
def read(stream: io.BufferedIOBase, def read(stream: io.BufferedIOBase,
use_dtype_as_dose: bool = False,
clean_vertices: bool = True, clean_vertices: bool = True,
) -> Tuple[Dict[str, Pattern], Dict[str, Any]]: ) -> Tuple[Dict[str, Pattern], Dict[str, Any]]:
""" """
@ -196,11 +223,6 @@ def read(stream: io.BufferedIOBase,
Args: Args:
stream: Stream to read from. stream: Stream to read from.
use_dtype_as_dose: If `False`, set each polygon's layer to `(gds_layer, gds_datatype)`.
If `True`, set the layer to `gds_layer` and the dose to `gds_datatype`.
Default `False`.
NOTE: This will be deprecated in the future in favor of
`pattern.apply(masque.file.utils.dtype2dose)`.
clean_vertices: If `True`, remove any redundant vertices when loading polygons. clean_vertices: If `True`, remove any redundant vertices when loading polygons.
The cleaning process removes any polygons with zero area or <3 vertices. The cleaning process removes any polygons with zero area or <3 vertices.
Default `True`. Default `True`.
@ -217,71 +239,32 @@ def read(stream: io.BufferedIOBase,
'logical_units_per_unit': lib.logical_unit, 'logical_units_per_unit': lib.logical_unit,
} }
raw_mode = True # Whether to construct shapes in raw mode (less error checking)
patterns = [] patterns = []
for structure in lib: for structure in lib:
pat = Pattern(name=structure.name.decode('ASCII')) pat = Pattern(name=structure.name.decode('ASCII'))
for element in structure: for element in structure:
# Switch based on element type: # Switch based on element type:
if isinstance(element, gdsii.elements.Boundary): if isinstance(element, gdsii.elements.Boundary):
args = {'vertices': element.xy[:-1], poly = _boundary_to_polygon(element, raw_mode)
'layer': (element.layer, element.data_type),
}
poly = Polygon(**args)
if clean_vertices:
try:
poly.clean_vertices()
except PatternError:
continue
pat.shapes.append(poly) pat.shapes.append(poly)
if isinstance(element, gdsii.elements.Path): if isinstance(element, gdsii.elements.Path):
if element.path_type in path_cap_map: path = _gpath_to_mpath(element, raw_mode)
cap = path_cap_map[element.path_type]
else:
raise PatternError('Unrecognized path type: {}'.format(element.path_type))
args = {'vertices': element.xy,
'layer': (element.layer, element.data_type),
'width': element.width if element.width is not None else 0.0,
'cap': cap,
}
if cap == Path.Cap.SquareCustom:
args['cap_extensions'] = numpy.zeros(2)
if element.bgn_extn is not None:
args['cap_extensions'][0] = element.bgn_extn
if element.end_extn is not None:
args['cap_extensions'][1] = element.end_extn
path = Path(**args)
if clean_vertices:
try:
path.clean_vertices()
except PatternError as err:
continue
pat.shapes.append(path) pat.shapes.append(path)
elif isinstance(element, gdsii.elements.Text): elif isinstance(element, gdsii.elements.Text):
label = Label(offset=element.xy, label = Label(offset=element.xy.astype(float),
layer=(element.layer, element.text_type), layer=(element.layer, element.text_type),
string=element.string.decode('ASCII')) string=element.string.decode('ASCII'))
pat.labels.append(label) pat.labels.append(label)
elif isinstance(element, gdsii.elements.SRef): elif isinstance(element, (gdsii.elements.SRef, gdsii.elements.ARef)):
pat.subpatterns.append(_sref_to_subpat(element)) pat.subpatterns.append(_ref_to_subpat(element))
elif isinstance(element, gdsii.elements.ARef):
pat.subpatterns.append(_aref_to_gridrep(element))
if use_dose_as_dtype:
logger.warning('use_dose_as_dtype will be removed in the future!')
pat = dose2dtype(pat)
if clean_vertices:
clean_pattern_vertices(pat)
patterns.append(pat) patterns.append(pat)
# Create a dict of {pattern.name: pattern, ...}, then fix up all subpattern.pattern entries # Create a dict of {pattern.name: pattern, ...}, then fix up all subpattern.pattern entries
@ -307,123 +290,166 @@ def _mlayer2gds(mlayer: layer_t) -> Tuple[int, int]:
else: else:
data_type = 0 data_type = 0
else: else:
raise PatternError(f'Invalid layer for gdsii: {layer}. Note that gdsii layers cannot be strings.') raise PatternError(f'Invalid layer for gdsii: {mlayer}. Note that gdsii layers cannot be strings.')
return layer, data_type return layer, data_type
def _sref_to_subpat(element: gdsii.elements.SRef) -> SubPattern: def _ref_to_subpat(element: Union[gdsii.elements.SRef,
gdsii.elements.ARef]
) -> SubPattern:
""" """
Helper function to create a SubPattern from an SREF. Sets subpat.pattern to None Helper function to create a SubPattern from an SREF or AREF. Sets subpat.pattern to None
and sets the instance .identifier to (struct_name,). and sets the instance .identifier to (struct_name,).
BUG: NOTE: "Absolute" means not affected by parent elements.
"Absolute" means not affected by parent elements. That's not currently supported by masque at all (and not planned).
That's not currently supported by masque at all, so need to either tag it and
undo the parent transformations, or implement it in masque.
""" """
subpat = SubPattern(pattern=None, offset=element.xy) rotation = 0.0
subpat.identifier = (element.struct_name,) offset = numpy.array(element.xy[0], dtype=float)
if element.strans is not None: scale = 1.0
if element.mag is not None:
subpat.scale = element.mag
# Bit 13 means absolute scale
if get_bit(element.strans, 15 - 13):
#subpat.offset *= subpat.scale
raise PatternError('Absolute scale is not implemented yet!')
if element.angle is not None:
subpat.rotation = element.angle * numpy.pi / 180
# Bit 14 means absolute rotation
if get_bit(element.strans, 15 - 14):
#subpat.offset = numpy.dot(rotation_matrix_2d(subpat.rotation), subpat.offset)
raise PatternError('Absolute rotation is not implemented yet!')
# Bit 0 means mirror x-axis
if get_bit(element.strans, 15 - 0):
subpat.mirrored[0] = 1
return subpat
def _aref_to_gridrep(element: gdsii.elements.ARef) -> GridRepetition:
"""
Helper function to create a GridRepetition from an AREF. Sets gridrep.pattern to None
and sets the instance .identifier to (struct_name,).
BUG:
"Absolute" means not affected by parent elements.
That's not currently supported by masque at all, so need to either tag it and
undo the parent transformations, or implement it in masque.
"""
rotation = 0
offset = numpy.array(element.xy[0])
scale = 1
mirror_across_x = False mirror_across_x = False
repetition = None
if element.strans is not None: if element.strans is not None:
if element.mag is not None: if element.mag is not None:
scale = element.mag scale = element.mag
# Bit 13 means absolute scale # Bit 13 means absolute scale
if get_bit(element.strans, 15 - 13): if get_bit(element.strans, 15 - 13):
raise PatternError('Absolute scale is not implemented yet!') raise PatternError('Absolute scale is not implemented in masque!')
if element.angle is not None: if element.angle is not None:
rotation = element.angle * numpy.pi / 180 rotation = numpy.deg2rad(element.angle)
# Bit 14 means absolute rotation # Bit 14 means absolute rotation
if get_bit(element.strans, 15 - 14): if get_bit(element.strans, 15 - 14):
raise PatternError('Absolute rotation is not implemented yet!') raise PatternError('Absolute rotation is not implemented in masque!')
# Bit 0 means mirror x-axis # Bit 0 means mirror x-axis
if get_bit(element.strans, 15 - 0): if get_bit(element.strans, 15 - 0):
mirror_across_x = True mirror_across_x = True
counts = [element.cols, element.rows] if isinstance(element, gdsii.elements.ARef):
a_vector = (element.xy[1] - offset) / counts[0] a_count = element.cols
b_vector = (element.xy[2] - offset) / counts[1] b_count = element.rows
a_vector = (element.xy[1] - offset) / a_count
b_vector = (element.xy[2] - offset) / b_count
repetition = Grid(a_vector=a_vector, b_vector=b_vector,
a_count=a_count, b_count=b_count)
gridrep = GridRepetition(pattern=None, subpat = SubPattern(pattern=None,
a_vector=a_vector, offset=offset,
b_vector=b_vector, rotation=rotation,
a_count=counts[0], scale=scale,
b_count=counts[1], mirrored=(mirror_across_x, False),
offset=offset, annotations=_properties_to_annotations(element.properties),
rotation=rotation, repetition=repetition)
scale=scale, subpat.identifier = (element.struct_name,)
mirrored=(mirror_across_x, False)) return subpat
gridrep.identifier = (element.struct_name,)
return gridrep
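
For reference, the STRANS bit positions tested above follow the GDSII convention of numbering bits from the most-significant end; a standalone sketch of the same decoding without the masque helpers:

```python
# GDSII "bit 0" is the MSB of the 16-bit STRANS word, so a test for GDS bit n
# becomes a test of bit (15 - n) when counting from the LSB, matching the
# get_bit(element.strans, 15 - n) calls above.
strans = 0b1000_0000_0000_0000                 # only mirror-about-x set

mirror_x  = bool((strans >> (15 - 0)) & 1)     # GDS bit 0: reflection about the x-axis
abs_mag   = bool((strans >> (15 - 13)) & 1)    # GDS bit 13: absolute magnification (unsupported)
abs_angle = bool((strans >> (15 - 14)) & 1)    # GDS bit 14: absolute rotation (unsupported)

assert mirror_x and not abs_mag and not abs_angle
```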
def _subpatterns_to_refs(subpatterns: List[subpattern_t] def _gpath_to_mpath(element: gdsii.elements.Path, raw_mode: bool) -> Path:
if element.path_type in path_cap_map:
cap = path_cap_map[element.path_type]
else:
raise PatternError(f'Unrecognized path type: {element.path_type}')
args = {'vertices': element.xy.astype(float),
'layer': (element.layer, element.data_type),
'width': element.width if element.width is not None else 0.0,
'cap': cap,
'offset': numpy.zeros(2),
'annotations': _properties_to_annotations(element.properties),
'raw': raw_mode,
}
if cap == Path.Cap.SquareCustom:
args['cap_extensions'] = numpy.zeros(2)
if element.bgn_extn is not None:
args['cap_extensions'][0] = element.bgn_extn
if element.end_extn is not None:
args['cap_extensions'][1] = element.end_extn
return Path(**args)
def _boundary_to_polygon(element: gdsii.elements.Boundary, raw_mode: bool) -> Polygon:
args = {'vertices': element.xy[:-1].astype(float),
'layer': (element.layer, element.data_type),
'offset': numpy.zeros(2),
'annotations': _properties_to_annotations(element.properties),
'raw': raw_mode,
}
return Polygon(**args)
def _subpatterns_to_refs(subpatterns: List[SubPattern]
) -> List[Union[gdsii.elements.ARef, gdsii.elements.SRef]]: ) -> List[Union[gdsii.elements.ARef, gdsii.elements.SRef]]:
refs = [] refs = []
for subpat in subpatterns: for subpat in subpatterns:
if subpat.pattern is None: if subpat.pattern is None:
continue continue
encoded_name = subpat.pattern.name encoded_name = subpat.pattern.name.encode('ASCII')
# Note: GDS mirrors first and rotates second # Note: GDS mirrors first and rotates second
mirror_across_x, extra_angle = normalize_mirror(subpat.mirrored) mirror_across_x, extra_angle = normalize_mirror(subpat.mirrored)
rep = subpat.repetition
new_refs: List[Union[gdsii.elements.SRef, gdsii.elements.ARef]]
ref: Union[gdsii.elements.SRef, gdsii.elements.ARef] ref: Union[gdsii.elements.SRef, gdsii.elements.ARef]
if isinstance(subpat, GridRepetition): if isinstance(rep, Grid):
xy = numpy.array(subpat.offset) + [ xy = numpy.array(subpat.offset) + [
[0, 0], [0, 0],
subpat.a_vector * subpat.a_count, rep.a_vector * rep.a_count,
subpat.b_vector * subpat.b_count, rep.b_vector * rep.b_count,
] ]
ref = gdsii.elements.ARef(struct_name=encoded_name, ref = gdsii.elements.ARef(struct_name=encoded_name,
xy=numpy.round(xy).astype(int), xy=numpy.round(xy).astype(int),
cols=numpy.round(subpat.a_count).astype(int), cols=numpy.round(rep.a_count).astype(int),
rows=numpy.round(subpat.b_count).astype(int)) rows=numpy.round(rep.b_count).astype(int))
else: new_refs = [ref]
elif rep is None:
ref = gdsii.elements.SRef(struct_name=encoded_name, ref = gdsii.elements.SRef(struct_name=encoded_name,
xy=numpy.round([subpat.offset]).astype(int)) xy=numpy.round([subpat.offset]).astype(int))
new_refs = [ref]
else:
new_refs = [gdsii.elements.SRef(struct_name=encoded_name,
xy=numpy.round([subpat.offset + dd]).astype(int))
for dd in rep.displacements]
ref.angle = ((subpat.rotation + extra_angle) * 180 / numpy.pi) % 360 for ref in new_refs:
# strans must be non-None for angle and mag to take effect ref.angle = numpy.rad2deg(subpat.rotation + extra_angle) % 360
ref.strans = set_bit(0, 15 - 0, mirror_across_x) # strans must be non-None for angle and mag to take effect
ref.mag = subpat.scale ref.strans = set_bit(0, 15 - 0, mirror_across_x)
ref.mag = subpat.scale
ref.properties = _annotations_to_properties(subpat.annotations, 512)
refs.append(ref) refs += new_refs
return refs return refs
def _properties_to_annotations(properties: List[Tuple[int, bytes]]) -> annotations_t:
return {str(k): [v.decode()] for k, v in properties}
def _annotations_to_properties(annotations: annotations_t, max_len: int = 126) -> List[Tuple[int, bytes]]:
cum_len = 0
props = []
for key, vals in annotations.items():
try:
i = int(key)
except ValueError:
raise PatternError(f'Annotation key {key} is not convertable to an integer')
if not (0 < i < 126):
raise PatternError(f'Annotation key {key} converts to {i} (must be in the range [1,125])')
val_strings = ' '.join(str(val) for val in vals)
b = val_strings.encode()
if len(b) > 126:
raise PatternError(f'Annotation value {b!r} is longer than 126 characters!')
cum_len += numpy.ceil(len(b) / 2) * 2 + 2
if cum_len > max_len:
raise PatternError(f'Sum of annotation data will be longer than {max_len} bytes! Generated bytes were {b!r}')
props.append((i, b))
return props
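
In short, annotation keys must parse as integers in [1, 125] and each encoded value must fit in 126 bytes; an illustrative sketch of annotations that do and do not satisfy the checks above (using the `{'key': [values]}` format from this commit's examples):

```python
# Would survive conversion to GDSII properties:
ok_annotations = {
    '1': ['visible text'],         # key parses as the integer 1; value well under 126 bytes
    '2': [3.5, 'mixed values'],    # multiple values are joined with spaces before encoding
}

# Each of these would raise PatternError during conversion:
bad_annotations = {
    'name': ['not numeric'],       # key is not convertible to an integer
    '200':  ['out of range'],      # converts to 200, outside [1, 125]
    '3':    ['x' * 200],           # encoded value longer than 126 bytes
}

# These dicts would be attached via the `annotations=` argument on shapes,
# labels, or subpatterns (see the SubPattern(..., annotations=...) examples above).
```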
def _shapes_to_elements(shapes: List[Shape], def _shapes_to_elements(shapes: List[Shape],
polygonize_paths: bool = False polygonize_paths: bool = False
) -> List[Union[gdsii.elements.Boundary, gdsii.elements.Path]]: ) -> List[Union[gdsii.elements.Boundary, gdsii.elements.Path]]:
@ -431,54 +457,74 @@ def _shapes_to_elements(shapes: List[Shape],
# Add a Boundary element for each shape, and Path elements if necessary # Add a Boundary element for each shape, and Path elements if necessary
for shape in shapes: for shape in shapes:
layer, data_type = _mlayer2gds(shape.layer) layer, data_type = _mlayer2gds(shape.layer)
properties = _annotations_to_properties(shape.annotations, 128)
if isinstance(shape, Path) and not polygonize_paths: if isinstance(shape, Path) and not polygonize_paths:
xy = numpy.round(shape.vertices + shape.offset).astype(int) xy = numpy.round(shape.vertices + shape.offset).astype(int)
width = numpy.round(shape.width).astype(int) width = numpy.round(shape.width).astype(int)
path_type = next(k for k, v in path_cap_map.items() if v == shape.cap) #reverse lookup path_type = next(k for k, v in path_cap_map.items() if v == shape.cap) # reverse lookup
path = gdsii.elements.Path(layer=layer, path = gdsii.elements.Path(layer=layer,
data_type=data_type, data_type=data_type,
xy=xy) xy=xy)
path.path_type = path_type path.path_type = path_type
path.width = width path.width = width
path.properties = properties
elements.append(path) elements.append(path)
else: else:
for polygon in shape.to_polygons(): for polygon in shape.to_polygons():
xy_open = numpy.round(polygon.vertices + polygon.offset).astype(int) xy_open = numpy.round(polygon.vertices + polygon.offset).astype(int)
xy_closed = numpy.vstack((xy_open, xy_open[0, :])) xy_closed = numpy.vstack((xy_open, xy_open[0, :]))
elements.append(gdsii.elements.Boundary(layer=layer, boundary = gdsii.elements.Boundary(layer=layer,
data_type=data_type, data_type=data_type,
xy=xy_closed)) xy=xy_closed)
boundary.properties = properties
elements.append(boundary)
return elements return elements
def _labels_to_texts(labels: List[Label]) -> List[gdsii.elements.Text]: def _labels_to_texts(labels: List[Label]) -> List[gdsii.elements.Text]:
texts = [] texts = []
for label in labels: for label in labels:
properties = _annotations_to_properties(label.annotations, 128)
layer, text_type = _mlayer2gds(label.layer) layer, text_type = _mlayer2gds(label.layer)
xy = numpy.round([label.offset]).astype(int) xy = numpy.round([label.offset]).astype(int)
texts.append(gdsii.elements.Text(layer=layer, text = gdsii.elements.Text(layer=layer,
text_type=text_type, text_type=text_type,
xy=xy, xy=xy,
string=label.string.encode('ASCII'))) string=label.string.encode('ASCII'))
text.properties = properties
texts.append(text)
return texts return texts
def disambiguate_pattern_names(patterns, def disambiguate_pattern_names(patterns: Sequence[Pattern],
max_name_length: int = 32, max_name_length: int = 32,
suffix_length: int = 6, suffix_length: int = 6,
dup_warn_filter: Callable[[str,], bool] = None, # If returns False, don't warn about this name dup_warn_filter: Optional[Callable[[str], bool]] = None,
): ):
"""
Args:
patterns: List of patterns to disambiguate
max_name_length: Names longer than this will be truncated
suffix_length: Names which get truncated are truncated by this many extra characters. This is to
leave room for a suffix if one is necessary.
dup_warn_filter: (optional) Function for suppressing warnings about cell names changing. Receives
the cell name and returns `False` if the warning should be suppressed and `True` if it should
be displayed. Default displays all warnings.
"""
used_names = [] used_names = []
for pat in patterns: for pat in set(patterns):
# Shorten names which already exceed max-length
if len(pat.name) > max_name_length: if len(pat.name) > max_name_length:
shortened_name = pat.name[:max_name_length - suffix_length] shortened_name = pat.name[:max_name_length - suffix_length]
logger.warning('Pattern name "{}" is too long ({}/{} chars),\n'.format(pat.name, len(pat.name), max_name_length) + logger.warning(f'Pattern name "{pat.name}" is too long ({len(pat.name)}/{max_name_length} chars),\n'
' shortening to "{}" before generating suffix'.format(shortened_name)) + f' shortening to "{shortened_name}" before generating suffix')
else: else:
shortened_name = pat.name shortened_name = pat.name
sanitized_name = re.compile('[^A-Za-z0-9_\?\$]').sub('_', shortened_name) # Remove invalid characters
sanitized_name = re.compile(r'[^A-Za-z0-9_\?\$]').sub('_', shortened_name)
# Add a suffix that makes the name unique
i = 0 i = 0
suffixed_name = sanitized_name suffixed_name = sanitized_name
while suffixed_name in used_names or suffixed_name == '': while suffixed_name in used_names or suffixed_name == '':
@ -488,18 +534,20 @@ def disambiguate_pattern_names(patterns,
i += 1 i += 1
if sanitized_name == '': if sanitized_name == '':
logger.warning('Empty pattern name saved as "{}"'.format(suffixed_name)) logger.warning(f'Empty pattern name saved as "{suffixed_name}"')
elif suffixed_name != sanitized_name: elif suffixed_name != sanitized_name:
if dup_warn_filter is None or dup_warn_filter(pat.name): if dup_warn_filter is None or dup_warn_filter(pat.name):
logger.warning('Pattern name "{}" ({}) appears multiple times;\n renaming to "{}"'.format( logger.warning(f'Pattern name "{pat.name}" ({sanitized_name}) appears multiple times;\n'
pat.name, sanitized_name, suffixed_name)) + f' renaming to "{suffixed_name}"')
# Encode into a byte-string and perform some final checks
encoded_name = suffixed_name.encode('ASCII') encoded_name = suffixed_name.encode('ASCII')
if len(encoded_name) == 0: if len(encoded_name) == 0:
# Should never happen since zero-length names are replaced # Should never happen since zero-length names are replaced
raise PatternError('Zero-length name after sanitize+encode,\n originally "{}"'.format(pat.name)) raise PatternError(f'Zero-length name after sanitize+encode,\n originally "{pat.name}"')
if len(encoded_name) > max_name_length: if len(encoded_name) > max_name_length:
raise PatternError('Pattern name "{!r}" length > {} after encode,\n originally "{}"'.format(encoded_name, max_name_length, pat.name)) raise PatternError(f'Pattern name "{encoded_name!r}" length > {max_name_length} after encode,\n'
+ f' originally "{pat.name}"')
pat.name = encoded_name pat.name = suffixed_name
used_names.append(suffixed_name) used_names.append(suffixed_name)
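
Since `write()` and `writefile()` accept a `disambiguate_func`, the extra parameters documented here can be supplied by wrapping the default; an illustrative sketch (the cell names and filename are made up):

```python
import functools
import numpy
import masque.file.gdsii
from masque import Pattern
from masque.shapes import Polygon

# Keep the default renaming behaviour, but silence duplicate-name warnings
# for cells whose names mark them as autogenerated.
quiet = functools.partial(
    masque.file.gdsii.disambiguate_pattern_names,
    dup_warn_filter=lambda name: not name.startswith('autogen_'),   # return False to suppress
)

pat = Pattern('autogen_cell')
pat.shapes.append(Polygon(layer=(1, 0), vertices=numpy.array([[0, 0], [1, 0], [1, 1.0]])))
pat2 = pat.copy()    # same name; will be suffixed on write, without a warning

masque.file.gdsii.writefile((pat, pat2), 'disambig.gds.gz', 1e-9, 1e-3,
                            disambiguate_func=quiet)
```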

638
masque/file/klamath.py Normal file

@ -0,0 +1,638 @@
"""
GDSII file format readers and writers using the `klamath` library.
Note that GDSII references follow the same convention as `masque`,
with this order of operations:
1. Mirroring
2. Rotation
3. Scaling
4. Offset and array expansion (no mirroring/rotation/scaling applied to offsets)
Scaling, rotation, and mirroring apply to individual instances, not grid
vectors or offsets.
Notes:
* absolute positioning is not supported
* PLEX is not supported
* ELFLAGS are not supported
* GDS does not support library- or structure-level annotations
* Creation/modification/access times are set to 1900-01-01 for reproducibility.
"""
from typing import List, Any, Dict, Tuple, Callable, Union, Iterable, Optional
from typing import Sequence, BinaryIO
import re
import io
import mmap
import copy
import base64
import struct
import logging
import pathlib
import gzip
import numpy # type: ignore
import klamath
from klamath import records
from .utils import is_gzipped
from .. import Pattern, SubPattern, PatternError, Label, Shape
from ..shapes import Polygon, Path
from ..repetition import Grid
from ..utils import layer_t, normalize_mirror, annotations_t
from ..library import Library
logger = logging.getLogger(__name__)
path_cap_map = {
0: Path.Cap.Flush,
1: Path.Cap.Circle,
2: Path.Cap.Square,
4: Path.Cap.SquareCustom,
}
def write(patterns: Union[Pattern, Sequence[Pattern]],
stream: BinaryIO,
meters_per_unit: float,
logical_units_per_unit: float = 1,
library_name: str = 'masque-klamath',
*,
modify_originals: bool = False,
disambiguate_func: Optional[Callable[[Iterable[Pattern]], None]] = None,
) -> None:
"""
Convert a `Pattern` or list of patterns to a GDSII stream, mapping data as follows:
Pattern -> GDSII structure
SubPattern -> GDSII SREF or AREF
Path -> GSDII path
Shape (other than path) -> GDSII boundary/ies
Label -> GDSII text
annotations -> properties, where possible
For each shape,
layer is chosen to be equal to `shape.layer` if it is an int,
or `shape.layer[0]` if it is a tuple
datatype is chosen to be `shape.layer[1]` if available,
otherwise `0`
It is often a good idea to run `pattern.subpatternize()` prior to calling this function,
especially if calling `.polygonize()` will result in very many vertices.
If you want the pattern polygonized with non-default arguments, call `pattern.polygonize()`
prior to calling this function.
Args:
patterns: A Pattern or list of patterns to convert.
meters_per_unit: Written into the GDSII file, meters per (database) length unit.
All distances are assumed to be an integer multiple of this unit, and are stored as such.
logical_units_per_unit: Written into the GDSII file. Allows the GDSII to specify a
"logical" unit which is different from the "database" unit, for display purposes.
Default `1`.
library_name: Library name written into the GDSII file.
Default 'masque-klamath'.
modify_originals: If `True`, the original pattern is modified as part of the writing
process. Otherwise, a copy is made and `deepunlock()`-ed.
Default `False`.
disambiguate_func: Function which takes a list of patterns and alters them
to make their names valid and unique. Default is `disambiguate_pattern_names`, which
attempts to adhere to the GDSII standard as well as possible.
WARNING: No additional error checking is performed on the results.
"""
if isinstance(patterns, Pattern):
patterns = [patterns]
if disambiguate_func is None:
disambiguate_func = disambiguate_pattern_names # type: ignore
assert(disambiguate_func is not None) # placate mypy
if not modify_originals:
patterns = [p.deepunlock() for p in copy.deepcopy(patterns)]
patterns = [p.wrap_repeated_shapes() for p in patterns]
# Create library
header = klamath.library.FileHeader(name=library_name.encode('ASCII'),
user_units_per_db_unit=logical_units_per_unit,
meters_per_db_unit=meters_per_unit)
header.write(stream)
# Get a dict of id(pattern) -> pattern
patterns_by_id = {id(pattern): pattern for pattern in patterns}
for pattern in patterns:
for i, p in pattern.referenced_patterns_by_id().items():
patterns_by_id[i] = p
disambiguate_func(patterns_by_id.values())
# Now create a structure for each pattern, and add in any Boundary and SREF elements
for pat in patterns_by_id.values():
elements: List[klamath.elements.Element] = []
elements += _shapes_to_elements(pat.shapes)
elements += _labels_to_texts(pat.labels)
elements += _subpatterns_to_refs(pat.subpatterns)
klamath.library.write_struct(stream, name=pat.name.encode('ASCII'), elements=elements)
records.ENDLIB.write(stream, None)
def writefile(patterns: Union[Sequence[Pattern], Pattern],
filename: Union[str, pathlib.Path],
*args,
**kwargs,
) -> None:
"""
Wrapper for `write()` that takes a filename or path instead of a stream.
Will automatically compress the file if it has a .gz suffix.
Args:
patterns: `Pattern` or list of patterns to save
filename: Filename to save to.
*args: passed to `write()`
**kwargs: passed to `write()`
"""
path = pathlib.Path(filename)
if path.suffix == '.gz':
open_func: Callable = gzip.open
else:
open_func = open
with io.BufferedWriter(open_func(path, mode='wb')) as stream:
write(patterns, stream, *args, **kwargs)
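A minimal usage sketch for the writer above; the filename and `meters_per_unit=1e-9` (a 1 nm database unit) are illustrative, and `pat` is assumed to be an existing `masque.Pattern`:

```python
import masque.file.klamath

# Hypothetical usage; `pat` is an existing masque.Pattern, units are illustrative.
masque.file.klamath.writefile(pat, 'out.gds.gz', meters_per_unit=1e-9)
```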
def readfile(filename: Union[str, pathlib.Path],
*args,
**kwargs,
) -> Tuple[Dict[str, Pattern], Dict[str, Any]]:
"""
Wrapper for `read()` that takes a filename or path instead of a stream.
Will automatically decompress gzipped files.
Args:
filename: Filename to read from.
*args: passed to `read()`
**kwargs: passed to `read()`
"""
path = pathlib.Path(filename)
if is_gzipped(path):
open_func: Callable = gzip.open
else:
open_func = open
with io.BufferedReader(open_func(path, mode='rb')) as stream:
results = read(stream, *args, **kwargs)
return results
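And the corresponding read, again as a sketch with an illustrative filename:

```python
import masque.file.klamath

# Read back the patterns and the library metadata dict described in `read()` below.
patterns, info = masque.file.klamath.readfile('out.gds.gz')
print(info['meters_per_unit'], sorted(patterns))
```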
def read(stream: BinaryIO,
) -> Tuple[Dict[str, Pattern], Dict[str, Any]]:
"""
Read a GDSII file and translate it into a dict of Pattern objects. GDSII structures are
translated into Pattern objects; boundaries are translated into polygons, and srefs and arefs
are translated into SubPattern objects.
Additional library info is returned in a dict, containing:
'name': name of the library
'meters_per_unit': number of meters per database unit (all values are in database units)
'logical_units_per_unit': number of "logical" units displayed by layout tools (typically microns)
per database unit
Args:
stream: Stream to read from.
Returns:
- Dict of pattern_name:Patterns generated from GDSII structures
- Dict of GDSII library info
"""
raw_mode = True # Whether to construct shapes in raw mode (less error checking)
library_info = _read_header(stream)
patterns = []
found_struct = records.BGNSTR.skip_past(stream)
while found_struct:
name = records.STRNAME.skip_and_read(stream)
pat = read_elements(stream, name=name.decode('ASCII'), raw_mode=raw_mode)
patterns.append(pat)
found_struct = records.BGNSTR.skip_past(stream)
# Create a dict of {pattern.name: pattern, ...}, then fix up all subpattern.pattern entries
# according to the subpattern.identifier (which is deleted after use).
patterns_dict = dict(((p.name, p) for p in patterns))
for p in patterns_dict.values():
for sp in p.subpatterns:
sp.pattern = patterns_dict[sp.identifier[0]]
del sp.identifier
return patterns_dict, library_info
def _read_header(stream: BinaryIO) -> Dict[str, Any]:
"""
Read the file header and create the library_info dict.
"""
header = klamath.library.FileHeader.read(stream)
library_info = {'name': header.name.decode('ASCII'),
'meters_per_unit': header.meters_per_db_unit,
'logical_units_per_unit': header.user_units_per_db_unit,
}
return library_info
def read_elements(stream: BinaryIO,
name: str,
raw_mode: bool = True,
) -> Pattern:
"""
Read elements from a GDS structure and build a Pattern from them.
Args:
stream: Seekable stream, positioned at a record boundary.
Will be read until an ENDSTR record is consumed.
name: Name of the resulting Pattern
raw_mode: If True, bypass per-shape consistency checking
Returns:
A pattern containing the elements that were read.
"""
pat = Pattern(name)
elements = klamath.library.read_elements(stream)
for element in elements:
if isinstance(element, klamath.elements.Boundary):
poly = _boundary_to_polygon(element, raw_mode)
pat.shapes.append(poly)
elif isinstance(element, klamath.elements.Path):
path = _gpath_to_mpath(element, raw_mode)
pat.shapes.append(path)
elif isinstance(element, klamath.elements.Text):
label = Label(offset=element.xy.astype(float),
layer=element.layer,
string=element.string.decode('ASCII'),
annotations=_properties_to_annotations(element.properties))
pat.labels.append(label)
elif isinstance(element, klamath.elements.Reference):
pat.subpatterns.append(_ref_to_subpat(element))
return pat
def _mlayer2gds(mlayer: layer_t) -> Tuple[int, int]:
""" Helper to turn a layer tuple-or-int into a layer and datatype"""
if isinstance(mlayer, int):
layer = mlayer
data_type = 0
elif isinstance(mlayer, tuple):
layer = mlayer[0]
if len(mlayer) > 1:
data_type = mlayer[1]
else:
data_type = 0
else:
raise PatternError(f'Invalid layer for gdsii: {mlayer}. Note that gdsii layers cannot be strings.')
return layer, data_type
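For reference, the conversion above behaves as follows (illustrative values only):

```python
# Illustrative only: integer layers get datatype 0; 2-tuples pass both values through.
assert _mlayer2gds(5) == (5, 0)
assert _mlayer2gds((7, 2)) == (7, 2)
```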
def _ref_to_subpat(ref: klamath.library.Reference,
) -> SubPattern:
"""
Helper function to create a SubPattern from an SREF or AREF. Sets subpat.pattern to None
and sets the instance .identifier to (struct_name,).
"""
xy = ref.xy.astype(float)
offset = xy[0]
repetition = None
if ref.colrow is not None:
a_count, b_count = ref.colrow
a_vector = (xy[1] - offset) / a_count
b_vector = (xy[2] - offset) / b_count
repetition = Grid(a_vector=a_vector, b_vector=b_vector,
a_count=a_count, b_count=b_count)
subpat = SubPattern(pattern=None,
offset=offset,
rotation=numpy.deg2rad(ref.angle_deg),
scale=ref.mag,
mirrored=(ref.invert_y, False),
annotations=_properties_to_annotations(ref.properties),
repetition=repetition)
subpat.identifier = (ref.struct_name.decode('ASCII'),)
return subpat
def _gpath_to_mpath(gpath: klamath.library.Path, raw_mode: bool) -> Path:
if gpath.path_type in path_cap_map:
cap = path_cap_map[gpath.path_type]
else:
raise PatternError(f'Unrecognized path type: {gpath.path_type}')
mpath = Path(vertices=gpath.xy.astype(float),
layer=gpath.layer,
width=gpath.width,
cap=cap,
offset=numpy.zeros(2),
annotations=_properties_to_annotations(gpath.properties),
raw=raw_mode,
)
if cap == Path.Cap.SquareCustom:
mpath.cap_extensions = gpath.extension
return mpath
def _boundary_to_polygon(boundary: klamath.library.Boundary, raw_mode: bool) -> Polygon:
return Polygon(vertices=boundary.xy[:-1].astype(float),
layer=boundary.layer,
offset=numpy.zeros(2),
annotations=_properties_to_annotations(boundary.properties),
raw=raw_mode,
)
def _subpatterns_to_refs(subpatterns: List[SubPattern]
) -> List[klamath.library.Reference]:
refs = []
for subpat in subpatterns:
if subpat.pattern is None:
continue
encoded_name = subpat.pattern.name.encode('ASCII')
# Note: GDS mirrors first and rotates second
mirror_across_x, extra_angle = normalize_mirror(subpat.mirrored)
rep = subpat.repetition
angle_deg = numpy.rad2deg(subpat.rotation + extra_angle) % 360
properties = _annotations_to_properties(subpat.annotations, 512)
if isinstance(rep, Grid):
xy = numpy.array(subpat.offset) + [
[0, 0],
rep.a_vector * rep.a_count,
rep.b_vector * rep.b_count,
]
aref = klamath.library.Reference(struct_name=encoded_name,
xy=numpy.round(xy).astype(int),
colrow=(numpy.round(rep.a_count), numpy.round(rep.b_count)),
angle_deg=angle_deg,
invert_y=mirror_across_x,
mag=subpat.scale,
properties=properties)
refs.append(aref)
elif rep is None:
ref = klamath.library.Reference(struct_name=encoded_name,
xy=numpy.round([subpat.offset]).astype(int),
colrow=None,
angle_deg=angle_deg,
invert_y=mirror_across_x,
mag=subpat.scale,
properties=properties)
refs.append(ref)
else:
new_srefs = [klamath.library.Reference(struct_name=encoded_name,
xy=numpy.round([subpat.offset + dd]).astype(int),
colrow=None,
angle_deg=angle_deg,
invert_y=mirror_across_x,
mag=subpat.scale,
properties=properties)
for dd in rep.displacements]
refs += new_srefs
return refs
def _properties_to_annotations(properties: Dict[int, bytes]) -> annotations_t:
return {str(k): [v.decode()] for k, v in properties.items()}
def _annotations_to_properties(annotations: annotations_t, max_len: int = 126) -> Dict[int, bytes]:
cum_len = 0
props = {}
for key, vals in annotations.items():
try:
i = int(key)
except ValueError:
raise PatternError(f'Annotation key {key} is not convertible to an integer')
if not (0 < i < 126):
raise PatternError(f'Annotation key {key} converts to {i} (must be in the range [1,125])')
val_strings = ' '.join(str(val) for val in vals)
b = val_strings.encode()
if len(b) > 126:
raise PatternError(f'Annotation value {b!r} is longer than 126 bytes!')
cum_len += numpy.ceil(len(b) / 2) * 2 + 2
if cum_len > max_len:
raise PatternError(f'Sum of annotation data will be longer than {max_len} bytes! Generated bytes were {b!r}')
props[i] = b
return props
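As a rough illustration of the rules above (keys must parse to integers in [1, 125]; values are space-joined and byte-encoded), with hypothetical values:

```python
# Illustrative only; the expected result is shown as a comment.
annotations = {'2': ['spam', 42], '3': [6.5]}
# _annotations_to_properties(annotations, max_len=128) -> {2: b'spam 42', 3: b'6.5'}
```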
def _shapes_to_elements(shapes: List[Shape],
polygonize_paths: bool = False
) -> List[klamath.elements.Element]:
elements: List[klamath.elements.Element] = []
# Add a Boundary element for each shape, and Path elements if necessary
for shape in shapes:
layer, data_type = _mlayer2gds(shape.layer)
properties = _annotations_to_properties(shape.annotations, 128)
if isinstance(shape, Path) and not polygonize_paths:
xy = numpy.round(shape.vertices + shape.offset).astype(int)
width = numpy.round(shape.width).astype(int)
path_type = next(k for k, v in path_cap_map.items() if v == shape.cap) # reverse lookup
extension: Tuple[int, int]
if shape.cap == Path.Cap.SquareCustom and shape.cap_extensions is not None:
extension = tuple(shape.cap_extensions) # type: ignore
else:
extension = (0, 0)
path = klamath.elements.Path(layer=(layer, data_type),
xy=xy,
path_type=path_type,
width=width,
extension=extension,
properties=properties)
elements.append(path)
elif isinstance(shape, Polygon):
polygon = shape
xy_open = numpy.round(polygon.vertices + polygon.offset).astype(int)
xy_closed = numpy.vstack((xy_open, xy_open[0, :]))
boundary = klamath.elements.Boundary(layer=(layer, data_type),
xy=xy_closed,
properties=properties)
elements.append(boundary)
else:
for polygon in shape.to_polygons():
xy_open = numpy.round(polygon.vertices + polygon.offset).astype(int)
xy_closed = numpy.vstack((xy_open, xy_open[0, :]))
boundary = klamath.elements.Boundary(layer=(layer, data_type),
xy=xy_closed,
properties=properties)
elements.append(boundary)
return elements
def _labels_to_texts(labels: List[Label]) -> List[klamath.elements.Text]:
texts = []
for label in labels:
properties = _annotations_to_properties(label.annotations, 128)
layer, text_type = _mlayer2gds(label.layer)
xy = numpy.round([label.offset]).astype(int)
text = klamath.elements.Text(layer=(layer, text_type),
xy=xy,
string=label.string.encode('ASCII'),
properties=properties,
presentation=0, # TODO maybe set some of these?
angle_deg=0,
invert_y=False,
width=0,
path_type=0,
mag=1)
texts.append(text)
return texts
def disambiguate_pattern_names(patterns: Sequence[Pattern],
max_name_length: int = 32,
suffix_length: int = 6,
dup_warn_filter: Optional[Callable[[str], bool]] = None,
):
"""
Args:
patterns: List of patterns to disambiguate
max_name_length: Names longer than this will be truncated
suffix_length: Names which get truncated are truncated by this many extra characters. This is to
leave room for a suffix if one is necessary.
dup_warn_filter: (optional) Function for suppressing warnings about cell names changing. Receives
the cell name and returns `False` if the warning should be suppressed and `True` if it should
be displayed. Default displays all warnings.
"""
used_names = []
for pat in set(patterns):
# Shorten names which already exceed max-length
if len(pat.name) > max_name_length:
shortened_name = pat.name[:max_name_length - suffix_length]
logger.warning(f'Pattern name "{pat.name}" is too long ({len(pat.name)}/{max_name_length} chars),\n'
+ f' shortening to "{shortened_name}" before generating suffix')
else:
shortened_name = pat.name
# Remove invalid characters
sanitized_name = re.compile(r'[^A-Za-z0-9_\?\$]').sub('_', shortened_name)
# Add a suffix that makes the name unique
i = 0
suffixed_name = sanitized_name
while suffixed_name in used_names or suffixed_name == '':
suffix = base64.b64encode(struct.pack('>Q', i), b'$?').decode('ASCII')
suffixed_name = sanitized_name + '$' + suffix[:-1].lstrip('A')
i += 1
if sanitized_name == '':
logger.warning(f'Empty pattern name saved as "{suffixed_name}"')
elif suffixed_name != sanitized_name:
if dup_warn_filter is None or dup_warn_filter(pat.name):
logger.warning(f'Pattern name "{pat.name}" ({sanitized_name}) appears multiple times;\n'
+ f' renaming to "{suffixed_name}"')
# Encode into a byte-string and perform some final checks
encoded_name = suffixed_name.encode('ASCII')
if len(encoded_name) == 0:
# Should never happen since zero-length names are replaced
raise PatternError(f'Zero-length name after sanitize+encode,\n originally "{pat.name}"')
if len(encoded_name) > max_name_length:
raise PatternError(f'Pattern name "{encoded_name!r}" length > {max_name_length} after encode,\n'
+ f' originally "{pat.name}"')
pat.name = suffixed_name
used_names.append(suffixed_name)
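A small sketch of the suffixing scheme used above: the counter is packed into 8 bytes, base64-encoded with `$` and `?` as the extra alphabet characters, and stripped of padding and leading `A`s:

```python
import base64
import struct

for i in range(3):
    suffix = base64.b64encode(struct.pack('>Q', i), b'$?').decode('ASCII')
    print('cell$' + suffix[:-1].lstrip('A'))   # 'cell$', 'cell$E', 'cell$I' for i = 0, 1, 2
```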
def load_library(stream: BinaryIO,
tag: str,
is_secondary: Optional[Callable[[str], bool]] = None,
) -> Tuple[Library, Dict[str, Any]]:
"""
Scan a GDSII stream to determine what structures are present, and create
a library from them. This enables deferred reading of structures
on an as-needed basis.
Structures are marked as secondary according to `is_secondary`.
Args:
stream: Seekable stream. Position 0 should be the start of the file.
The caller should leave the stream open while the library
is still in use, since the library will need to access it
in order to read the structure contents.
tag: Unique identifier that will be used to identify this data source
is_secondary: Function which takes a structure name and returns
True if the structure should only be used as a subcell
and not appear in the main Library interface.
Default always returns False.
Returns:
Library object, allowing for deferred load of structures.
Additional library info (dict, same format as from `read`).
"""
if is_secondary is None:
def is_secondary(k: str) -> bool:
return False
stream.seek(0)
library_info = _read_header(stream)
structs = klamath.library.scan_structs(stream)
lib = Library()
for name_bytes, pos in structs.items():
name = name_bytes.decode('ASCII')
def mkstruct(pos: int = pos, name: str = name) -> Pattern:
stream.seek(pos)
return read_elements(stream, name, raw_mode=True)
lib.set_value(name, tag, mkstruct, secondary=is_secondary(name))
return lib, library_info
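A hedged usage sketch for the deferred loader; the filename and cell name are hypothetical, and mapping-style access on `Library` is assumed per `masque/library/library.py`:

```python
# Hypothetical usage: structures are only read from the stream when first accessed.
with open('big_layout.gds', 'rb') as stream:
    lib, info = load_library(stream, tag='big_layout')
    top = lib['TOP_CELL']   # dict-style access assumed; triggers read_elements() for this cell
```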
def load_libraryfile(filename: Union[str, pathlib.Path],
tag: str,
is_secondary: Optional[Callable[[str], bool]] = None,
use_mmap: bool = True,
) -> Tuple[Library, Dict[str, Any]]:
"""
Wrapper for `load_library()` that takes a filename or path instead of a stream.
Will automatically decompress the file if it is gzipped.
NOTE that any streams/mmaps opened will remain open until ALL of the
`PatternGenerator` objects in the library are garbage collected.
Args:
filename: Filename or path to read from
tag: Unique identifier for library, see `load_library`
is_secondary: Function specifying which structures are secondary; see `load_library`
use_mmap: If `True`, will attempt to memory-map the file instead
of buffering. In the case of gzipped files, the file
is decompressed into a python `bytes` object in memory
and reopened as an `io.BytesIO` stream.
Returns:
Library object, allowing for deferred load of structures.
Additional library info (dict, same format as from `read`).
"""
path = pathlib.Path(filename)
if is_gzipped(path):
if use_mmap:
logger.info('Asked to mmap a gzipped file, reading into memory instead...')
base_stream = gzip.open(path, mode='rb')
stream = io.BytesIO(base_stream.read())
else:
base_stream = gzip.open(path, mode='rb')
stream = io.BufferedReader(base_stream)
else:
base_stream = open(path, mode='rb')
if use_mmap:
stream = mmap.mmap(base_stream.fileno(), 0, access=mmap.ACCESS_READ)
else:
stream = io.BufferedReader(base_stream)
return load_library(stream, tag, is_secondary)


@ -15,45 +15,52 @@ from typing import List, Any, Dict, Tuple, Callable, Union, Sequence, Iterable,
import re import re
import io import io
import copy import copy
import numpy
import base64 import base64
import struct import struct
import logging import logging
import pathlib import pathlib
import gzip import gzip
import numpy # type: ignore
import fatamorgana import fatamorgana
import fatamorgana.records as fatrec import fatamorgana.records as fatrec
from fatamorgana.basic import PathExtensionScheme from fatamorgana.basic import PathExtensionScheme, AString, NString, PropStringReference
from .utils import mangle_name, make_dose_table from .utils import clean_pattern_vertices, is_gzipped
from .. import Pattern, SubPattern, GridRepetition, PatternError, Label, Shape, subpattern_t from .. import Pattern, SubPattern, PatternError, Label, Shape
from ..shapes import Polygon, Path from ..shapes import Polygon, Path, Circle
from ..utils import rotation_matrix_2d, get_bit, set_bit, vector2, is_scalar, layer_t from ..repetition import Grid, Arbitrary, Repetition
from ..utils import remove_colinear_vertices, normalize_mirror from ..utils import layer_t, normalize_mirror, annotations_t
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
logger.warning('OASIS support is experimental and mostly untested!')
path_cap_map = { path_cap_map = {
PathExtensionScheme.Flush: Path.Cap.Flush, PathExtensionScheme.Flush: Path.Cap.Flush,
PathExtensionScheme.HalfWidth: Path.Cap.Square, PathExtensionScheme.HalfWidth: Path.Cap.Square,
PathExtensionScheme.Arbitrary: Path.Cap.SquareCustom, PathExtensionScheme.Arbitrary: Path.Cap.SquareCustom,
} }
#TODO implement more shape types?
def write(patterns: Union[Pattern, List[Pattern]], def build(patterns: Union[Pattern, Sequence[Pattern]],
stream: io.BufferedIOBase,
units_per_micron: int, units_per_micron: int,
layer_map: Dict[str, Union[int, Tuple[int, int]]] = None, layer_map: Optional[Dict[str, Union[int, Tuple[int, int]]]] = None,
*,
modify_originals: bool = False, modify_originals: bool = False,
disambiguate_func: Callable[[Iterable[Pattern]], None] = None): disambiguate_func: Optional[Callable[[Iterable[Pattern]], None]] = None,
annotations: Optional[annotations_t] = None
) -> fatamorgana.OasisLayout:
""" """
Write a `Pattern` or list of patterns to a OASIS file, writing patterns Convert a `Pattern` or list of patterns to an OASIS stream, writing patterns
as OASIS cells, polygons as Polygon records, and subpatterns as Placement as OASIS cells, subpatterns as Placement records, and other shapes and labels
records. Other shape types may be converted to polygons if no equivalent mapped to equivalent record types (Polygon, Path, Circle, Text).
record type exists (or is not implemented here yet). #TODO Other shape types may be converted to polygons if no equivalent
record type exists (or is not implemented here yet).
For each shape, For each shape,
layer is chosen to be equal to `shape.layer` if it is an int, layer is chosen to be equal to `shape.layer` if it is an int,
@ -67,16 +74,26 @@ def write(patterns: Union[Pattern, List[Pattern]],
prior to calling this function. prior to calling this function.
Args: Args:
patterns: A Pattern or list of patterns to write to file. patterns: A Pattern or list of patterns to convert.
stream: Stream object to write to.
units_per_micron: Written into the OASIS file, number of grid steps per micrometer. units_per_micron: Written into the OASIS file, number of grid steps per micrometer.
All distances are assumed to be an integer multiple of the grid step, and are stored as such. All distances are assumed to be an integer multiple of the grid step, and are stored as such.
layer_map: Dictionary which translates layer_map: Dictionary which translates layer names into layer numbers. If this argument is
provided, input shapes and labels are allowed to have layer names instead of numbers.
It is assumed that geometry and text share the same layer names, and each name is
assigned only to a single layer (not a range).
If more fine-grained control is needed, manually pre-processing shapes' layer names
into numbers, omit this argument, and manually generate the required
`fatamorgana.records.LayerName` entries.
Default is an empty dict (no names provided).
modify_originals: If `True`, the original pattern is modified as part of the writing modify_originals: If `True`, the original pattern is modified as part of the writing
process. Otherwise, a copy is made and `deepunlock()`-ed. process. Otherwise, a copy is made and `deepunlock()`-ed.
Default `False`. Default `False`.
disambiguate_func: Function which takes a list of patterns and alters them disambiguate_func: Function which takes a list of patterns and alters them
to make their names valid and unique. Default is `disambiguate_pattern_names`. to make their names valid and unique. Default is `disambiguate_pattern_names`.
annotations: dictionary of key-value pairs which are saved as library-level properties
Returns:
`fatamorgana.OasisLayout`
""" """
if isinstance(patterns, Pattern): if isinstance(patterns, Pattern):
patterns = [patterns] patterns = [patterns]
@ -87,23 +104,32 @@ def write(patterns: Union[Pattern, List[Pattern]],
if disambiguate_func is None: if disambiguate_func is None:
disambiguate_func = disambiguate_pattern_names disambiguate_func = disambiguate_pattern_names
if annotations is None:
annotations = {}
if not modify_originals: if not modify_originals:
patterns = [p.deepunlock() for p in copy.deepcopy(patterns)] patterns = [p.deepunlock() for p in copy.deepcopy(patterns)]
# Create library # Create library
lib = fatamorgana.OasisLayout(unit, validation=None) lib = fatamorgana.OasisLayout(unit=units_per_micron, validation=None)
lib.properties = annotations_to_properties(annotations)
for name, layer_num in layer_map.items(): if layer_map:
layer, data_type = _mlayer2oas(layer_num) for name, layer_num in layer_map.items():
lib.layer_names.append( #TODO figure out how to deal with text layers layer, data_type = _mlayer2oas(layer_num)
LayerName(nstring=name, lib.layers += [
layer_interval=(layer, layer), fatrec.LayerName(nstring=name,
type_interval=(data_type, data_type), layer_interval=(layer, layer),
is_textlayer=False)) type_interval=(data_type, data_type),
is_textlayer=tt)
for tt in (True, False)]
def layer2oas(layer: layer_t) -> Tuple[int, int]: def layer2oas(mlayer: layer_t) -> Tuple[int, int]:
layer_num = layer_map[layer] if isinstance(layer, str) else layer assert(layer_map is not None)
return _mlayer2oas(layer_num) layer_num = layer_map[mlayer] if isinstance(mlayer, str) else mlayer
return _mlayer2oas(layer_num)
else:
layer2oas = _mlayer2oas
# Get a dict of id(pattern) -> pattern # Get a dict of id(pattern) -> pattern
patterns_by_id = {id(pattern): pattern for pattern in patterns} patterns_by_id = {id(pattern): pattern for pattern in patterns}
@ -115,18 +141,37 @@ def write(patterns: Union[Pattern, List[Pattern]],
# Now create a structure for each pattern # Now create a structure for each pattern
for pat in patterns_by_id.values(): for pat in patterns_by_id.values():
structure = fatamorgana.Cell(name=NString(pat.name)) structure = fatamorgana.Cell(name=pat.name)
lib.cells.append(structure) lib.cells.append(structure)
structure.properties += annotations_to_properties(pat.annotations)
structure.geometry += _shapes_to_elements(pat.shapes, layer2oas) structure.geometry += _shapes_to_elements(pat.shapes, layer2oas)
structure.geometry += _labels_to_texts(pat.labels, layer2oas) structure.geometry += _labels_to_texts(pat.labels, layer2oas)
structure.placements += _subpatterns_to_refs(pat.subpatterns) structure.placements += _subpatterns_to_placements(pat.subpatterns)
return lib
def write(patterns: Union[Sequence[Pattern], Pattern],
stream: io.BufferedIOBase,
*args,
**kwargs):
"""
Write a `Pattern` or list of patterns to a OASIS file. See `oasis.build()`
for details.
Args:
patterns: A Pattern or list of patterns to write to file.
stream: Stream to write to.
*args: passed to `oasis.build()`
**kwargs: passed to `oasis.build()`
"""
lib = build(patterns, *args, **kwargs)
lib.write(stream) lib.write(stream)
return
def writefile(patterns: Union[List[Pattern], Pattern], def writefile(patterns: Union[Sequence[Pattern], Pattern],
filename: Union[str, pathlib.Path], filename: Union[str, pathlib.Path],
*args, *args,
**kwargs, **kwargs,
@ -160,7 +205,7 @@ def readfile(filename: Union[str, pathlib.Path],
""" """
Wrapper for `oasis.read()` that takes a filename or path instead of a stream. Wrapper for `oasis.read()` that takes a filename or path instead of a stream.
Will automatically decompress files with a .gz suffix. Will automatically decompress gzipped files.
Args: Args:
filename: Filename to save to. filename: Filename to save to.
@ -168,7 +213,7 @@ def readfile(filename: Union[str, pathlib.Path],
**kwargs: passed to `oasis.read` **kwargs: passed to `oasis.read`
""" """
path = pathlib.Path(filename) path = pathlib.Path(filename)
if path.suffix == '.gz': if is_gzipped(path):
open_func: Callable = gzip.open open_func: Callable = gzip.open
else: else:
open_func = open open_func = open
@ -184,10 +229,12 @@ def read(stream: io.BufferedIOBase,
""" """
Read a OASIS file and translate it into a dict of Pattern objects. OASIS cells are Read a OASIS file and translate it into a dict of Pattern objects. OASIS cells are
translated into Pattern objects; Polygons are translated into polygons, and Placements translated into Pattern objects; Polygons are translated into polygons, and Placements
are translated into SubPattern or GridRepetition objects. are translated into SubPattern objects.
Additional library info is returned in a dict, containing: Additional library info is returned in a dict, containing:
'units_per_micrometer': number of database units per micrometer (all values are in database units) 'units_per_micrometer': number of database units per micrometer (all values are in database units)
'layer_map': Mapping from layer names to fatamorgana.LayerName objects
'annotations': Mapping of {key: value} pairs from library's properties
Args: Args:
stream: Stream to read from. stream: Stream to read from.
@ -196,75 +243,205 @@ def read(stream: io.BufferedIOBase,
Default `True`. Default `True`.
Returns: Returns:
- Dict of pattern_name:Patterns generated from GDSII structures - Dict of `pattern_name`:`Pattern`s generated from OASIS cells
- Dict of GDSII library info - Dict of OASIS library info
""" """
lib = fatamorgana.OASISLayout.read(stream) lib = fatamorgana.OasisLayout.read(stream)
library_info = {'units_per_micrometer': lib.unit, library_info: Dict[str, Any] = {
} 'units_per_micrometer': lib.unit,
'annotations': properties_to_annotations(lib.properties, lib.propnames, lib.propstrings),
}
layer_map = {}
for layer_name in lib.layers:
layer_map[str(layer_name.nstring)] = layer_name
library_info['layer_map'] = layer_map
patterns = [] patterns = []
for cell in lib.cells: for cell in lib.cells:
pat = Pattern(name=cell.name.string) if isinstance(cell.name, int):
cell_name = lib.cellnames[cell.name].nstring.string
else:
cell_name = cell.name.string
pat = Pattern(name=cell_name)
for element in cell.geometry: for element in cell.geometry:
if element.repetition is not None: if isinstance(element, fatrec.XElement):
raise PatternError('masque OASIS reader does not implement repetitions for shapes yet') logger.warning('Skipping XElement record')
# note XELEMENT has no repetition
continue
assert(not isinstance(element.repetition, fatamorgana.ReuseRepetition))
repetition = repetition_fata2masq(element.repetition)
# Switch based on element type: # Switch based on element type:
if isinstance(element, fatrec.Polygon): if isinstance(element, fatrec.Polygon):
args = {'vertices': element.point_list, vertices = numpy.cumsum(numpy.vstack(((0, 0), element.get_point_list())), axis=0)
'layer': (element.layer, element.data_type) annotations = properties_to_annotations(element.properties, lib.propnames, lib.propstrings)
'offset': (element.x, element.y), poly = Polygon(vertices=vertices,
} layer=element.get_layer_tuple(),
poly = Polygon(**args) offset=element.get_xy(),
annotations=annotations,
if clean_vertices: repetition=repetition)
try:
poly.clean_vertices()
except PatternError:
continue
pat.shapes.append(poly) pat.shapes.append(poly)
if isinstance(element, fatrec.Path): elif isinstance(element, fatrec.Path):
cap_start = path_cap_map[element.extension_start[0]] vertices = numpy.cumsum(numpy.vstack(((0, 0), element.get_point_list())), axis=0)
cap_end = path_cap_map[element.extension_end[0]]
cap_start = path_cap_map[element.get_extension_start()[0]]
cap_end = path_cap_map[element.get_extension_end()[0]]
if cap_start != cap_end: if cap_start != cap_end:
raise Exception('masque does not support multiple cap types on a single path.') #TODO handle multiple cap types raise Exception('masque does not support multiple cap types on a single path.') # TODO handle multiple cap types
cap = cap_start cap = cap_start
args = {'vertices': element.point_list, path_args: Dict[str, Any] = {}
'layer': (element.layer, element.data_type)
'offset': (element.x, element.y),
'width': element.half_width * 2,
'cap': cap,
}
if cap == Path.Cap.SquareCustom: if cap == Path.Cap.SquareCustom:
args['cap_extensions'] = numpy.array((element.extension_start[1], path_args['cap_extensions'] = numpy.array((element.get_extension_start()[1],
element.extension_end[1])) element.get_extension_end()[1]))
path = Path(**args)
if clean_vertices: annotations = properties_to_annotations(element.properties, lib.propnames, lib.propstrings)
try: path = Path(vertices=vertices,
path.clean_vertices() layer=element.get_layer_tuple(),
except PatternError as err: offset=element.get_xy(),
continue repetition=repetition,
annotations=annotations,
width=element.get_half_width() * 2,
cap=cap,
**path_args)
pat.shapes.append(path) pat.shapes.append(path)
elif isinstance(element, fatrec.Rectangle):
width = element.get_width()
height = element.get_height()
annotations = properties_to_annotations(element.properties, lib.propnames, lib.propstrings)
rect = Polygon(layer=element.get_layer_tuple(),
offset=element.get_xy(),
repetition=repetition,
vertices=numpy.array(((0, 0), (1, 0), (1, 1), (0, 1))) * (width, height),
annotations=annotations,
)
pat.shapes.append(rect)
elif isinstance(element, fatrec.Trapezoid):
vertices = numpy.array(((0, 0), (1, 0), (1, 1), (0, 1))) * (element.get_width(), element.get_height())
a = element.get_delta_a()
b = element.get_delta_b()
if element.get_is_vertical():
if a > 0:
vertices[0, 1] += a
else:
vertices[3, 1] += a
if b > 0:
vertices[2, 1] -= b
else:
vertices[1, 1] -= b
else:
if a > 0:
vertices[1, 0] += a
else:
vertices[0, 0] += a
if b > 0:
vertices[3, 0] -= b
else:
vertices[2, 0] -= b
annotations = properties_to_annotations(element.properties, lib.propnames, lib.propstrings)
trapz = Polygon(layer=element.get_layer_tuple(),
offset=element.get_xy(),
repetition=repetition,
vertices=vertices,
annotations=annotations,
)
pat.shapes.append(trapz)
elif isinstance(element, fatrec.CTrapezoid):
cttype = element.get_ctrapezoid_type()
height = element.get_height()
width = element.get_width()
vertices = numpy.array(((0, 0), (1, 0), (1, 1), (0, 1))) * (width, height)
if cttype in (0, 4, 7):
vertices[2, 0] -= height
if cttype in (1, 5, 6):
vertices[3, 0] -= height
if cttype in (2, 4, 6):
vertices[1, 0] += height
if cttype in (3, 5, 7):
vertices[0, 0] += height
if cttype in (8, 12, 15):
vertices[2, 0] -= width
if cttype in (9, 13, 14):
vertices[1, 0] -= width
if cttype in (10, 12, 14):
vertices[3, 0] += width
if cttype in (11, 13, 15):
vertices[0, 0] += width
if cttype == 16:
vertices = vertices[[0, 1, 3], :]
elif cttype == 17:
vertices = vertices[[0, 1, 2], :]
elif cttype == 18:
vertices = vertices[[0, 2, 3], :]
elif cttype == 19:
vertices = vertices[[1, 2, 3], :]
elif cttype == 20:
vertices = vertices[[0, 1, 3], :]
vertices[1, 0] += height
elif cttype == 21:
vertices = vertices[[0, 1, 2], :]
vertices[0, 0] += height
elif cttype == 22:
vertices = vertices[[0, 1, 3], :]
vertices[3, 1] += width
elif cttype == 23:
vertices = vertices[[0, 2, 3], :]
vertices[0, 1] += width
annotations = properties_to_annotations(element.properties, lib.propnames, lib.propstrings)
ctrapz = Polygon(layer=element.get_layer_tuple(),
offset=element.get_xy(),
repetition=repetition,
vertices=vertices,
annotations=annotations,
)
pat.shapes.append(ctrapz)
elif isinstance(element, fatrec.Circle):
annotations = properties_to_annotations(element.properties, lib.propnames, lib.propstrings)
circle = Circle(layer=element.get_layer_tuple(),
offset=element.get_xy(),
repetition=repetition,
annotations=annotations,
radius=float(element.get_radius()))
pat.shapes.append(circle)
elif isinstance(element, fatrec.Text): elif isinstance(element, fatrec.Text):
args = {'layer': (element.layer, element.data_type) annotations = properties_to_annotations(element.properties, lib.propnames, lib.propstrings)
'offset': (element.x, element.y), label = Label(layer=element.get_layer_tuple(),
'string': str(element.string), offset=element.get_xy(),
} repetition=repetition,
pat.labels.append(Label(**args)) annotations=annotations,
string=str(element.get_string()))
pat.labels.append(label)
else:
logger.warning(f'Skipping record {element} (unimplemented)')
continue
for placement in cell.placements: for placement in cell.placements:
pat.subpattterns.append += _placement_to_subpats(placement) pat.subpatterns.append(_placement_to_subpat(placement, lib))
if clean_vertices:
clean_pattern_vertices(pat)
patterns.append(pat) patterns.append(pat)
# Create a dict of {pattern.name: pattern, ...}, then fix up all subpattern.pattern entries # Create a dict of {pattern.name: pattern, ...}, then fix up all subpattern.pattern entries
@ -272,7 +449,9 @@ def read(stream: io.BufferedIOBase,
patterns_dict = dict(((p.name, p) for p in patterns)) patterns_dict = dict(((p.name, p) for p in patterns))
for p in patterns_dict.values(): for p in patterns_dict.values():
for sp in p.subpatterns: for sp in p.subpatterns:
sp.pattern = patterns_dict[sp.identifier[0]] ident = sp.identifier[0]
name = ident if isinstance(ident, str) else lib.cellnames[ident].nstring.string
sp.pattern = patterns_dict[name]
del sp.identifier del sp.identifier
return patterns_dict, library_info return patterns_dict, library_info
@ -290,45 +469,35 @@ def _mlayer2oas(mlayer: layer_t) -> Tuple[int, int]:
else: else:
data_type = 0 data_type = 0
else: else:
raise PatternError(f'Invalid layer for OASIS: {layer}. Note that OASIS layers cannot be strings.') #TODO allow string layers using layer map def raise PatternError(f'Invalid layer for OASIS: {layer}. Note that OASIS layers cannot be '
f'strings unless a layer map is provided.')
return layer, data_type return layer, data_type
def _placement_to_subpats(placement: fatrec.Placement) -> List[subpattern_t]: def _placement_to_subpat(placement: fatrec.Placement, lib: fatamorgana.OasisLayout) -> SubPattern:
""" """
Helper function to create a SubPattern from a placment. Sets subpat.pattern to None Helper function to create a SubPattern from a placment. Sets subpat.pattern to None
and sets the instance .identifier to (struct_name,). and sets the instance .identifier to (struct_name,).
""" """
assert(not isinstance(placement.repetition, fatamorgana.ReuseRepetition))
xy = numpy.array((placement.x, placement.y)) xy = numpy.array((placement.x, placement.y))
kwargs = { mag = placement.magnification if placement.magnification is not None else 1
'pattern': None, pname = placement.get_name()
'mirrored': (placement.flip, False), name = pname if isinstance(pname, int) else pname.string
'rotation': float(placement.angle * pi/180) annotations = properties_to_annotations(placement.properties, lib.propnames, lib.propstrings)
'scale': placement.magnification, subpat = SubPattern(offset=xy,
'identifier': (placement.name,), pattern=None,
} mirrored=(placement.flip, False),
rotation=numpy.deg2rad(placement.angle),
rep = placement.repetition scale=float(mag),
if isinstance(rep, fatamorgana.GridRepetition): identifier=(name,),
subpat = GridRepetition(a_vector=rep.a_vector, repetition=repetition_fata2masq(placement.repetition),
b_vector=rep.b_vector, annotations=annotations)
a_count=rep.a_count, return subpat
b_count=rep.b_count,
offset=xy,
**kwargs)
subpats = [subpat]
elif isinstance(rep, fatamorgana.ArbitraryRepetition):
subpats = []
for rep_offset in numpy.cumsum(numpy.column_stack((rep.x_displacements,
rep.y_displacements))):
subpats.append(SubPattern(offset=xy + rep_offset, **kwargs))
elif rep is None
subpats = [SubPattern(offset=xy + rep_offset, **kwargs)]
return subpats
def _subpatterns_to_refs(subpatterns: List[subpattern_t] def _subpatterns_to_placements(subpatterns: List[SubPattern]
) -> List[fatrec.Placement]]: ) -> List[fatrec.Placement]:
refs = [] refs = []
for subpat in subpatterns: for subpat in subpatterns:
if subpat.pattern is None: if subpat.pattern is None:
@ -336,25 +505,19 @@ def _subpatterns_to_refs(subpatterns: List[subpattern_t]
# Note: OASIS mirrors first and rotates second # Note: OASIS mirrors first and rotates second
mirror_across_x, extra_angle = normalize_mirror(subpat.mirrored) mirror_across_x, extra_angle = normalize_mirror(subpat.mirrored)
xy = numpy.round(subpat.offset).astype(int) frep, rep_offset = repetition_masq2fata(subpat.repetition)
args = {
'x': xy[0],
'y': xy[1],
}
if isinstance(subpat, GridRepetition):
kwargs['rep'] = fatamorgana.GridRepetition(
a_vector=numpy.round(subpat.a_vector).astype(int),
b_vector=numpy.round(subpat.b_vector).astype(int),
a_count=numpy.round(subpat.a_count).astype(int),
b_count=numpy.round(subpat.b_count).astype(int))
offset = numpy.round(subpat.offset + rep_offset).astype(int)
angle = numpy.rad2deg(subpat.rotation + extra_angle) % 360
ref = fatrec.Placement( ref = fatrec.Placement(
name=subpat.pattern.name, name=subpat.pattern.name,
flip=mirror_across_x, flip=mirror_across_x,
angle=((subpat.rotation + extra_angle) * 180 / numpy.pi) % 360, angle=angle,
magnification=subpat.scale, magnification=subpat.scale,
**kwargs) properties=annotations_to_properties(subpat.annotations),
x=offset[0],
y=offset[1],
repetition=frep)
refs.append(ref) refs.append(ref)
return refs return refs
@ -362,36 +525,54 @@ def _subpatterns_to_refs(subpatterns: List[subpattern_t]
def _shapes_to_elements(shapes: List[Shape], def _shapes_to_elements(shapes: List[Shape],
layer2oas: Callable[[layer_t], Tuple[int, int]], layer2oas: Callable[[layer_t], Tuple[int, int]],
polygonize_paths: bool = False, ) -> List[Union[fatrec.Polygon, fatrec.Path, fatrec.Circle]]:
) -> List[Union[fatrec.Polygon, fatrec.Path]]:
# Add a Polygon record for each shape, and Path elements if necessary # Add a Polygon record for each shape, and Path elements if necessary
elements: List[Union[fatrec.Polygon, fatrec.Path]] = [] elements: List[Union[fatrec.Polygon, fatrec.Path, fatrec.Circle]] = []
for shape in shapes: for shape in shapes:
layer, data_type = layer2oas(shape.layer) layer, datatype = layer2oas(shape.layer)
if isinstance(shape, Path) and not polygonize_paths: repetition, rep_offset = repetition_masq2fata(shape.repetition)
offset = numpy.round(shape.offset).astype(int) properties = annotations_to_properties(shape.annotations)
points = numpy.round(shape.vertices).astype(int) if isinstance(shape, Circle):
offset = numpy.round(shape.offset + rep_offset).astype(int)
radius = numpy.round(shape.radius).astype(int)
circle = fatrec.Circle(layer=layer,
datatype=datatype,
radius=radius,
x=offset[0],
y=offset[1],
properties=properties,
repetition=repetition)
elements.append(circle)
elif isinstance(shape, Path):
xy = numpy.round(shape.offset + shape.vertices[0] + rep_offset).astype(int)
deltas = numpy.round(numpy.diff(shape.vertices, axis=0)).astype(int)
half_width = numpy.round(shape.width / 2).astype(int) half_width = numpy.round(shape.width / 2).astype(int)
path_type = next(k for k, v in path_cap_map.items() if v == shape.cap) #reverse lookup path_type = next(k for k, v in path_cap_map.items() if v == shape.cap) # reverse lookup
extension_start = (path_type, shape.cap_extensions[0] if shape.cap_extensions is not None else None)
extension_end = (path_type, shape.cap_extensions[1] if shape.cap_extensions is not None else None)
path = fatrec.Path(layer=layer, path = fatrec.Path(layer=layer,
data_type=data_type, datatype=datatype,
point_list=points, point_list=deltas,
half_width=half_width, half_width=half_width,
x=offset[0], x=xy[0],
y=offset[1], y=xy[1],
extension_start=path_type, #TODO implement multiple cap types? extension_start=extension_start, # TODO implement multiple cap types?
extension_end=path_type, extension_end=extension_end,
properties=properties,
repetition=repetition,
) )
elements.append(path) elements.append(path)
else: else:
for polygon in shape.to_polygons(): for polygon in shape.to_polygons():
points = numpy.round(polygon.vertices).astype(int) xy = numpy.round(polygon.offset + polygon.vertices[0] + rep_offset).astype(int)
offset = numpy.round(polygon.offset).astype(int) points = numpy.round(numpy.diff(polygon.vertices, axis=0)).astype(int)
elements.append(fatrec.Polygon(layer=layer, elements.append(fatrec.Polygon(layer=layer,
data_type=data_type, datatype=datatype,
x=offset[0], x=xy[0],
y=offset[1], y=xy[1],
point_list=point_list)) point_list=points,
properties=properties,
repetition=repetition))
return elements return elements
@ -400,22 +581,26 @@ def _labels_to_texts(labels: List[Label],
) -> List[fatrec.Text]: ) -> List[fatrec.Text]:
texts = [] texts = []
for label in labels: for label in labels:
layer, text_type = layer2oas(label.layer) layer, datatype = layer2oas(label.layer)
xy = numpy.round(label.offset).astype(int) repetition, rep_offset = repetition_masq2fata(label.repetition)
xy = numpy.round(label.offset + rep_offset).astype(int)
properties = annotations_to_properties(label.annotations)
texts.append(fatrec.Text(layer=layer, texts.append(fatrec.Text(layer=layer,
text_type=text_type, datatype=datatype,
x=xy[0], x=xy[0],
y=xy[1], y=xy[1],
string=string)) string=label.string,
properties=properties,
repetition=repetition))
return texts return texts
def disambiguate_pattern_names(patterns, def disambiguate_pattern_names(patterns,
dup_warn_filter: Callable[[str,], bool] = None, # If returns False, don't warn about this name dup_warn_filter: Callable[[str], bool] = None, # If returns False, don't warn about this name
): ):
used_names = [] used_names = []
for pat in patterns: for pat in patterns:
sanitized_name = re.compile('[^A-Za-z0-9_\?\$]').sub('_', pat.name) sanitized_name = re.compile(r'[^A-Za-z0-9_\?\$]').sub('_', pat.name)
i = 0 i = 0
suffixed_name = sanitized_name suffixed_name = sanitized_name
@ -426,16 +611,101 @@ def disambiguate_pattern_names(patterns,
i += 1 i += 1
if sanitized_name == '': if sanitized_name == '':
logger.warning('Empty pattern name saved as "{}"'.format(suffixed_name)) logger.warning(f'Empty pattern name saved as "{suffixed_name}"')
elif suffixed_name != sanitized_name: elif suffixed_name != sanitized_name:
if dup_warn_filter is None or dup_warn_filter(pat.name): if dup_warn_filter is None or dup_warn_filter(pat.name):
logger.warning('Pattern name "{}" ({}) appears multiple times;\n renaming to "{}"'.format( logger.warning(f'Pattern name "{pat.name}" ({sanitized_name}) appears multiple times;\n'
pat.name, sanitized_name, suffixed_name)) + f' renaming to "{suffixed_name}"')
encoded_name = suffixed_name.encode('ASCII') if len(suffixed_name) == 0:
if len(encoded_name) == 0:
# Should never happen since zero-length names are replaced # Should never happen since zero-length names are replaced
raise PatternError('Zero-length name after sanitize+encode,\n originally "{}"'.format(pat.name)) raise PatternError(f'Zero-length name after sanitize+encode,\n originally "{pat.name}"')
pat.name = encoded_name pat.name = suffixed_name
used_names.append(suffixed_name) used_names.append(suffixed_name)
def repetition_fata2masq(rep: Union[fatamorgana.GridRepetition, fatamorgana.ArbitraryRepetition, None]
) -> Optional[Repetition]:
mrep: Optional[Repetition]
if isinstance(rep, fatamorgana.GridRepetition):
mrep = Grid(a_vector=rep.a_vector,
b_vector=rep.b_vector,
a_count=rep.a_count,
b_count=rep.b_count)
elif isinstance(rep, fatamorgana.ArbitraryRepetition):
displacements = numpy.cumsum(numpy.column_stack((rep.x_displacements,
rep.y_displacements)), axis=0)
displacements = numpy.vstack(([0, 0], displacements))
mrep = Arbitrary(displacements)
elif rep is None:
mrep = None
return mrep
def repetition_masq2fata(rep: Optional[Repetition]
) -> Tuple[Union[fatamorgana.GridRepetition,
fatamorgana.ArbitraryRepetition,
None],
Tuple[int, int]]:
frep: Union[fatamorgana.GridRepetition, fatamorgana.ArbitraryRepetition, None]
if isinstance(rep, Grid):
frep = fatamorgana.GridRepetition(
a_vector=numpy.round(rep.a_vector).astype(int),
b_vector=numpy.round(rep.b_vector).astype(int),
a_count=numpy.round(rep.a_count).astype(int),
b_count=numpy.round(rep.b_count).astype(int))
offset = (0, 0)
elif isinstance(rep, Arbitrary):
diffs = numpy.diff(rep.displacements, axis=0)
diff_ints = numpy.round(diffs).astype(int)
frep = fatamorgana.ArbitraryRepetition(diff_ints[:, 0], diff_ints[:, 1])
offset = rep.displacements[0, :]
else:
assert(rep is None)
frep = None
offset = (0, 0)
return frep, offset
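An illustrative round trip through the helpers above, using hypothetical displacement values; an `Arbitrary` repetition is stored as integer deltas plus the offset of its first displacement:

```python
# Illustrative only: expected intermediate values are shown as comments.
rep = Arbitrary(numpy.array([[0, 0], [10, 0], [25, 5]]))
frep, offset = repetition_masq2fata(rep)
# frep holds x/y deltas (10, 15) and (0, 5); offset is (0, 0), the first displacement.
assert repetition_fata2masq(frep) is not None
```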
def annotations_to_properties(annotations: annotations_t) -> List[fatrec.Property]:
#TODO determine is_standard based on key?
properties = []
for key, values in annotations.items():
vals = [AString(v) if isinstance(v, str) else v
for v in values]
properties.append(fatrec.Property(key, vals, is_standard=False))
return properties
def properties_to_annotations(properties: List[fatrec.Property],
propnames: Dict[int, NString],
propstrings: Dict[int, AString],
) -> annotations_t:
annotations = {}
for proprec in properties:
assert(proprec.name is not None)
if isinstance(proprec.name, int):
key = propnames[proprec.name].string
else:
key = proprec.name.string
values: List[Union[str, float, int]] = []
assert(proprec.values is not None)
for value in proprec.values:
if isinstance(value, (float, int)):
values.append(value)
elif isinstance(value, (NString, AString)):
values.append(value.string)
elif isinstance(value, PropStringReference):
values.append(propstrings[value.ref].string) # dereference
else:
string = repr(value)
logger.warning(f'Converting property value for key ({key}) to string ({string})')
values.append(string)
annotations[key] = values
return annotations


@ -2,17 +2,19 @@
SVG file format readers and writers SVG file format readers and writers
""" """
from typing import Dict, Optional from typing import Dict, Optional
import svgwrite
import numpy
import warnings import warnings
import numpy # type: ignore
import svgwrite # type: ignore
from .utils import mangle_name from .utils import mangle_name
from .. import Pattern from .. import Pattern
def writefile(pattern: Pattern, def writefile(pattern: Pattern,
filename: str, filename: str,
custom_attributes: bool=False): custom_attributes: bool = False,
) -> None:
""" """
Write a Pattern to an SVG file, by first calling .polygonize() on it Write a Pattern to an SVG file, by first calling .polygonize() on it
to change the shapes into polygons, and then writing patterns as SVG to change the shapes into polygons, and then writing patterns as SVG
@ -79,8 +81,7 @@ def writefile(pattern: Pattern,
for subpat in pat.subpatterns: for subpat in pat.subpatterns:
if subpat.pattern is None: if subpat.pattern is None:
continue continue
transform = 'scale({:g}) rotate({:g}) translate({:g},{:g})'.format( transform = f'scale({subpat.scale:g}) rotate({subpat.rotation:g}) translate({subpat.offset[0]:g},{subpat.offset[1]:g})'
subpat.scale, subpat.rotation, subpat.offset[0], subpat.offset[1])
use = svg.use(href='#' + mangle_name(subpat.pattern), transform=transform) use = svg.use(href='#' + mangle_name(subpat.pattern), transform=transform)
if custom_attributes: if custom_attributes:
use['pattern_dose'] = subpat.dose use['pattern_dose'] = subpat.dose


@ -1,13 +1,16 @@
""" """
Helper functions for file reading and writing Helper functions for file reading and writing
""" """
import re
from typing import Set, Tuple, List from typing import Set, Tuple, List
import re
import copy
import pathlib
from masque.pattern import Pattern from .. import Pattern, PatternError
from ..shapes import Polygon, Path
def mangle_name(pattern: Pattern, dose_multiplier: float=1.0) -> str: def mangle_name(pattern: Pattern, dose_multiplier: float = 1.0) -> str:
""" """
Create a name using `pattern.name`, `id(pattern)`, and the dose multiplier. Create a name using `pattern.name`, `id(pattern)`, and the dose multiplier.
@ -18,13 +21,37 @@ def mangle_name(pattern: Pattern, dose_multiplier: float=1.0) -> str:
Returns: Returns:
Mangled name. Mangled name.
""" """
expression = re.compile('[^A-Za-z0-9_\?\$]') expression = re.compile(r'[^A-Za-z0-9_\?\$]')
full_name = '{}_{}_{}'.format(pattern.name, dose_multiplier, id(pattern)) full_name = '{}_{}_{}'.format(pattern.name, dose_multiplier, id(pattern))
sanitized_name = expression.sub('_', full_name) sanitized_name = expression.sub('_', full_name)
return sanitized_name return sanitized_name
def make_dose_table(patterns: List[Pattern], dose_multiplier: float=1.0) -> Set[Tuple[int, float]]: def clean_pattern_vertices(pat: Pattern) -> Pattern:
"""
Given a pattern, remove any redundant vertices in its polygons and paths.
The cleaning process completely removes any polygons with zero area or <3 vertices.
Args:
pat: Pattern to clean
Returns:
pat
"""
remove_inds = []
for ii, shape in enumerate(pat.shapes):
if not isinstance(shape, (Polygon, Path)):
continue
try:
shape.clean_vertices()
except PatternError:
remove_inds.append(ii)
for ii in sorted(remove_inds, reverse=True):
del pat.shapes[ii]
return pat
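A one-line usage sketch (the `pat` variable is assumed to be a freshly-read `Pattern`):

```python
# Hypothetical usage: drop degenerate polygons/paths in place after reading a file.
pat = clean_pattern_vertices(pat)
```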
def make_dose_table(patterns: List[Pattern], dose_multiplier: float = 1.0) -> Set[Tuple[int, float]]:
""" """
Create a set containing `(id(pat), written_dose)` for each pattern (including subpatterns) Create a set containing `(id(pat), written_dose)` for each pattern (including subpatterns)
@ -116,14 +143,14 @@ def dose2dtype(patterns: List[Pattern],
# Create a new pattern for each non-1-dose entry in the dose table # Create a new pattern for each non-1-dose entry in the dose table
# and update the shapes to reflect their new dose # and update the shapes to reflect their new dose
new_pats = {} # (id, dose) -> new_pattern mapping new_pats = {} # (id, dose) -> new_pattern mapping
for pat_id, pat_dose in sd_table: for pat_id, pat_dose in sd_table:
if pat_dose == 1: if pat_dose == 1:
new_pats[(pat_id, pat_dose)] = patterns_by_id[pat_id] new_pats[(pat_id, pat_dose)] = patterns_by_id[pat_id]
continue continue
old_pat = patterns_by_id[pat_id] old_pat = patterns_by_id[pat_id]
pat = old_pat.copy() # keep old subpatterns pat = old_pat.copy() # keep old subpatterns
pat.shapes = copy.deepcopy(old_pat.shapes) pat.shapes = copy.deepcopy(old_pat.shapes)
pat.labels = copy.deepcopy(old_pat.labels) pat.labels = copy.deepcopy(old_pat.labels)
@ -150,3 +177,9 @@ def dose2dtype(patterns: List[Pattern],
subpat.pattern = new_pats[(id(subpat.pattern), dose_mult)] subpat.pattern = new_pats[(id(subpat.pattern), dose_mult)]
return patterns, dose_vals_list return patterns, dose_vals_list
def is_gzipped(path: pathlib.Path) -> bool:
with open(path, 'rb') as stream:
magic_bytes = stream.read(2)
return magic_bytes == b'\x1f\x8b'


@ -1,23 +1,22 @@
from typing import List, Tuple, Dict from typing import Tuple, Dict, Optional, TypeVar
import copy import copy
import numpy import numpy # type: ignore
from numpy import pi
from .error import PatternError, PatternLockedError from .repetition import Repetition
from .utils import is_scalar, vector2, rotation_matrix_2d, layer_t from .utils import vector2, rotation_matrix_2d, layer_t, AutoSlots, annotations_t
from .traits import PositionableImpl, LayerableImpl, Copyable, Pivotable, LockableImpl, RepeatableImpl
from .traits import AnnotatableImpl
class Label: L = TypeVar('L', bound='Label')
class Label(PositionableImpl, LayerableImpl, LockableImpl, RepeatableImpl, AnnotatableImpl,
Pivotable, Copyable, metaclass=AutoSlots):
""" """
A text annotation with a position and layer (but no size; it is not drawn) A text annotation with a position and layer (but no size; it is not drawn)
""" """
__slots__ = ('_offset', '_layer', '_string', 'identifier', 'locked') __slots__ = ( '_string', 'identifier')
_offset: numpy.ndarray
""" [x_offset, y_offset] """
_layer: layer_t
""" Layer (integer >= 0, or 2-Tuple of integers) """
_string: str _string: str
""" Label string """ """ Label string """
@ -25,44 +24,9 @@ class Label:
identifier: Tuple identifier: Tuple
""" Arbitrary identifier tuple, useful for keeping track of history when flattening """ """ Arbitrary identifier tuple, useful for keeping track of history when flattening """
locked: bool '''
""" If `True`, any changes to the label will raise a `PatternLockedError` """ ---- Properties
'''
def __setattr__(self, name, value):
if self.locked and name != 'locked':
raise PatternLockedError()
object.__setattr__(self, name, value)
# ---- Properties
# offset property
@property
def offset(self) -> numpy.ndarray:
"""
[x, y] offset
"""
return self._offset
@offset.setter
def offset(self, val: vector2):
if not isinstance(val, numpy.ndarray):
val = numpy.array(val, dtype=float)
if val.size != 2:
raise PatternError('Offset must be convertible to size-2 ndarray')
self._offset = val.flatten().astype(float)
# layer property
@property
def layer(self) -> layer_t:
"""
Layer number or name (int, tuple of ints, or string)
"""
return self._layer
@layer.setter
def layer(self, val: layer_t):
self._layer = val
# string property # string property
@property @property
def string(self) -> str: def string(self) -> str:
@ -77,49 +41,37 @@ class Label:
def __init__(self, def __init__(self,
string: str, string: str,
*,
offset: vector2 = (0.0, 0.0), offset: vector2 = (0.0, 0.0),
layer: layer_t = 0, layer: layer_t = 0,
locked: bool = False): repetition: Optional[Repetition] = None,
object.__setattr__(self, 'locked', False) annotations: Optional[annotations_t] = None,
locked: bool = False,
) -> None:
LockableImpl.unlock(self)
self.identifier = () self.identifier = ()
self.string = string self.string = string
self.offset = numpy.array(offset, dtype=float, copy=True) self.offset = numpy.array(offset, dtype=float, copy=True)
self.layer = layer self.layer = layer
self.locked = locked self.repetition = repetition
self.annotations = annotations if annotations is not None else {}
self.set_locked(locked)
def __copy__(self) -> 'Label': def __copy__(self: L) -> L:
return Label(string=self.string, return Label(string=self.string,
offset=self.offset.copy(), offset=self.offset.copy(),
layer=self.layer, layer=self.layer,
repetition=self.repetition,
locked=self.locked) locked=self.locked)
def __deepcopy__(self, memo: Dict = None) -> 'Label': def __deepcopy__(self: L, memo: Dict = None) -> L:
memo = {} if memo is None else memo memo = {} if memo is None else memo
new = copy.copy(self).unlock() new = copy.copy(self).unlock()
new._offset = self._offset.copy() new._offset = self._offset.copy()
new.locked = self.locked new.set_locked(self.locked)
return new return new
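With the new keyword-only signature, constructing a label looks roughly like the following (a minimal sketch, assuming `Label` is importable from the top-level `masque` package):

```python
from masque import Label

# Everything after the label string is keyword-only in the new signature.
lab = Label('port_A', offset=(1.0, 0.0), layer=(1, 2))
```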
def copy(self) -> 'Label': def rotate_around(self: L, pivot: vector2, rotation: float) -> L:
"""
Returns a deep copy of the label.
"""
return copy.deepcopy(self)
def translate(self, offset: vector2) -> 'Label':
"""
Translate the label by the given offset
Args:
offset: [x_offset, y,offset]
Returns:
self
"""
self.offset += offset
return self
def rotate_around(self, pivot: vector2, rotation: float) -> 'Label':
""" """
Rotate the label around a point. Rotate the label around a point.
@ -149,26 +101,14 @@ class Label:
""" """
return numpy.array([self.offset, self.offset]) return numpy.array([self.offset, self.offset])
def lock(self) -> 'Label': def lock(self: L) -> L:
""" PositionableImpl._lock(self)
Lock the Label, causing any modifications to raise an exception. LockableImpl.lock(self)
Return:
self
"""
self.offset.flags.writeable = False
object.__setattr__(self, 'locked', True)
return self return self
def unlock(self) -> 'Label': def unlock(self: L) -> L:
""" LockableImpl.unlock(self)
Unlock the Label, re-allowing changes. PositionableImpl._unlock(self)
Return:
self
"""
object.__setattr__(self, 'locked', False)
self.offset.flags.writeable = True
return self return self
def __repr__(self) -> str: def __repr__(self) -> str:
8
masque/library/NOTES.md Normal file
@ -0,0 +1,8 @@
- library:
- takes in a string
- builds device if not ready yet
- returns device
- can overwrite device and update pointers?
- lockable?
- add functions?
@ -0,0 +1 @@
from .library import Library, PatternGenerator
282
masque/library/library.py Normal file
@ -0,0 +1,282 @@
"""
Library class for managing unique name->pattern mappings and
deferred loading or creation.
"""
from typing import Dict, Callable, TypeVar, TYPE_CHECKING
from typing import Any, Tuple, Union, Iterator
import logging
from pprint import pformat
from dataclasses import dataclass
from ..error import LibraryError
if TYPE_CHECKING:
from ..pattern import Pattern
logger = logging.getLogger(__name__)
@dataclass
class PatternGenerator:
__slots__ = ('tag', 'gen')
tag: str
""" Unique identifier for the source """
gen: Callable[[], 'Pattern']
""" Function which generates a pattern when called """
L = TypeVar('L', bound='Library')
class Library:
"""
This class is usually used to create a device library by mapping names to
functions which generate or load the relevant `Pattern` object as-needed.
Generated/loaded patterns can have "symbolic" references, where a SubPattern
object `sp` has a `None`-valued `sp.pattern` attribute, in which case the
Library expects `sp.identifier[0]` to contain a string which specifies the
referenced pattern's name.
Patterns can either be "primary" (default) or "secondary". Both get the
same deferred-load behavior, but "secondary" patterns may have conflicting
names and are not accessible through basic []-indexing. They are only used
to fill symbolic references in cases where there is no "primary" pattern
available, and only if both the referencing and referenced pattern-generators'
`tag` values match (i.e., only if they came from the same source).
Primary patterns can be turned into secondary patterns with the `demote`
method, `promote` performs the reverse (secondary -> primary) operation.
The `set_const` and `set_value` methods provide an easy way to transparently
construct PatternGenerator objects and directly create "secondary"
patterns.
The cache can be disabled by setting the `enable_cache` attribute to `False`.
"""
primary: Dict[str, PatternGenerator]
secondary: Dict[Tuple[str, str], PatternGenerator]
cache: Dict[Union[str, Tuple[str, str]], 'Pattern']
enable_cache: bool = True
def __init__(self) -> None:
self.primary = {}
self.secondary = {}
self.cache = {}
def __setitem__(self, key: str, value: PatternGenerator) -> None:
self.primary[key] = value
if key in self.cache:
del self.cache[key]
def __delitem__(self, key: str) -> None:
if isinstance(key, str):
del self.primary[key]
elif isinstance(key, tuple):
del self.secondary[key]
if key in self.cache:
del self.cache[key]
def __getitem__(self, key: str) -> 'Pattern':
return self.get_primary(key)
def __iter__(self) -> Iterator[str]:
return iter(self.keys())
def __contains__(self, key: str) -> bool:
return key in self.primary
def get_primary(self, key: str) -> 'Pattern':
if self.enable_cache and key in self.cache:
logger.debug(f'found {key} in cache')
return self.cache[key]
logger.debug(f'loading {key}')
pg = self.primary[key]
pat = pg.gen()
self.resolve_subpatterns(pat, pg.tag)
self.cache[key] = pat
return pat
def get_secondary(self, key: str, tag: str) -> 'Pattern':
logger.debug(f'get_secondary({key}, {tag})')
key2 = (key, tag)
if self.enable_cache and key2 in self.cache:
return self.cache[key2]
pg = self.secondary[key2]
pat = pg.gen()
self.resolve_subpatterns(pat, pg.tag)
self.cache[key2] = pat
return pat
def resolve_subpatterns(self, pat: 'Pattern', tag: str) -> 'Pattern':
logger.debug(f'Resolving subpatterns in {pat.name}')
for sp in pat.subpatterns:
if sp.pattern is not None:
continue
key = sp.identifier[0]
if key in self.primary:
sp.pattern = self.get_primary(key)
continue
if (key, tag) in self.secondary:
sp.pattern = self.get_secondary(key, tag)
continue
raise LibraryError(f'Broken reference to {key} (tag {tag})')
return pat
def keys(self) -> Iterator[str]:
return iter(self.primary.keys())
def values(self) -> Iterator['Pattern']:
return iter(self[key] for key in self.keys())
def items(self) -> Iterator[Tuple[str, 'Pattern']]:
return iter((key, self[key]) for key in self.keys())
def __repr__(self) -> str:
return '<Library with keys ' + repr(list(self.primary.keys())) + '>'
def set_const(self, key: str, tag: Any, const: 'Pattern', secondary: bool = False) -> None:
"""
Convenience function to avoid having to manually wrap
constant values into callables.
Args:
key: Lookup key, usually the cell/pattern name
tag: Unique tag for the source, used to disambiguate secondary patterns
const: Pattern object to return
secondary: If True, this pattern is not accessible for normal lookup, and is
only used as a sub-component of other patterns if no non-secondary
equivalent is available.
"""
pg = PatternGenerator(tag=tag, gen=lambda: const)
if secondary:
self.secondary[(key, tag)] = pg
else:
self.primary[key] = pg
def set_value(self, key: str, tag: str, value: Callable[[], 'Pattern'], secondary: bool = False) -> None:
"""
Convenience function to automatically build a PatternGenerator.
Args:
key: Lookup key, usually the cell/pattern name
tag: Unique tag for the source, used to disambiguate secondary patterns
value: Callable which takes no arguments and generates the `Pattern` object
secondary: If True, this pattern is not accessible for normal lookup, and is
only used as a sub-component of other patterns if no non-secondary
equivalent is available.
"""
pg = PatternGenerator(tag=tag, gen=value)
if secondary:
self.secondary[(key, tag)] = pg
else:
self.primary[key] = pg
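As a minimal usage sketch (assuming `Library` is importable as shown in the package `__init__` above; `make_contact` is a hypothetical generator function, not part of this changeset):

```python
from masque import Pattern
from masque.library import Library

def make_contact() -> Pattern:
    # Hypothetical generator; real code would add shapes/subpatterns here.
    return Pattern('contact')

lib = Library()
lib.set_value('contact', tag='my_source', value=make_contact)    # deferred build
lib.set_const('fill', tag='my_source', const=Pattern('fill'))    # pre-built pattern

pat = lib['contact']      # generator runs here; the result is cached
pat2 = lib['contact']     # second lookup is served from the cache
```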
def precache(self: L) -> L:
"""
Force all patterns into the cache
Returns:
self
"""
for key in self.primary:
_ = self.get_primary(key)
for key2 in self.secondary:
_ = self.get_secondary(*key2)
return self
def add(self: L, other: L) -> L:
"""
Add keys from another library into this one.
There must be no conflicting keys.
Args:
other: The library to insert keys from
Returns:
self
"""
conflicts = [key for key in other.primary
if key in self.primary]
if conflicts:
raise LibraryError('Duplicate keys encountered in library merge: ' + pformat(conflicts))
conflicts2 = [key2 for key2 in other.secondary
if key2 in self.secondary]
if conflicts2:
raise LibraryError('Duplicate secondary keys encountered in library merge: ' + pformat(conflicts2))
self.primary.update(other.primary)
self.secondary.update(other.secondary)
self.cache.update(other.cache)
return self
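A short sketch of merging two libraries with `add()` (names are illustrative); conflicting primary keys would raise `LibraryError`:

```python
from masque import Pattern
from masque.library import Library

lib_a = Library()
lib_b = Library()
lib_a.set_const('via', tag='source_a', const=Pattern('via'))
lib_b.set_const('pad', tag='source_b', const=Pattern('pad'))

lib_a.add(lib_b)          # no key conflicts, so this succeeds
assert 'pad' in lib_a
```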
def demote(self, key: str) -> None:
"""
Turn a primary pattern into a secondary one.
It will no longer be accessible through [] indexing and will only be used to
when referenced by other patterns from the same source, and only if no primary
pattern with the same name exists.
Args:
key: Lookup key, usually the cell/pattern name
"""
pg = self.primary[key]
key2 = (key, pg.tag)
self.secondary[key2] = pg
if key in self.cache:
self.cache[key2] = self.cache[key]
del self[key]
def promote(self, key: str, tag: str) -> None:
"""
Turn a secondary pattern into a primary one.
It will become accessible through [] indexing and will be used to satisfy any
reference to a pattern with its key, regardless of tag.
Args:
key: Lookup key, usually the cell/pattern name
tag: Unique tag for identifying the pattern's source, used to disambiguate
secondary patterns
"""
if key in self.primary:
raise LibraryError(f'Promoting ({key}, {tag}), but {key} already exists in primary!')
key2 = (key, tag)
pg = self.secondary[key2]
self.primary[key] = pg
if key2 in self.cache:
self.cache[key] = self.cache[key2]
del self.secondary[key2]
del self.cache[key2]
r"""
# Add a filter for names which aren't added
- Registration:
- scanned files (tag=filename, gen_fn[stream, {name: pos}])
- generator functions (tag='fn?', gen_fn[params])
- merge decision function (based on tag and cell name, can be "neither") ??? neither=keep both, load using same tag!
- Load process:
- file:
- read single cell
- check subpat identifiers, and load stuff recursively based on those. If not present, load from same file??
- function:
- generate cell
- traverse and check if we should load any subcells from elsewhere. replace if so.
* should fn generate subcells at all, or register those separately and have us control flow? maybe ask us and generate itself if not present?
- Scan all GDS files, save name -> (file, position). Keep the streams handy.
- Merge all names. This requires subcell merge because we don't know hierarchy.
- possibly include a "neither" option during merge, to deal with subcells. Means: just use parent's file.
"""
48
masque/library/utils.py Normal file
@ -0,0 +1,48 @@
from typing import Callable, TypeVar, Generic
from functools import lru_cache
Key = TypeVar('Key')
Value = TypeVar('Value')
class DeferredDict(dict, Generic[Key, Value]):
"""
This is a modified `dict` which is used to defer loading/generating
values until they are accessed.
```
bignum = my_slow_function() # slow function call, would like to defer this
numbers = DeferredDict()
numbers['big'] = my_slow_function # no slow function call here
assert(bignum == numbers['big']) # first access is slow (function called)
assert(bignum == numbers['big']) # second access is fast (result is cached)
```
The `set_const` method is provided for convenience;
`numbers['a'] = lambda: 10` is equivalent to `numbers.set_const('a', 10)`.
"""
def __init__(self, *args, **kwargs) -> None:
dict.__init__(self)
self.update(*args, **kwargs)
def __setitem__(self, key: Key, value: Callable[[], Value]) -> None:
cached_fn = lru_cache(maxsize=1)(value)
dict.__setitem__(self, key, cached_fn)
def __getitem__(self, key: Key) -> Value:
return dict.__getitem__(self, key)()
def update(self, *args, **kwargs) -> None:
for k, v in dict(*args, **kwargs).items():
self[k] = v
def __repr__(self) -> str:
return '<Library with keys ' + repr(set(self.keys())) + '>'
def set_const(self, key: Key, value: Value) -> None:
"""
Convenience function to avoid having to manually wrap
constant values into callables.
"""
self[key] = lambda: value
@ -3,34 +3,36 @@
""" """
from typing import List, Callable, Tuple, Dict, Union, Set, Sequence, Optional, Type, overload from typing import List, Callable, Tuple, Dict, Union, Set, Sequence, Optional, Type, overload
from typing import MutableMapping, Iterable from typing import MutableMapping, Iterable, TypeVar, Any
import copy import copy
import itertools
import pickle import pickle
from itertools import chain
from collections import defaultdict from collections import defaultdict
import numpy import numpy # type: ignore
from numpy import inf from numpy import inf
# .visualize imports matplotlib and matplotlib.collections # .visualize imports matplotlib and matplotlib.collections
from .subpattern import SubPattern, subpattern_t from .subpattern import SubPattern
from .repetition import GridRepetition
from .shapes import Shape, Polygon from .shapes import Shape, Polygon
from .label import Label from .label import Label
from .utils import rotation_matrix_2d, vector2, normalize_mirror from .utils import rotation_matrix_2d, vector2, normalize_mirror, AutoSlots, annotations_t
from .error import PatternError, PatternLockedError from .error import PatternError, PatternLockedError
from .traits import LockableImpl, AnnotatableImpl, Scalable
visitor_function_t = Callable[['Pattern', Tuple['Pattern'], Dict, numpy.ndarray], 'Pattern'] visitor_function_t = Callable[['Pattern', Tuple['Pattern'], Dict, numpy.ndarray], 'Pattern']
class Pattern: P = TypeVar('P', bound='Pattern')
class Pattern(LockableImpl, AnnotatableImpl, metaclass=AutoSlots):
""" """
2D layout consisting of some set of shapes, labels, and references to other Pattern objects 2D layout consisting of some set of shapes, labels, and references to other Pattern objects
(via SubPattern and GridRepetition). Shapes are assumed to inherit from (via SubPattern). Shapes are assumed to inherit from masque.shapes.Shape or provide equivalent functions.
masque.shapes.Shape or provide equivalent functions.
""" """
__slots__ = ('shapes', 'labels', 'subpatterns', 'name', 'locked') __slots__ = ('shapes', 'labels', 'subpatterns', 'name')
shapes: List[Shape] shapes: List[Shape]
""" List of all shapes in this Pattern. """ List of all shapes in this Pattern.
@ -40,26 +42,24 @@ class Pattern:
labels: List[Label] labels: List[Label]
""" List of all labels in this Pattern. """ """ List of all labels in this Pattern. """
subpatterns: List[subpattern_t] subpatterns: List[SubPattern]
""" List of all objects referencing other patterns in this Pattern. """ List of all references to other patterns (`SubPattern`s) in this `Pattern`.
Examples are SubPattern (gdsii "instances") or GridRepetition (gdsii "arrays")
Multiple objects in this list may reference the same Pattern object Multiple objects in this list may reference the same Pattern object
(multiple instances of the same object). (i.e. multiple instances of the same object).
""" """
name: str name: str
""" A name for this pattern """ """ A name for this pattern """
locked: bool
""" When the pattern is locked, no changes may be made. """
def __init__(self, def __init__(self,
name: str = '', name: str = '',
*,
shapes: Sequence[Shape] = (), shapes: Sequence[Shape] = (),
labels: Sequence[Label] = (), labels: Sequence[Label] = (),
subpatterns: Sequence[subpattern_t] = (), subpatterns: Sequence[SubPattern] = (),
annotations: Optional[annotations_t] = None,
locked: bool = False, locked: bool = False,
): ) -> None:
""" """
Basic init; arguments get assigned to member variables. Basic init; arguments get assigned to member variables.
Non-list inputs for shapes and subpatterns get converted to lists. Non-list inputs for shapes and subpatterns get converted to lists.
@ -71,7 +71,7 @@ class Pattern:
name: An identifier for the Pattern name: An identifier for the Pattern
locked: Whether to lock the pattern after construction locked: Whether to lock the pattern after construction
""" """
object.__setattr__(self, 'locked', False) LockableImpl.unlock(self)
if isinstance(shapes, list): if isinstance(shapes, list):
self.shapes = shapes self.shapes = shapes
else: else:
@ -87,31 +87,34 @@ class Pattern:
else: else:
self.subpatterns = list(subpatterns) self.subpatterns = list(subpatterns)
self.annotations = annotations if annotations is not None else {}
self.name = name self.name = name
self.locked = locked self.set_locked(locked)
def __setattr__(self, name, value): def __copy__(self: P, memo: Dict = None) -> P:
if self.locked and name != 'locked':
raise PatternLockedError()
object.__setattr__(self, name, value)
def __copy__(self, memo: Dict = None) -> 'Pattern':
return Pattern(name=self.name, return Pattern(name=self.name,
shapes=copy.deepcopy(self.shapes), shapes=copy.deepcopy(self.shapes),
labels=copy.deepcopy(self.labels), labels=copy.deepcopy(self.labels),
subpatterns=[copy.copy(sp) for sp in self.subpatterns], subpatterns=[copy.copy(sp) for sp in self.subpatterns],
annotations=copy.deepcopy(self.annotations),
locked=self.locked) locked=self.locked)
def __deepcopy__(self, memo: Dict = None) -> 'Pattern': def __deepcopy__(self: P, memo: Dict = None) -> P:
memo = {} if memo is None else memo memo = {} if memo is None else memo
new = Pattern(name=self.name, new = Pattern(
shapes=copy.deepcopy(self.shapes, memo), name=self.name,
labels=copy.deepcopy(self.labels, memo), shapes=copy.deepcopy(self.shapes, memo),
subpatterns=copy.deepcopy(self.subpatterns, memo), labels=copy.deepcopy(self.labels, memo),
locked=self.locked) subpatterns=copy.deepcopy(self.subpatterns, memo),
annotations=copy.deepcopy(self.annotations, memo),
locked=self.locked)
return new return new
def append(self, other_pattern: 'Pattern') -> 'Pattern': def rename(self: P, name: str) -> P:
self.name = name
return self
def append(self: P, other_pattern: P) -> P:
""" """
Appends all shapes, labels and subpatterns from other_pattern to self's shapes, Appends all shapes, labels and subpatterns from other_pattern to self's shapes,
labels, and subpatterns. labels, and subpatterns.
@ -127,12 +130,12 @@ class Pattern:
self.labels += other_pattern.labels self.labels += other_pattern.labels
return self return self
def subset(self, def subset(self: P,
shapes_func: Callable[[Shape], bool] = None, shapes_func: Callable[[Shape], bool] = None,
labels_func: Callable[[Label], bool] = None, labels_func: Callable[[Label], bool] = None,
subpatterns_func: Callable[[subpattern_t], bool] = None, subpatterns_func: Callable[[SubPattern], bool] = None,
recursive: bool = False, recursive: bool = False,
) -> 'Pattern': ) -> P:
""" """
Returns a Pattern containing only the entities (e.g. shapes) for which the Returns a Pattern containing only the entities (e.g. shapes) for which the
given entity_func returns True. given entity_func returns True.
@ -152,7 +155,7 @@ class Pattern:
A Pattern containing all the shapes and subpatterns for which the parameter A Pattern containing all the shapes and subpatterns for which the parameter
functions return True functions return True
""" """
def do_subset(src: Optional['Pattern']) -> Optional['Pattern']: def do_subset(src: Optional[P]) -> Optional[P]:
if src is None: if src is None:
return None return None
pat = Pattern(name=src.name) pat = Pattern(name=src.name)
@ -172,10 +175,10 @@ class Pattern:
assert(pat is not None) assert(pat is not None)
return pat return pat
def apply(self, def apply(self: P,
func: Callable[[Optional['Pattern']], Optional['Pattern']], func: Callable[[Optional[P]], Optional[P]],
memo: Optional[Dict[int, Optional['Pattern']]] = None, memo: Optional[Dict[int, Optional[P]]] = None,
) -> Optional['Pattern']: ) -> Optional[P]:
""" """
Recursively apply func() to this pattern and any pattern it references. Recursively apply func() to this pattern and any pattern it references.
func() is expected to take and return a Pattern. func() is expected to take and return a Pattern.
@ -215,13 +218,13 @@ class Pattern:
pat = memo[pat_id] pat = memo[pat_id]
return pat return pat
def dfs(self, def dfs(self: P,
visit_before: visitor_function_t = None, visit_before: visitor_function_t = None,
visit_after: visitor_function_t = None, visit_after: visitor_function_t = None,
transform: Union[numpy.ndarray, bool, None] = False, transform: Union[numpy.ndarray, bool, None] = False,
memo: Optional[Dict] = None, memo: Optional[Dict] = None,
hierarchy: Tuple['Pattern', ...] = (), hierarchy: Tuple[P, ...] = (),
) -> 'Pattern': ) -> P:
""" """
Experimental convenience function. Experimental convenience function.
Performs a depth-first traversal of this pattern and its subpatterns. Performs a depth-first traversal of this pattern and its subpatterns.
@ -277,7 +280,7 @@ class Pattern:
if transform is not False: if transform is not False:
sign = numpy.ones(2) sign = numpy.ones(2)
if transform[3]: if transform[3]:
sign[1] = -1 sign[1] = -1
xy = numpy.dot(rotation_matrix_2d(transform[2]), subpattern.offset * sign) xy = numpy.dot(rotation_matrix_2d(transform[2]), subpattern.offset * sign)
mirror_x, angle = normalize_mirror(subpattern.mirrored) mirror_x, angle = normalize_mirror(subpattern.mirrored)
angle += subpattern.rotation angle += subpattern.rotation
@ -287,20 +290,23 @@ class Pattern:
sp_transform = False sp_transform = False
if subpattern.pattern is not None: if subpattern.pattern is not None:
subpattern.pattern = subpattern.pattern.dfs(visit_before=visit_before, result = subpattern.pattern.dfs(visit_before=visit_before,
visit_after=visit_after, visit_after=visit_after,
transform=sp_transform, transform=sp_transform,
memo=memo, memo=memo,
hierarchy=hierarchy + (self,)) hierarchy=hierarchy + (self,))
if result is not subpattern.pattern:
# skip assignment to avoid PatternLockedError unless modified
subpattern.pattern = result
if visit_after is not None: if visit_after is not None:
pat = visit_after(pat, hierarchy=hierarchy, memo=memo, transform=transform) # type: ignore pat = visit_after(pat, hierarchy=hierarchy, memo=memo, transform=transform) # type: ignore
return pat return pat
def polygonize(self, def polygonize(self: P,
poly_num_points: Optional[int] = None, poly_num_points: Optional[int] = None,
poly_max_arclen: Optional[float] = None, poly_max_arclen: Optional[float] = None,
) -> 'Pattern': ) -> P:
""" """
Calls `.to_polygons(...)` on all the shapes in this Pattern and any referenced patterns, Calls `.to_polygons(...)` on all the shapes in this Pattern and any referenced patterns,
replacing them with the returned polygons. replacing them with the returned polygons.
@ -317,18 +323,18 @@ class Pattern:
self self
""" """
old_shapes = self.shapes old_shapes = self.shapes
self.shapes = list(itertools.chain.from_iterable( self.shapes = list(chain.from_iterable(
(shape.to_polygons(poly_num_points, poly_max_arclen) (shape.to_polygons(poly_num_points, poly_max_arclen)
for shape in old_shapes))) for shape in old_shapes)))
for subpat in self.subpatterns: for subpat in self.subpatterns:
if subpat.pattern is not None: if subpat.pattern is not None:
subpat.pattern.polygonize(poly_num_points, poly_max_arclen) subpat.pattern.polygonize(poly_num_points, poly_max_arclen)
return self return self
def manhattanize(self, def manhattanize(self: P,
grid_x: numpy.ndarray, grid_x: numpy.ndarray,
grid_y: numpy.ndarray, grid_y: numpy.ndarray,
) -> 'Pattern': ) -> P:
""" """
Calls `.polygonize()` and `.flatten()` on the pattern, then calls `.manhattanize()` on all the Calls `.polygonize()` and `.flatten()` on the pattern, then calls `.manhattanize()` on all the
resulting shapes, replacing them with the returned Manhattan polygons. resulting shapes, replacing them with the returned Manhattan polygons.
@ -343,15 +349,15 @@ class Pattern:
self.polygonize().flatten() self.polygonize().flatten()
old_shapes = self.shapes old_shapes = self.shapes
self.shapes = list(itertools.chain.from_iterable( self.shapes = list(chain.from_iterable(
(shape.manhattanize(grid_x, grid_y) for shape in old_shapes))) (shape.manhattanize(grid_x, grid_y) for shape in old_shapes)))
return self return self
def subpatternize(self, def subpatternize(self: P,
recursive: bool = True, recursive: bool = True,
norm_value: int = int(1e6), norm_value: int = int(1e6),
exclude_types: Tuple[Type] = (Polygon,) exclude_types: Tuple[Type] = (Polygon,)
) -> 'Pattern': ) -> P:
""" """
Iterates through this `Pattern` and all referenced `Pattern`s. Within each `Pattern`, it iterates Iterates through this `Pattern` and all referenced `Pattern`s. Within each `Pattern`, it iterates
over all shapes, calling `.normalized_form(norm_value)` on them to retrieve a scale-, over all shapes, calling `.normalized_form(norm_value)` on them to retrieve a scale-,
@ -407,9 +413,8 @@ class Pattern:
for i, values in shape_table[label][1]: for i, values in shape_table[label][1]:
(offset, scale, rotation, mirror_x, dose) = values (offset, scale, rotation, mirror_x, dose) = values
subpat = SubPattern(pattern=pat, offset=offset, scale=scale, self.addsp(pattern=pat, offset=offset, scale=scale,
rotation=rotation, dose=dose, mirrored=(mirror_x, False)) rotation=rotation, dose=dose, mirrored=(mirror_x, False))
self.subpatterns.append(subpat)
shapes_to_remove.append(i) shapes_to_remove.append(i)
# Remove any shapes for which we have created subpatterns. # Remove any shapes for which we have created subpatterns.
@ -441,28 +446,31 @@ class Pattern:
pass pass
def referenced_patterns_by_id(self, def referenced_patterns_by_id(self,
include_none: bool = False include_none: bool = False,
recursive: bool = True,
) -> Union[Dict[int, Optional['Pattern']], ) -> Union[Dict[int, Optional['Pattern']],
Dict[int, 'Pattern']]: Dict[int, 'Pattern']]:
""" """
Create a dictionary with `{id(pat): pat}` for all Pattern objects referenced by this Create a dictionary with `{id(pat): pat}` for all Pattern objects referenced by this
Pattern (operates recursively on all referenced Patterns as well) Pattern (by default, operates recursively on all referenced Patterns as well).
Args: Args:
include_none: If `True`, references to `None` will be included. Default `False`. include_none: If `True`, references to `None` will be included. Default `False`.
recursive: If `True`, operates recursively on all referenced patterns. Default `True`.
Returns: Returns:
Dictionary with `{id(pat): pat}` for all referenced Pattern objects Dictionary with `{id(pat): pat}` for all referenced Pattern objects
""" """
ids: Dict[int, Optional['Pattern']] = {} ids: Dict[int, Optional['Pattern']] = {}
for subpat in self.subpatterns: for subpat in self.subpatterns:
if id(subpat.pattern) not in ids: pat = subpat.pattern
if subpat.pattern is not None: if id(pat) in ids:
ids[id(subpat.pattern)] = subpat.pattern continue
ids.update(subpat.pattern.referenced_patterns_by_id()) if include_none or pat is not None:
elif include_none: ids[id(pat)] = pat
ids[id(subpat.pattern)] = subpat.pattern if recursive and pat is not None:
ids.update(pat.referenced_patterns_by_id())
return ids return ids
def referenced_patterns_by_name(self, **kwargs) -> List[Tuple[Optional[str], Optional['Pattern']]]: def referenced_patterns_by_name(self, **kwargs) -> List[Tuple[Optional[str], Optional['Pattern']]]:
@ -483,6 +491,31 @@ class Pattern:
pat_list = [(p.name if p is not None else None, p) for p in pats_by_id.values()] pat_list = [(p.name if p is not None else None, p) for p in pats_by_id.values()]
return pat_list return pat_list
def subpatterns_by_id(self,
include_none: bool = False,
recursive: bool = True,
) -> Dict[int, List[SubPattern]]:
"""
Create a dictionary which maps `{id(referenced_pattern): [subpattern0, ...]}`
for all SubPattern objects referenced by this Pattern (by default, operates
recursively on all referenced Patterns as well).
Args:
include_none: If `True`, references to `None` will be included. Default `False`.
recursive: If `True`, operates recursively on all referenced patterns. Default `True`.
Returns:
Dictionary mapping each pattern id to a list of subpattern objects referencing the pattern.
"""
ids: Dict[int, List[SubPattern]] = defaultdict(list)
for subpat in self.subpatterns:
pat = subpat.pattern
if include_none or pat is not None:
ids[id(pat)].append(subpat)
if recursive and pat is not None:
ids.update(pat.subpatterns_by_id(include_none=include_none))
return dict(ids)
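For example, a hedged sketch of using `subpatterns_by_id` to count placements of each referenced pattern (assuming `SubPattern` accepts the `pattern=` keyword as used elsewhere in this changeset):

```python
from masque import Pattern, SubPattern

child = Pattern('child')
top = Pattern('top', subpatterns=[SubPattern(pattern=child),
                                  SubPattern(pattern=child)])

refs = top.subpatterns_by_id()
# Both placements reference the same Pattern object, so they share one entry.
assert len(refs[id(child)]) == 2
```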
def get_bounds(self) -> Union[numpy.ndarray, None]: def get_bounds(self) -> Union[numpy.ndarray, None]:
""" """
Return a `numpy.ndarray` containing `[[x_min, y_min], [x_max, y_max]]`, corresponding to the Return a `numpy.ndarray` containing `[[x_min, y_min], [x_max, y_max]]`, corresponding to the
@ -492,13 +525,12 @@ class Pattern:
Returns: Returns:
`[[x_min, y_min], [x_max, y_max]]` or `None` `[[x_min, y_min], [x_max, y_max]]` or `None`
""" """
entries = self.shapes + self.subpatterns + self.labels if self.is_empty():
if not entries:
return None return None
min_bounds = numpy.array((+inf, +inf)) min_bounds = numpy.array((+inf, +inf))
max_bounds = numpy.array((-inf, -inf)) max_bounds = numpy.array((-inf, -inf))
for entry in entries: for entry in chain(self.shapes, self.subpatterns, self.labels):
bounds = entry.get_bounds() bounds = entry.get_bounds()
if bounds is None: if bounds is None:
continue continue
@ -509,7 +541,7 @@ class Pattern:
else: else:
return numpy.vstack((min_bounds, max_bounds)) return numpy.vstack((min_bounds, max_bounds))
def flatten(self) -> 'Pattern': def flatten(self: P) -> P:
""" """
Removes all subpatterns and adds equivalent shapes. Removes all subpatterns and adds equivalent shapes.
@ -545,7 +577,50 @@ class Pattern:
self.append(p) self.append(p)
return self return self
def translate_elements(self, offset: vector2) -> 'Pattern': def wrap_repeated_shapes(self: P,
name_func: Callable[['Pattern', Union[Shape, Label]], str] = lambda p, s: '_repetition',
recursive: bool = True,
) -> P:
"""
Wraps all shapes and labels with a non-`None` `repetition` attribute
into a `SubPattern`/`Pattern` combination, and applies the `repetition`
to each `SubPattern` instead of its contained shape.
Args:
name_func: Function f(this_pattern, shape) which generates a name for the
wrapping pattern. Default always returns '_repetition'.
recursive: If `True`, this function is also applied to all referenced patterns
recursively. Default `True`.
Returns:
self
"""
def do_wrap(pat: Optional[Pattern]) -> Optional[Pattern]:
if pat is None:
return pat
for shape in pat.shapes:
if shape.repetition is None:
continue
pat.addsp(Pattern(name_func(pat, shape), shapes=[shape]), repetition=shape.repetition)
shape.repetition = None
for label in pat.labels:
if label.repetition is None:
continue
pat.addsp(Pattern(name_func(pat, label), labels=[label]), repetition=label.repetition)
label.repetition = None
return pat
if recursive:
self.apply(do_wrap)
else:
do_wrap(self)
return self
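A hedged one-line sketch of the intended call (the naming function here is illustrative); this is mainly useful before writing to formats such as GDS, which have no per-shape repetition concept:

```python
from masque import Pattern

pat = Pattern('top')   # illustrative; imagine its shapes/labels carry `repetition` attributes
# Hoist per-element repetitions into SubPattern placements.
pat.wrap_repeated_shapes(name_func=lambda parent, elem: parent.name + '_rep')
```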
def translate_elements(self: P, offset: vector2) -> P:
""" """
Translates all shapes, label, and subpatterns by the given offset. Translates all shapes, label, and subpatterns by the given offset.
@ -555,11 +630,11 @@ class Pattern:
Returns: Returns:
self self
""" """
for entry in self.shapes + self.subpatterns + self.labels: for entry in chain(self.shapes, self.subpatterns, self.labels):
entry.translate(offset) entry.translate(offset)
return self return self
def scale_elements(self, c: float) -> 'Pattern': def scale_elements(self: P, c: float) -> P:
"""" """"
Scales all shapes and subpatterns by the given value. Scales all shapes and subpatterns by the given value.
@ -569,11 +644,11 @@ class Pattern:
Returns: Returns:
self self
""" """
for entry in self.shapes + self.subpatterns: for entry in chain(self.shapes, self.subpatterns):
entry.scale_by(c) entry.scale_by(c)
return self return self
def scale_by(self, c: float) -> 'Pattern': def scale_by(self: P, c: float) -> P:
""" """
Scale this Pattern by the given value Scale this Pattern by the given value
(all shapes and subpatterns and their offsets are scaled) (all shapes and subpatterns and their offsets are scaled)
@ -584,14 +659,15 @@ class Pattern:
Returns: Returns:
self self
""" """
for entry in self.shapes + self.subpatterns: entry: Scalable
for entry in chain(self.shapes, self.subpatterns):
entry.offset *= c entry.offset *= c
entry.scale_by(c) entry.scale_by(c)
for label in self.labels: for label in self.labels:
label.offset *= c label.offset *= c
return self return self
def rotate_around(self, pivot: vector2, rotation: float) -> 'Pattern': def rotate_around(self: P, pivot: vector2, rotation: float) -> P:
""" """
Rotate the Pattern around a location. Rotate the Pattern around a location.
@ -609,7 +685,7 @@ class Pattern:
self.translate_elements(+pivot) self.translate_elements(+pivot)
return self return self
def rotate_element_centers(self, rotation: float) -> 'Pattern': def rotate_element_centers(self: P, rotation: float) -> P:
""" """
Rotate the offsets of all shapes, labels, and subpatterns around (0, 0) Rotate the offsets of all shapes, labels, and subpatterns around (0, 0)
@ -619,11 +695,11 @@ class Pattern:
Returns: Returns:
self self
""" """
for entry in self.shapes + self.subpatterns + self.labels: for entry in chain(self.shapes, self.subpatterns, self.labels):
entry.offset = numpy.dot(rotation_matrix_2d(rotation), entry.offset) entry.offset = numpy.dot(rotation_matrix_2d(rotation), entry.offset)
return self return self
def rotate_elements(self, rotation: float) -> 'Pattern': def rotate_elements(self: P, rotation: float) -> P:
""" """
Rotate each shape and subpattern around its center (offset) Rotate each shape and subpattern around its center (offset)
@ -633,11 +709,11 @@ class Pattern:
Returns: Returns:
self self
""" """
for entry in self.shapes + self.subpatterns: for entry in chain(self.shapes, self.subpatterns):
entry.rotate(rotation) entry.rotate(rotation)
return self return self
def mirror_element_centers(self, axis: int) -> 'Pattern': def mirror_element_centers(self: P, axis: int) -> P:
""" """
Mirror the offsets of all shapes, labels, and subpatterns across an axis Mirror the offsets of all shapes, labels, and subpatterns across an axis
@ -648,11 +724,11 @@ class Pattern:
Returns: Returns:
self self
""" """
for entry in self.shapes + self.subpatterns + self.labels: for entry in chain(self.shapes, self.subpatterns, self.labels):
entry.offset[axis - 1] *= -1 entry.offset[axis - 1] *= -1
return self return self
def mirror_elements(self, axis: int) -> 'Pattern': def mirror_elements(self: P, axis: int) -> P:
""" """
Mirror each shape and subpattern across an axis, relative to its Mirror each shape and subpattern across an axis, relative to its
offset offset
@ -664,11 +740,11 @@ class Pattern:
Returns: Returns:
self self
""" """
for entry in self.shapes + self.subpatterns: for entry in chain(self.shapes, self.subpatterns):
entry.mirror(axis) entry.mirror(axis)
return self return self
def mirror(self, axis: int) -> 'Pattern': def mirror(self: P, axis: int) -> P:
""" """
Mirror the Pattern across an axis Mirror the Pattern across an axis
@ -683,7 +759,7 @@ class Pattern:
self.mirror_element_centers(axis) self.mirror_element_centers(axis)
return self return self
def scale_element_doses(self, c: float) -> 'Pattern': def scale_element_doses(self: P, c: float) -> P:
""" """
Multiply all shape and subpattern doses by a factor Multiply all shape and subpattern doses by a factor
@ -693,11 +769,11 @@ class Pattern:
Return: Return:
self self
""" """
for entry in self.shapes + self.subpatterns: for entry in chain(self.shapes, self.subpatterns):
entry.dose *= c entry.dose *= c
return self return self
def copy(self) -> 'Pattern': def copy(self: P) -> P:
""" """
Return a copy of the Pattern, deep-copying shapes and copying subpattern Return a copy of the Pattern, deep-copying shapes and copying subpattern
entries, but not deep-copying any referenced patterns. entries, but not deep-copying any referenced patterns.
@ -709,7 +785,7 @@ class Pattern:
""" """
return copy.copy(self) return copy.copy(self)
def deepcopy(self) -> 'Pattern': def deepcopy(self: P) -> P:
""" """
Convenience method for `copy.deepcopy(pattern)` Convenience method for `copy.deepcopy(pattern)`
@ -723,11 +799,26 @@ class Pattern:
Returns: Returns:
True if the pattern contains no shapes, labels, or subpatterns. True if the pattern contains no shapes, labels, or subpatterns.
""" """
return (len(self.subpatterns) == 0 and return (len(self.subpatterns) == 0
len(self.shapes) == 0 and and len(self.shapes) == 0
len(self.labels) == 0) and len(self.labels) == 0)
def lock(self) -> 'Pattern': def addsp(self, *args: Dict[str, Any], **kwargs: Dict[str, Any]):
"""
Convenience function which constructs a subpattern object and adds it
to this pattern.
Args:
*args: Passed to SubPattern()
**kwargs: Passed to SubPattern()
Returns:
self
"""
self.subpatterns.append(SubPattern(*args, **kwargs))
return self
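A quick sketch of the convenience method (assuming `SubPattern` accepts the usual `offset`/`rotation` keywords used elsewhere in this changeset):

```python
import numpy
from masque import Pattern

child = Pattern('child')
top = Pattern('top')
# Equivalent to top.subpatterns.append(SubPattern(pattern=child, offset=..., rotation=...))
top.addsp(pattern=child, offset=(100.0, 0.0), rotation=numpy.pi / 2)
```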
def lock(self: P) -> P:
""" """
Lock the pattern, raising an exception if it is modified. Lock the pattern, raising an exception if it is modified.
Also see `deeplock()`. Also see `deeplock()`.
@ -735,26 +826,28 @@ class Pattern:
Returns: Returns:
self self
""" """
self.shapes = tuple(self.shapes) if not self.locked:
self.labels = tuple(self.labels) self.shapes = tuple(self.shapes)
self.subpatterns = tuple(self.subpatterns) self.labels = tuple(self.labels)
object.__setattr__(self, 'locked', True) self.subpatterns = tuple(self.subpatterns)
LockableImpl.lock(self)
return self return self
def unlock(self) -> 'Pattern': def unlock(self: P) -> P:
""" """
Unlock the pattern Unlock the pattern
Returns: Returns:
self self
""" """
object.__setattr__(self, 'locked', False) if self.locked:
self.shapes = list(self.shapes) LockableImpl.unlock(self)
self.labels = list(self.labels) self.shapes = list(self.shapes)
self.subpatterns = list(self.subpatterns) self.labels = list(self.labels)
self.subpatterns = list(self.subpatterns)
return self return self
def deeplock(self) -> 'Pattern': def deeplock(self: P) -> P:
""" """
Recursively lock the pattern, all referenced shapes, subpatterns, and labels. Recursively lock the pattern, all referenced shapes, subpatterns, and labels.
@ -762,13 +855,13 @@ class Pattern:
self self
""" """
self.lock() self.lock()
for ss in self.shapes + self.labels: for ss in chain(self.shapes, self.labels):
ss.lock() ss.lock()
for sp in self.subpatterns: for sp in self.subpatterns:
sp.deeplock() sp.deeplock()
return self return self
def deepunlock(self) -> 'Pattern': def deepunlock(self: P) -> P:
""" """
Recursively unlock the pattern, all referenced shapes, subpatterns, and labels. Recursively unlock the pattern, all referenced shapes, subpatterns, and labels.
@ -779,7 +872,7 @@ class Pattern:
self self
""" """
self.unlock() self.unlock()
for ss in self.shapes + self.labels: for ss in chain(self.shapes, self.labels):
ss.unlock() ss.unlock()
for sp in self.subpatterns: for sp in self.subpatterns:
sp.deepunlock() sp.deepunlock()
@ -819,7 +912,8 @@ class Pattern:
offset: vector2 = (0., 0.), offset: vector2 = (0., 0.),
line_color: str = 'k', line_color: str = 'k',
fill_color: str = 'none', fill_color: str = 'none',
overdraw: bool = False): overdraw: bool = False,
) -> None:
""" """
Draw a picture of the Pattern and wait for the user to inspect it Draw a picture of the Pattern and wait for the user to inspect it
@ -835,8 +929,8 @@ class Pattern:
overdraw: Whether to create a new figure or draw on a pre-existing one overdraw: Whether to create a new figure or draw on a pre-existing one
""" """
# TODO: add text labels to visualize() # TODO: add text labels to visualize()
from matplotlib import pyplot from matplotlib import pyplot # type: ignore
import matplotlib.collections import matplotlib.collections # type: ignore
offset = numpy.array(offset, dtype=float) offset = numpy.array(offset, dtype=float)
@ -878,12 +972,9 @@ class Pattern:
A filtered list in which no pattern is referenced by any other pattern. A filtered list in which no pattern is referenced by any other pattern.
""" """
def get_children(pat: Pattern, memo: Set) -> Set: def get_children(pat: Pattern, memo: Set) -> Set:
if pat in memo:
return memo
children = set(sp.pattern for sp in pat.subpatterns if sp.pattern is not None) children = set(sp.pattern for sp in pat.subpatterns if sp.pattern is not None)
new_children = children - memo new_children = children - memo
memo |= children memo |= new_children
for child_pat in new_children: for child_pat in new_children:
memo |= get_children(child_pat, memo) memo |= get_children(child_pat, memo)
@ -1,81 +0,0 @@
from typing import List, Tuple, Callable, TypeVar, Optional
from abc import ABCMeta, abstractmethod
import copy
import numpy
from ..error import PatternError, PatternLockedError
from ..utils import is_scalar, rotation_matrix_2d, vector2, layer_t
T = TypeVar('T', bound='Positionable')
class Positionable(metaclass=ABCMeta):
"""
Abstract class for all positionable entities
"""
__slots__ = ('_offset',)
_offset: numpy.ndarray
""" `[x_offset, y_offset]` """
# --- Abstract methods
@abstractmethod
def get_bounds(self) -> numpy.ndarray:
"""
Returns `[[x_min, y_min], [x_max, y_max]]` which specify a minimal bounding box for the entity.
"""
pass
# ---- Non-abstract properties
# offset property
@property
def offset(self) -> numpy.ndarray:
"""
[x, y] offset
"""
return self._offset
@offset.setter
def offset(self, val: vector2):
if not isinstance(val, numpy.ndarray):
val = numpy.array(val, dtype=float)
if val.size != 2:
raise PatternError('Offset must be convertible to size-2 ndarray')
self._offset = val.flatten()
# ---- Non-abstract methods
def translate(self: T, offset: vector2) -> T:
"""
Translate the entity by the given offset
Args:
offset: [x_offset, y,offset]
Returns:
self
"""
self.offset += offset
return self
def lock(self: T) -> T:
"""
Lock the entity, disallowing further changes
Returns:
self
"""
self.offset.flags.writeable = False
return self
def unlock(self: T) -> T:
"""
Unlock the entity
Returns:
self
"""
self.offset.flags.writeable = True
return self
0
masque/py.typed Normal file
@ -1,78 +1,47 @@
""" """
Repetitions provides support for efficiently nesting multiple identical Repetitions provide support for efficiently representing multiple identical
instances of a Pattern in the same parent Pattern. instances of an object.
""" """
from typing import Union, List, Dict, Tuple, Optional, Sequence, TYPE_CHECKING, Any from typing import Union, Dict, Optional, Sequence, Any
import copy import copy
from abc import ABCMeta, abstractmethod
import numpy import numpy # type: ignore
from numpy import pi
from .error import PatternError, PatternLockedError from .error import PatternError
from .utils import is_scalar, rotation_matrix_2d, vector2 from .utils import rotation_matrix_2d, vector2, AutoSlots
from .traits import LockableImpl, Copyable, Scalable, Rotatable, Mirrorable
if TYPE_CHECKING:
from . import Pattern
# TODO need top-level comment about what order rotation/scale/offset/mirror/array are applied class Repetition(Copyable, Rotatable, Mirrorable, Scalable, metaclass=ABCMeta):
class GridRepetition:
""" """
GridRepetition provides support for efficiently embedding multiple copies of a `Pattern` Interface common to all objects which specify repetitions
into another `Pattern` at regularly-spaced offsets.
Note that rotation, scaling, and mirroring are applied to individual instances of the
pattern, not to the grid vectors.
The order of operations is
1. A single refernce instance to the target pattern is mirrored
2. The single instance is rotated.
3. The instance is scaled by the scaling factor.
4. The instance is shifted by the provided offset
(no mirroring/scaling/rotation is applied to the offset).
5. Additional copies of the instance will appear at coordinates specified by
`(offset + aa * a_vector + bb * b_vector)`, with `aa in range(0, a_count)`
and `bb in range(0, b_count)`. All instance locations remain unaffected by
mirroring/scaling/rotation, though each instance's data will be transformed
relative to the instance's location (i.e. relative to the contained pattern's
(0, 0) point).
""" """
__slots__ = ('_pattern', __slots__ = ()
'_offset',
'_rotation', @property
'_dose', @abstractmethod
'_scale', def displacements(self) -> numpy.ndarray:
'_mirrored', """
'_a_vector', An Nx2 ndarray specifying all offsets generated by this repetition
"""
pass
class Grid(LockableImpl, Repetition, metaclass=AutoSlots):
"""
`Grid` describes a 2D grid formed by two basis vectors and two 'counts' (sizes).
The second basis vector and count (`b_vector` and `b_count`) may be omitted,
which makes the grid describe a 1D array.
Note that the offsets in either the 2D or 1D grids do not have to be axis-aligned.
"""
__slots__ = ('_a_vector',
'_b_vector', '_b_vector',
'_a_count', '_a_count',
'_b_count', '_b_count')
'identifier',
'locked')
_pattern: Optional['Pattern']
""" The `Pattern` being instanced """
_offset: numpy.ndarray
""" (x, y) offset for the base instance """
_dose: float
""" Scaling factor applied to the dose """
_rotation: float
""" Rotation of the individual instances in the grid (not the grid vectors).
Radians, counterclockwise.
"""
_scale: float
""" Scaling factor applied to individual instances in the grid (not the grid vectors) """
_mirrored: numpy.ndarray # ndarray[bool]
""" Whether to mirror individual instances across the x and y axes
(Applies to individual instances in the grid, not the grid vectors)
"""
_a_vector: numpy.ndarray _a_vector: numpy.ndarray
""" Vector `[x, y]` specifying the first lattice vector of the grid. """ Vector `[x, y]` specifying the first lattice vector of the grid.
@ -91,28 +60,14 @@ class GridRepetition:
_b_count: int _b_count: int
""" Number of instances along the direction specified by the `b_vector` """ """ Number of instances along the direction specified by the `b_vector` """
identifier: Tuple[Any, ...]
""" Arbitrary identifier, used internally by some `masque` functions. """
locked: bool
""" If `True`, disallows changes to the GridRepetition """
def __init__(self, def __init__(self,
pattern: Optional['Pattern'],
a_vector: numpy.ndarray, a_vector: numpy.ndarray,
a_count: int, a_count: int,
b_vector: Optional[numpy.ndarray] = None, b_vector: Optional[numpy.ndarray] = None,
b_count: Optional[int] = 1, b_count: Optional[int] = 1,
offset: vector2 = (0.0, 0.0), locked: bool = False,):
rotation: float = 0.0,
mirrored: Optional[Sequence[bool]] = None,
dose: float = 1.0,
scale: float = 1.0,
locked: bool = False,
identifier: Tuple[Any, ...] = ()):
""" """
Args: Args:
pattern: Pattern to reference.
a_vector: First lattice vector, of the form `[x, y]`. a_vector: First lattice vector, of the form `[x, y]`.
Specifies center-to-center spacing between adjacent instances. Specifies center-to-center spacing between adjacent instances.
a_count: Number of elements in the a_vector direction. a_count: Number of elements in the a_vector direction.
@ -121,14 +76,7 @@ class GridRepetition:
Can be omitted when specifying a 1D array. Can be omitted when specifying a 1D array.
b_count: Number of elements in the `b_vector` direction. b_count: Number of elements in the `b_vector` direction.
Should be omitted if `b_vector` was omitted. Should be omitted if `b_vector` was omitted.
offset: (x, y) offset applied to all instances. locked: Whether the `Grid` is locked after initialization.
rotation: Rotation (radians, counterclockwise) applied to each instance.
Relative to each instance's (0, 0).
mirrored: Whether to mirror individual instances across the x and y axes.
dose: Scaling factor applied to the dose.
scale: Scaling factor applied to the instances' geometry.
locked: Whether the `GridRepetition` is locked after initialization.
identifier: Arbitrary tuple, used internally by some `masque` functions.
Raises: Raises:
PatternError if `b_*` inputs conflict with each other PatternError if `b_*` inputs conflict with each other
@ -144,132 +92,31 @@ class GridRepetition:
b_vector = numpy.array([0.0, 0.0]) b_vector = numpy.array([0.0, 0.0])
if a_count < 1: if a_count < 1:
raise PatternError('Repetition has too-small a_count: ' raise PatternError(f'Repetition has too-small a_count: {a_count}')
'{}'.format(a_count))
if b_count < 1: if b_count < 1:
raise PatternError('Repetition has too-small b_count: ' raise PatternError(f'Repetition has too-small b_count: {b_count}')
'{}'.format(b_count))
object.__setattr__(self, 'locked', False) object.__setattr__(self, 'locked', False)
self.a_vector = a_vector self.a_vector = a_vector
self.b_vector = b_vector self.b_vector = b_vector
self.a_count = a_count self.a_count = a_count
self.b_count = b_count self.b_count = b_count
self.identifier = identifier
self.pattern = pattern
self.offset = offset
self.rotation = rotation
self.dose = dose
self.scale = scale
if mirrored is None:
mirrored = [False, False]
self.mirrored = mirrored
self.locked = locked self.locked = locked
def __setattr__(self, name, value): def __copy__(self) -> 'Grid':
if self.locked and name != 'locked': new = Grid(a_vector=self.a_vector.copy(),
raise PatternLockedError() b_vector=copy.copy(self.b_vector),
object.__setattr__(self, name, value) a_count=self.a_count,
b_count=self.b_count,
def __copy__(self) -> 'GridRepetition': locked=self.locked)
new = GridRepetition(pattern=self.pattern,
a_vector=self.a_vector.copy(),
b_vector=copy.copy(self.b_vector),
a_count=self.a_count,
b_count=self.b_count,
offset=self.offset.copy(),
rotation=self.rotation,
dose=self.dose,
scale=self.scale,
mirrored=self.mirrored.copy(),
locked=self.locked)
return new return new
def __deepcopy__(self, memo: Dict = None) -> 'GridRepetition': def __deepcopy__(self, memo: Dict = None) -> 'Grid':
memo = {} if memo is None else memo memo = {} if memo is None else memo
new = copy.copy(self).unlock() new = copy.copy(self).unlock()
new.pattern = copy.deepcopy(self.pattern, memo)
new.locked = self.locked new.locked = self.locked
return new return new
# pattern property
@property
def pattern(self) -> Optional['Pattern']:
return self._pattern
@pattern.setter
def pattern(self, val: Optional['Pattern']):
from .pattern import Pattern
if val is not None and not isinstance(val, Pattern):
raise PatternError('Provided pattern {} is not a Pattern object or None!'.format(val))
self._pattern = val
# offset property
@property
def offset(self) -> numpy.ndarray:
return self._offset
@offset.setter
def offset(self, val: vector2):
if self.locked:
raise PatternLockedError()
if not isinstance(val, numpy.ndarray):
val = numpy.array(val, dtype=float)
if val.size != 2:
raise PatternError('Offset must be convertible to size-2 ndarray')
self._offset = val.flatten().astype(float)
# dose property
@property
def dose(self) -> float:
return self._dose
@dose.setter
def dose(self, val: float):
if not is_scalar(val):
raise PatternError('Dose must be a scalar')
if not val >= 0:
raise PatternError('Dose must be non-negative')
self._dose = val
# scale property
@property
def scale(self) -> float:
return self._scale
@scale.setter
def scale(self, val: float):
if not is_scalar(val):
raise PatternError('Scale must be a scalar')
if not val > 0:
raise PatternError('Scale must be positive')
self._scale = val
# Rotation property [ccw]
@property
def rotation(self) -> float:
return self._rotation
@rotation.setter
def rotation(self, val: float):
if not is_scalar(val):
raise PatternError('Rotation must be a scalar')
self._rotation = val % (2 * pi)
# Mirrored property
@property
def mirrored(self) -> numpy.ndarray: # ndarray[bool]
return self._mirrored
@mirrored.setter
def mirrored(self, val: Sequence[bool]):
if is_scalar(val):
raise PatternError('Mirrored must be a 2-element list of booleans')
self._mirrored = numpy.array(val, dtype=bool, copy=True)
# a_vector property # a_vector property
@property @property
def a_vector(self) -> numpy.ndarray: def a_vector(self) -> numpy.ndarray:
@ -320,71 +167,15 @@ class GridRepetition:
raise PatternError('b_count must be convertible to an int!') raise PatternError('b_count must be convertible to an int!')
self._b_count = int(val) self._b_count = int(val)
def as_pattern(self) -> 'Pattern': @property
def displacements(self) -> numpy.ndarray:
aa, bb = numpy.meshgrid(numpy.arange(self.a_count), numpy.arange(self.b_count), indexing='ij')
return (aa.flatten()[:, None] * self.a_vector[None, :]
+ bb.flatten()[:, None] * self.b_vector[None, :]) # noqa
def rotate(self, rotation: float) -> 'Grid':
""" """
Returns a copy of self.pattern which has been scaled, rotated, repeated, etc. Rotate lattice vectors (around (0, 0))
etc. according to this `GridRepetition`'s properties.
Returns:
A copy of self.pattern which has been scaled, rotated, repeated, etc.
etc. according to this `GridRepetition`'s properties.
"""
assert(self.pattern is not None)
patterns = []
for a in range(self.a_count):
for b in range(self.b_count):
offset = a * self.a_vector + b * self.b_vector
newPat = self.pattern.deepcopy().deepunlock()
newPat.translate_elements(offset)
patterns.append(newPat)
combined = patterns[0]
for p in patterns[1:]:
combined.append(p)
combined.scale_by(self.scale)
[combined.mirror(ax) for ax, do in enumerate(self.mirrored) if do]
combined.rotate_around((0.0, 0.0), self.rotation)
combined.translate_elements(self.offset)
combined.scale_element_doses(self.dose)
return combined
def translate(self, offset: vector2) -> 'GridRepetition':
"""
Translate by the given offset
Args:
offset: `[x, y]` to translate by
Returns:
self
"""
self.offset += offset
return self
def rotate_around(self, pivot: vector2, rotation: float) -> 'GridRepetition':
"""
Rotate the array around a point
Args:
pivot: Point `[x, y]` to rotate around
rotation: Angle to rotate by (counterclockwise, radians)
Returns:
self
"""
pivot = numpy.array(pivot, dtype=float)
self.translate(-pivot)
self.offset = numpy.dot(rotation_matrix_2d(rotation), self.offset)
self.rotate(rotation)
self.translate(+pivot)
return self
def rotate(self, rotation: float) -> 'GridRepetition':
"""
Rotate around (0, 0)
Args: Args:
rotation: Angle to rotate by (counterclockwise, radians) rotation: Angle to rotate by (counterclockwise, radians)
@ -392,15 +183,179 @@ class GridRepetition:
Returns: Returns:
self self
""" """
self.rotate_elements(rotation)
self.a_vector = numpy.dot(rotation_matrix_2d(rotation), self.a_vector) self.a_vector = numpy.dot(rotation_matrix_2d(rotation), self.a_vector)
if self.b_vector is not None: if self.b_vector is not None:
self.b_vector = numpy.dot(rotation_matrix_2d(rotation), self.b_vector) self.b_vector = numpy.dot(rotation_matrix_2d(rotation), self.b_vector)
return self return self
def rotate_elements(self, rotation: float) -> 'GridRepetition': def mirror(self, axis: int) -> 'Grid':
""" """
Rotate each element around its origin Mirror the Grid across an axis.
Args:
axis: Axis to mirror across.
(0: mirror across x-axis, 1: mirror across y-axis)
Returns:
self
"""
self.a_vector[1 - axis] *= -1
if self.b_vector is not None:
self.b_vector[1 - axis] *= -1
return self
def get_bounds(self) -> Optional[numpy.ndarray]:
"""
Return a `numpy.ndarray` containing `[[x_min, y_min], [x_max, y_max]]`, corresponding to the
extent of the `Grid` in each dimension.
Returns:
`[[x_min, y_min], [x_max, y_max]]` or `None`
"""
a_extent = self.a_vector * self.a_count
b_extent = self.b_vector * self.b_count if self.b_count != 0 else 0
corners = ((0, 0), a_extent, b_extent, a_extent + b_extent)
xy_min = numpy.min(corners, axis=0)
xy_max = numpy.max(corners, axis=0)
return numpy.array((xy_min, xy_max))
def scale_by(self, c: float) -> 'Grid':
"""
Scale the Grid by a factor
Args:
c: scaling factor
Returns:
self
"""
self.a_vector *= c
if self.b_vector is not None:
self.b_vector *= c
return self
def lock(self) -> 'Grid':
"""
Lock the `Grid`, disallowing changes.
Returns:
self
"""
self.a_vector.flags.writeable = False
if self.b_vector is not None:
self.b_vector.flags.writeable = False
LockableImpl.lock(self)
return self
def unlock(self) -> 'Grid':
"""
Unlock the `Grid`
Returns:
self
"""
self.a_vector.flags.writeable = True
if self.b_vector is not None:
self.b_vector.flags.writeable = True
LockableImpl.unlock(self)
return self
def __repr__(self) -> str:
locked = ' L' if self.locked else ''
bv = f', {self.b_vector}' if self.b_vector is not None else ''
return (f'<Grid {self.a_count}x{self.b_count} ({self.a_vector}{bv}){locked}>')
def __eq__(self, other: Any) -> bool:
if not isinstance(other, type(self)):
return False
if self.a_count != other.a_count or self.b_count != other.b_count:
return False
if any(self.a_vector[ii] != other.a_vector[ii] for ii in range(2)):
return False
if self.b_vector is None and other.b_vector is None:
return True
if self.b_vector is None or other.b_vector is None:
return False
if any(self.b_vector[ii] != other.b_vector[ii] for ii in range(2)):
return False
if self.locked != other.locked:
return False
return True
class Arbitrary(LockableImpl, Repetition, metaclass=AutoSlots):
"""
`Arbitrary` is a simple list of (absolute) displacements for instances.
Attributes:
displacements (numpy.ndarray): absolute displacements of all elements
`[[x0, y0], [x1, y1], ...]`
"""
_displacements: numpy.ndarray
""" List of vectors `[[x0, y0], [x1, y1], ...]` specifying the offsets
of the instances.
"""
@property
def displacements(self) -> numpy.ndarray:
return self._displacements
@displacements.setter
def displacements(self, val: Union[Sequence[Sequence[float]], numpy.ndarray]):
val = numpy.array(val, float)
val = numpy.sort(val.view([('', val.dtype)] * val.shape[1]), 0).view(val.dtype) # sort rows
self._displacements = val
def __init__(self,
displacements: numpy.ndarray,
locked: bool = False,):
"""
Args:
displacements: List of vectors (Nx2 ndarray) specifying displacements.
locked: Whether the object is locked after initialization.
"""
object.__setattr__(self, 'locked', False)
self.displacements = displacements
self.locked = locked
def lock(self) -> 'Arbitrary':
"""
Lock the object, disallowing changes.
Returns:
self
"""
self._displacements.flags.writeable = False
LockableImpl.lock(self)
return self
def unlock(self) -> 'Arbitrary':
"""
Unlock the object
Returns:
self
"""
self._displacements.flags.writeable = True
LockableImpl.unlock(self)
return self
def __repr__(self) -> str:
locked = ' L' if self.locked else ''
return (f'<Arbitrary {len(self.displacements)}pts {locked}>')
def __eq__(self, other: Any) -> bool:
if not isinstance(other, type(self)):
return False
if self.locked != other.locked:
return False
return numpy.array_equal(self.displacements, other.displacements)
def rotate(self, rotation: float) -> 'Arbitrary':
"""
Rotate displacements (around (0, 0))
Args: Args:
rotation: Angle to rotate by (counterclockwise, radians) rotation: Angle to rotate by (counterclockwise, radians)
@ -408,12 +363,12 @@ class GridRepetition:
Returns: Returns:
self self
""" """
self.rotation += rotation self.displacements = numpy.dot(rotation_matrix_2d(rotation), self.displacements.T).T
return self return self
def mirror(self, axis: int) -> 'GridRepetition': def mirror(self, axis: int) -> 'Arbitrary':
""" """
Mirror the GridRepetition across an axis. Mirror the displacements across an axis.
Args: Args:
axis: Axis to mirror across. axis: Axis to mirror across.
@ -422,43 +377,24 @@ class GridRepetition:
Returns: Returns:
self self
""" """
self.mirror_elements(axis) self.displacements[:, 1 - axis] *= -1
self.a_vector[1-axis] *= -1
if self.b_vector is not None:
self.b_vector[1-axis] *= -1
return self
def mirror_elements(self, axis: int) -> 'GridRepetition':
"""
Mirror each element across an axis relative to its origin.
Args:
axis: Axis to mirror across.
(0: mirror across x-axis, 1: mirror across y-axis)
Returns:
self
"""
self.mirrored[axis] = not self.mirrored[axis]
self.rotation *= -1
return self return self
def get_bounds(self) -> Optional[numpy.ndarray]: def get_bounds(self) -> Optional[numpy.ndarray]:
""" """
Return a `numpy.ndarray` containing `[[x_min, y_min], [x_max, y_max]]`, corresponding to the Return a `numpy.ndarray` containing `[[x_min, y_min], [x_max, y_max]]`, corresponding to the
extent of the `GridRepetition` in each dimension. extent of the `displacements` in each dimension.
Returns `None` if the contained `Pattern` is empty.
Returns: Returns:
`[[x_min, y_min], [x_max, y_max]]` or `None` `[[x_min, y_min], [x_max, y_max]]` or `None`
""" """
if self.pattern is None: xy_min = numpy.min(self.displacements, axis=0)
return None xy_max = numpy.max(self.displacements, axis=0)
return self.as_pattern().get_bounds() return numpy.array((xy_min, xy_max))
def scale_by(self, c: float) -> 'GridRepetition': def scale_by(self, c: float) -> 'Arbitrary':
""" """
Scale the GridRepetition by a factor Scale the displacements by a factor
Args: Args:
c: scaling factor c: scaling factor
@ -466,107 +402,6 @@ class GridRepetition:
Returns: Returns:
self self
""" """
self.scale_elements_by(c) self.displacements *= c
self.a_vector *= c
if self.b_vector is not None:
self.b_vector *= c
return self return self
def scale_elements_by(self, c: float) -> 'GridRepetition':
"""
Scale each element by a factor
Args:
c: scaling factor
Returns:
self
"""
self.scale *= c
return self
def copy(self) -> 'GridRepetition':
"""
Return a shallow copy of the repetition.
Returns:
`copy.copy(self)`
"""
return copy.copy(self)
def deepcopy(self) -> 'GridRepetition':
"""
Return a deep copy of the repetition.
Returns:
`copy.deepcopy(self)`
"""
return copy.deepcopy(self)
def lock(self) -> 'GridRepetition':
"""
Lock the `GridRepetition`, disallowing changes.
Returns:
self
"""
self.offset.flags.writeable = False
self.a_vector.flags.writeable = False
self.mirrored.flags.writeable = False
if self.b_vector is not None:
self.b_vector.flags.writeable = False
object.__setattr__(self, 'locked', True)
return self
def unlock(self) -> 'GridRepetition':
"""
Unlock the `GridRepetition`
Returns:
self
"""
self.offset.flags.writeable = True
self.a_vector.flags.writeable = True
self.mirrored.flags.writeable = True
if self.b_vector is not None:
self.b_vector.flags.writeable = True
object.__setattr__(self, 'locked', False)
return self
def deeplock(self) -> 'GridRepetition':
"""
Recursively lock the `GridRepetition` and its contained pattern
Returns:
self
"""
assert(self.pattern is not None)
self.lock()
self.pattern.deeplock()
return self
def deepunlock(self) -> 'GridRepetition':
"""
Recursively unlock the `GridRepetition` and its contained pattern
This is dangerous unless you have just performed a deepcopy, since
the component parts may be reused elsewhere.
Returns:
self
"""
assert(self.pattern is not None)
self.unlock()
self.pattern.deepunlock()
return self
def __repr__(self) -> str:
name = self.pattern.name if self.pattern is not None else None
rotation = f' r{self.rotation*180/pi:g}' if self.rotation != 0 else ''
scale = f' d{self.scale:g}' if self.scale != 1 else ''
mirrored = ' m{:d}{:d}'.format(*self.mirrored) if self.mirrored.any() else ''
dose = f' d{self.dose:g}' if self.dose != 1 else ''
locked = ' L' if self.locked else ''
bv = f', {self.b_vector}' if self.b_vector is not None else ''
return (f'<GridRepetition "{name}" at {self.offset} {rotation}{scale}{mirrored}{dose}'
f' {self.a_count}x{self.b_count} ({self.a_vector}{bv}){locked}>')
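A minimal usage sketch of the new repetition classes (not part of this commit; the constructor keyword names are assumed to match the attributes documented above):

```python
import numpy
from masque.repetition import Grid, Arbitrary

# 3 x 5 lattice stepped 10 units in x and 20 units in y
# (keywords a_vector/a_count/b_vector/b_count are assumed from the attributes above)
rep = Grid(a_vector=numpy.array([10.0, 0]), a_count=3,
           b_vector=numpy.array([0, 20.0]), b_count=5)
print(rep.get_bounds())      # [[0, 0], [30, 100]] -- corners spanned by the lattice vectors
rep.rotate(numpy.pi / 2)     # rotates both lattice vectors about (0, 0)
rep.scale_by(2)              # doubles both lattice vectors

# Irregular placements are just a list of absolute offsets (rows get sorted on assignment)
arb = Arbitrary(displacements=[[0, 0], [3, 1], [7, -2]])
print(arb.get_bounds())      # [[0, -2], [7, 1]]
```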

View File

@ -1,15 +1,18 @@
from typing import List, Tuple, Dict, Optional, Sequence from typing import List, Dict, Optional, Sequence
import copy import copy
import math import math
import numpy
import numpy # type: ignore
from numpy import pi from numpy import pi
from . import Shape, Polygon, normalized_shape_tuple, DEFAULT_POLY_NUM_POINTS from . import Shape, Polygon, normalized_shape_tuple, DEFAULT_POLY_NUM_POINTS
from .. import PatternError from .. import PatternError
from ..utils import is_scalar, vector2, layer_t from ..repetition import Repetition
from ..utils import is_scalar, vector2, layer_t, AutoSlots, annotations_t
from ..traits import LockableImpl
class Arc(Shape): class Arc(Shape, metaclass=AutoSlots):
""" """
An elliptical arc, formed by cutting off an elliptical ring with two rays which exit from its An elliptical arc, formed by cutting off an elliptical ring with two rays which exit from its
center. It has a position, two radii, a start and stop angle, a rotation, and a width. center. It has a position, two radii, a start and stop angle, a rotation, and a width.
@ -20,6 +23,7 @@ class Arc(Shape):
""" """
__slots__ = ('_radii', '_angles', '_width', '_rotation', __slots__ = ('_radii', '_angles', '_width', '_rotation',
'poly_num_points', 'poly_max_arclen') 'poly_num_points', 'poly_max_arclen')
_radii: numpy.ndarray _radii: numpy.ndarray
""" Two radii for defining an ellipse """ """ Two radii for defining an ellipse """
@ -77,7 +81,7 @@ class Arc(Shape):
# arc start/stop angle properties # arc start/stop angle properties
@property @property
def angles(self) -> numpy.ndarray: #ndarray[float] def angles(self) -> numpy.ndarray:
""" """
Return the start and stop angles `[a_start, a_stop]`. Return the start and stop angles `[a_start, a_stop]`.
Angles are measured from x-axis after rotation Angles are measured from x-axis after rotation
@ -150,6 +154,7 @@ class Arc(Shape):
radii: vector2, radii: vector2,
angles: vector2, angles: vector2,
width: float, width: float,
*,
poly_num_points: Optional[int] = DEFAULT_POLY_NUM_POINTS, poly_num_points: Optional[int] = DEFAULT_POLY_NUM_POINTS,
poly_max_arclen: Optional[float] = None, poly_max_arclen: Optional[float] = None,
offset: vector2 = (0.0, 0.0), offset: vector2 = (0.0, 0.0),
@ -157,28 +162,46 @@ class Arc(Shape):
mirrored: Sequence[bool] = (False, False), mirrored: Sequence[bool] = (False, False),
layer: layer_t = 0, layer: layer_t = 0,
dose: float = 1.0, dose: float = 1.0,
locked: bool = False): repetition: Optional[Repetition] = None,
object.__setattr__(self, 'locked', False) annotations: Optional[annotations_t] = None,
locked: bool = False,
raw: bool = False,
):
LockableImpl.unlock(self)
self.identifier = () self.identifier = ()
self.radii = radii if raw:
self.angles = angles self._radii = radii
self.width = width self._angles = angles
self.offset = offset self._width = width
self.rotation = rotation self._offset = offset
[self.mirror(a) for a, do in enumerate(mirrored) if do] self._rotation = rotation
self.layer = layer self._repetition = repetition
self.dose = dose self._annotations = annotations if annotations is not None else {}
self._layer = layer
self._dose = dose
else:
self.radii = radii
self.angles = angles
self.width = width
self.offset = offset
self.rotation = rotation
self.repetition = repetition
self.annotations = annotations if annotations is not None else {}
self.layer = layer
self.dose = dose
self.poly_num_points = poly_num_points self.poly_num_points = poly_num_points
self.poly_max_arclen = poly_max_arclen self.poly_max_arclen = poly_max_arclen
self.locked = locked [self.mirror(a) for a, do in enumerate(mirrored) if do]
self.set_locked(locked)
def __deepcopy__(self, memo: Dict = None) -> 'Arc': def __deepcopy__(self, memo: Dict = None) -> 'Arc':
memo = {} if memo is None else memo memo = {} if memo is None else memo
new = copy.copy(self).unlock() new = copy.copy(self).unlock()
new._offset = self._offset.copy() new._offset = self._offset.copy()
new._radii = self._radii.copy() new._radii = self._radii.copy()
new._angles = self._angles.copy() new._angles = self._angles.copy()
new.locked = self.locked new._annotations = copy.deepcopy(self._annotations)
new.set_locked(self.locked)
return new return new
def to_polygons(self, def to_polygons(self,
@ -191,8 +214,8 @@ class Arc(Shape):
poly_max_arclen = self.poly_max_arclen poly_max_arclen = self.poly_max_arclen
if (poly_num_points is None) and (poly_max_arclen is None): if (poly_num_points is None) and (poly_max_arclen is None):
raise PatternError('Max number of points and arclength left unspecified' + raise PatternError('Max number of points and arclength left unspecified'
' (default was also overridden)') + ' (default was also overridden)')
r0, r1 = self.radii r0, r1 = self.radii
@ -212,8 +235,9 @@ class Arc(Shape):
n += [poly_num_points] n += [poly_num_points]
if poly_max_arclen is not None: if poly_max_arclen is not None:
n += [perimeter / poly_max_arclen] n += [perimeter / poly_max_arclen]
thetas_inner = numpy.linspace(a_ranges[0][1], a_ranges[0][0], max(n), endpoint=True) num_points = int(round(max(n)))
thetas_outer = numpy.linspace(a_ranges[1][0], a_ranges[1][1], max(n), endpoint=True) thetas_inner = numpy.linspace(a_ranges[0][1], a_ranges[0][0], num_points, endpoint=True)
thetas_outer = numpy.linspace(a_ranges[1][0], a_ranges[1][1], num_points, endpoint=True)
sin_th_i, cos_th_i = (numpy.sin(thetas_inner), numpy.cos(thetas_inner)) sin_th_i, cos_th_i = (numpy.sin(thetas_inner), numpy.cos(thetas_inner))
sin_th_o, cos_th_o = (numpy.sin(thetas_outer), numpy.cos(thetas_outer)) sin_th_o, cos_th_o = (numpy.sin(thetas_outer), numpy.cos(thetas_outer))
@ -249,7 +273,7 @@ class Arc(Shape):
mins = [] mins = []
maxs = [] maxs = []
for a, sgn in zip(a_ranges, (-1, +1)): for a, sgn in zip(a_ranges, (-1, +1)):
wh = sgn * self.width/2 wh = sgn * self.width / 2
rx = self.radius_x + wh rx = self.radius_x + wh
ry = self.radius_y + wh ry = self.radius_y + wh
@ -263,7 +287,7 @@ class Arc(Shape):
# Cutoff angles # Cutoff angles
xpt = (-self.rotation) % (2 * pi) + a0_offset xpt = (-self.rotation) % (2 * pi) + a0_offset
ypt = (pi/2 - self.rotation) % (2 * pi) + a0_offset ypt = (pi / 2 - self.rotation) % (2 * pi) + a0_offset
xnt = (xpt - pi) % (2 * pi) + a0_offset xnt = (xpt - pi) % (2 * pi) + a0_offset
ynt = (ypt - pi) % (2 * pi) + a0_offset ynt = (ypt - pi) % (2 * pi) + a0_offset
@ -332,9 +356,9 @@ class Arc(Shape):
rotation %= 2 * pi rotation %= 2 * pi
width = self.width width = self.width
return (type(self), radii, angles, width/norm_value, self.layer), \ return ((type(self), radii, angles, width / norm_value, self.layer),
(self.offset, scale/norm_value, rotation, False, self.dose), \ (self.offset, scale / norm_value, rotation, False, self.dose),
lambda: Arc(radii=radii*norm_value, angles=angles, width=width*norm_value, layer=self.layer) lambda: Arc(radii=radii * norm_value, angles=angles, width=width * norm_value, layer=self.layer))
def get_cap_edges(self) -> numpy.ndarray: def get_cap_edges(self) -> numpy.ndarray:
''' '''
@ -349,7 +373,7 @@ class Arc(Shape):
mins = [] mins = []
maxs = [] maxs = []
for a, sgn in zip(a_ranges, (-1, +1)): for a, sgn in zip(a_ranges, (-1, +1)):
wh = sgn * self.width/2 wh = sgn * self.width / 2
rx = self.radius_x + wh rx = self.radius_x + wh
ry = self.radius_y + wh ry = self.radius_y + wh
@ -364,7 +388,7 @@ class Arc(Shape):
mins.append([xn, yn]) mins.append([xn, yn])
maxs.append([xp, yp]) maxs.append([xp, yp])
return numpy.array([mins, maxs]) + self.offset return numpy.array([mins, maxs]) + self.offset
def _angles_to_parameters(self) -> numpy.ndarray: def _angles_to_parameters(self) -> numpy.ndarray:
''' '''
@ -374,12 +398,12 @@ class Arc(Shape):
''' '''
a = [] a = []
for sgn in (-1, +1): for sgn in (-1, +1):
wh = sgn * self.width/2 wh = sgn * self.width / 2
rx = self.radius_x + wh rx = self.radius_x + wh
ry = self.radius_y + wh ry = self.radius_y + wh
# create parameter 'a' for parametrized ellipse # create parameter 'a' for parametrized ellipse
a0, a1 = (numpy.arctan2(rx*numpy.sin(a), ry*numpy.cos(a)) for a in self.angles) a0, a1 = (numpy.arctan2(rx * numpy.sin(a), ry * numpy.cos(a)) for a in self.angles)
sign = numpy.sign(self.angles[1] - self.angles[0]) sign = numpy.sign(self.angles[1] - self.angles[0])
if sign != numpy.sign(a1 - a0): if sign != numpy.sign(a1 - a0):
a1 += sign * 2 * pi a1 += sign * 2 * pi
@ -400,8 +424,8 @@ class Arc(Shape):
return self return self
def __repr__(self) -> str: def __repr__(self) -> str:
angles = f'{self.angles*180/pi}' angles = f'{numpy.rad2deg(self.angles)}'
rotation = f'{self.rotation*180/pi:g}' if self.rotation != 0 else '' rotation = f'{numpy.rad2deg(self.rotation):g}' if self.rotation != 0 else ''
dose = f' d{self.dose:g}' if self.dose != 1 else '' dose = f' d{self.dose:g}' if self.dose != 1 else ''
locked = ' L' if self.locked else '' locked = ' L' if self.locked else ''
return f'<Arc l{self.layer} o{self.offset} r{self.radii}{angles} w{self.width:g}{rotation}{dose}{locked}>' return f'<Arc l{self.layer} o{self.offset} r{self.radii}{angles} w{self.width:g}{rotation}{dose}{locked}>'
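For reference, a usage sketch of the now keyword-only `Arc` constructor (not part of this diff; the `masque.shapes` import location is assumed and the values are arbitrary):

```python
from numpy import pi
from masque.shapes import Arc

# Quarter of an elliptical ring (radii 10 and 5), 2 units wide, on layer (1, 0)
arc = Arc(radii=(10, 5), angles=(0, pi / 2), width=2.0,
          offset=(100, 0), layer=(1, 0))

polys = arc.to_polygons(poly_num_points=64)   # approximate the ring segment with polygons
print(arc.get_bounds())                       # bounding box, including the stroke width
```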

View File

@ -1,18 +1,22 @@
from typing import List, Dict, Optional from typing import List, Dict, Optional
import copy import copy
import numpy
import numpy # type: ignore
from numpy import pi from numpy import pi
from . import Shape, Polygon, normalized_shape_tuple, DEFAULT_POLY_NUM_POINTS from . import Shape, Polygon, normalized_shape_tuple, DEFAULT_POLY_NUM_POINTS
from .. import PatternError from .. import PatternError
from ..utils import is_scalar, vector2, layer_t from ..repetition import Repetition
from ..utils import is_scalar, vector2, layer_t, AutoSlots, annotations_t
from ..traits import LockableImpl
class Circle(Shape): class Circle(Shape, metaclass=AutoSlots):
""" """
A circle, which has a position and radius. A circle, which has a position and radius.
""" """
__slots__ = ('_radius', 'poly_num_points', 'poly_max_arclen') __slots__ = ('_radius', 'poly_num_points', 'poly_max_arclen')
_radius: float _radius: float
""" Circle radius """ """ Circle radius """
@ -40,27 +44,43 @@ class Circle(Shape):
def __init__(self, def __init__(self,
radius: float, radius: float,
*,
poly_num_points: Optional[int] = DEFAULT_POLY_NUM_POINTS, poly_num_points: Optional[int] = DEFAULT_POLY_NUM_POINTS,
poly_max_arclen: Optional[float] = None, poly_max_arclen: Optional[float] = None,
offset: vector2 = (0.0, 0.0), offset: vector2 = (0.0, 0.0),
layer: layer_t = 0, layer: layer_t = 0,
dose: float = 1.0, dose: float = 1.0,
locked: bool = False): repetition: Optional[Repetition] = None,
object.__setattr__(self, 'locked', False) annotations: Optional[annotations_t] = None,
locked: bool = False,
raw: bool = False,
):
LockableImpl.unlock(self)
self.identifier = () self.identifier = ()
self.offset = numpy.array(offset, dtype=float) if raw:
self.layer = layer self._radius = radius
self.dose = dose self._offset = offset
self.radius = radius self._repetition = repetition
self._annotations = annotations if annotations is not None else {}
self._layer = layer
self._dose = dose
else:
self.radius = radius
self.offset = offset
self.repetition = repetition
self.annotations = annotations if annotations is not None else {}
self.layer = layer
self.dose = dose
self.poly_num_points = poly_num_points self.poly_num_points = poly_num_points
self.poly_max_arclen = poly_max_arclen self.poly_max_arclen = poly_max_arclen
self.locked = locked self.set_locked(locked)
def __deepcopy__(self, memo: Dict = None) -> 'Circle': def __deepcopy__(self, memo: Dict = None) -> 'Circle':
memo = {} if memo is None else memo memo = {} if memo is None else memo
new = copy.copy(self).unlock() new = copy.copy(self).unlock()
new._offset = self._offset.copy() new._offset = self._offset.copy()
new.locked = self.locked new._annotations = copy.deepcopy(self._annotations)
new.set_locked(self.locked)
return new return new
def to_polygons(self, def to_polygons(self,
@ -81,7 +101,8 @@ class Circle(Shape):
n += [poly_num_points] n += [poly_num_points]
if poly_max_arclen is not None: if poly_max_arclen is not None:
n += [2 * pi * self.radius / poly_max_arclen] n += [2 * pi * self.radius / poly_max_arclen]
thetas = numpy.linspace(2 * pi, 0, max(n), endpoint=False) num_points = int(round(max(n)))
thetas = numpy.linspace(2 * pi, 0, num_points, endpoint=False)
xs = numpy.cos(thetas) * self.radius xs = numpy.cos(thetas) * self.radius
ys = numpy.sin(thetas) * self.radius ys = numpy.sin(thetas) * self.radius
xys = numpy.vstack((xs, ys)).T xys = numpy.vstack((xs, ys)).T
@ -106,9 +127,9 @@ class Circle(Shape):
def normalized_form(self, norm_value) -> normalized_shape_tuple: def normalized_form(self, norm_value) -> normalized_shape_tuple:
rotation = 0.0 rotation = 0.0
magnitude = self.radius / norm_value magnitude = self.radius / norm_value
return (type(self), self.layer), \ return ((type(self), self.layer),
(self.offset, magnitude, rotation, False, self.dose), \ (self.offset, magnitude, rotation, False, self.dose),
lambda: Circle(radius=norm_value, layer=self.layer) lambda: Circle(radius=norm_value, layer=self.layer))
def __repr__(self) -> str: def __repr__(self) -> str:
dose = f' d{self.dose:g}' if self.dose != 1 else '' dose = f' d{self.dose:g}' if self.dose != 1 else ''
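Usage sketch (illustrative only; values are arbitrary) showing the keyword-only arguments and arclength-bounded discretization:

```python
from masque.shapes import Circle

# Radius-5 circle; everything other than the radius is now keyword-only
circ = Circle(5.0, offset=(20, 20), layer=1, dose=2.0)
poly = circ.to_polygons(poly_max_arclen=0.1)[0]   # vertex spacing capped at ~0.1 units of arc
print(len(poly.vertices))                         # ~314 vertices for this radius
```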

View File

@ -1,21 +1,25 @@
from typing import List, Tuple, Dict, Sequence, Optional from typing import List, Dict, Sequence, Optional
import copy import copy
import math import math
import numpy
import numpy # type: ignore
from numpy import pi from numpy import pi
from . import Shape, Polygon, normalized_shape_tuple, DEFAULT_POLY_NUM_POINTS from . import Shape, Polygon, normalized_shape_tuple, DEFAULT_POLY_NUM_POINTS
from .. import PatternError from .. import PatternError
from ..utils import is_scalar, rotation_matrix_2d, vector2, layer_t from ..repetition import Repetition
from ..utils import is_scalar, rotation_matrix_2d, vector2, layer_t, AutoSlots, annotations_t
from ..traits import LockableImpl
class Ellipse(Shape): class Ellipse(Shape, metaclass=AutoSlots):
""" """
An ellipse, which has a position, two radii, and a rotation. An ellipse, which has a position, two radii, and a rotation.
The rotation gives the angle from x-axis, counterclockwise, to the first (x) radius. The rotation gives the angle from x-axis, counterclockwise, to the first (x) radius.
""" """
__slots__ = ('_radii', '_rotation', __slots__ = ('_radii', '_rotation',
'poly_num_points', 'poly_max_arclen') 'poly_num_points', 'poly_max_arclen')
_radii: numpy.ndarray _radii: numpy.ndarray
""" Ellipse radii """ """ Ellipse radii """
@ -85,6 +89,7 @@ class Ellipse(Shape):
def __init__(self, def __init__(self,
radii: vector2, radii: vector2,
*,
poly_num_points: Optional[int] = DEFAULT_POLY_NUM_POINTS, poly_num_points: Optional[int] = DEFAULT_POLY_NUM_POINTS,
poly_max_arclen: Optional[float] = None, poly_max_arclen: Optional[float] = None,
offset: vector2 = (0.0, 0.0), offset: vector2 = (0.0, 0.0),
@ -92,25 +97,41 @@ class Ellipse(Shape):
mirrored: Sequence[bool] = (False, False), mirrored: Sequence[bool] = (False, False),
layer: layer_t = 0, layer: layer_t = 0,
dose: float = 1.0, dose: float = 1.0,
locked: bool = False): repetition: Optional[Repetition] = None,
object.__setattr__(self, 'locked', False) annotations: Optional[annotations_t] = None,
locked: bool = False,
raw: bool = False,
):
LockableImpl.unlock(self)
self.identifier = () self.identifier = ()
self.radii = radii if raw:
self.offset = offset self._radii = radii
self.rotation = rotation self._offset = offset
self._rotation = rotation
self._repetition = repetition
self._annotations = annotations if annotations is not None else {}
self._layer = layer
self._dose = dose
else:
self.radii = radii
self.offset = offset
self.rotation = rotation
self.repetition = repetition
self.annotations = annotations if annotations is not None else {}
self.layer = layer
self.dose = dose
[self.mirror(a) for a, do in enumerate(mirrored) if do] [self.mirror(a) for a, do in enumerate(mirrored) if do]
self.layer = layer
self.dose = dose
self.poly_num_points = poly_num_points self.poly_num_points = poly_num_points
self.poly_max_arclen = poly_max_arclen self.poly_max_arclen = poly_max_arclen
self.locked = locked self.set_locked(locked)
def __deepcopy__(self, memo: Dict = None) -> 'Ellipse': def __deepcopy__(self, memo: Dict = None) -> 'Ellipse':
memo = {} if memo is None else memo memo = {} if memo is None else memo
new = copy.copy(self).unlock() new = copy.copy(self).unlock()
new._offset = self._offset.copy() new._offset = self._offset.copy()
new._radii = self._radii.copy() new._radii = self._radii.copy()
new.locked = self.locked new._annotations = copy.deepcopy(self._annotations)
new.set_locked(self.locked)
return new return new
def to_polygons(self, def to_polygons(self,
@ -139,7 +160,8 @@ class Ellipse(Shape):
n += [poly_num_points] n += [poly_num_points]
if poly_max_arclen is not None: if poly_max_arclen is not None:
n += [perimeter / poly_max_arclen] n += [perimeter / poly_max_arclen]
thetas = numpy.linspace(2 * pi, 0, max(n), endpoint=False) num_points = int(round(max(n)))
thetas = numpy.linspace(2 * pi, 0, num_points, endpoint=False)
sin_th, cos_th = (numpy.sin(thetas), numpy.cos(thetas)) sin_th, cos_th = (numpy.sin(thetas), numpy.cos(thetas))
xs = r0 * cos_th xs = r0 * cos_th
@ -176,9 +198,9 @@ class Ellipse(Shape):
radii = self.radii[::-1] / self.radius_y radii = self.radii[::-1] / self.radius_y
scale = self.radius_y scale = self.radius_y
angle = (self.rotation + pi / 2) % pi angle = (self.rotation + pi / 2) % pi
return (type(self), radii, self.layer), \ return ((type(self), radii, self.layer),
(self.offset, scale/norm_value, angle, False, self.dose), \ (self.offset, scale / norm_value, angle, False, self.dose),
lambda: Ellipse(radii=radii*norm_value, layer=self.layer) lambda: Ellipse(radii=radii * norm_value, layer=self.layer))
def lock(self) -> 'Ellipse': def lock(self) -> 'Ellipse':
self.radii.flags.writeable = False self.radii.flags.writeable = False
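A corresponding sketch for `Ellipse` (illustrative; values arbitrary):

```python
from numpy import pi
from masque.shapes import Ellipse

# Ellipse with x-radius 8 and y-radius 3, rotated 30 degrees counterclockwise
ell = Ellipse(radii=(8, 3), rotation=pi / 6, offset=(0, 0), layer=(2, 0))
polys = ell.to_polygons(poly_num_points=100)
```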

View File

@ -1,13 +1,16 @@
from typing import List, Tuple, Dict, Optional, Sequence from typing import List, Tuple, Dict, Optional, Sequence
import copy import copy
from enum import Enum from enum import Enum
import numpy
import numpy # type: ignore
from numpy import pi, inf from numpy import pi, inf
from . import Shape, normalized_shape_tuple, Polygon, Circle from . import Shape, normalized_shape_tuple, Polygon, Circle
from .. import PatternError from .. import PatternError
from ..utils import is_scalar, rotation_matrix_2d, vector2, layer_t from ..repetition import Repetition
from ..utils import remove_colinear_vertices, remove_duplicate_vertices from ..utils import is_scalar, rotation_matrix_2d, vector2, layer_t, AutoSlots
from ..utils import remove_colinear_vertices, remove_duplicate_vertices, annotations_t
from ..traits import LockableImpl
class PathCap(Enum): class PathCap(Enum):
@ -15,10 +18,10 @@ class PathCap(Enum):
Circle = 1 # Path extends past final vertices with a semicircle of radius width/2 Circle = 1 # Path extends past final vertices with a semicircle of radius width/2
Square = 2 # Path extends past final vertices with a width-by-width/2 rectangle Square = 2 # Path extends past final vertices with a width-by-width/2 rectangle
SquareCustom = 4 # Path extends past final vertices with a rectangle of length SquareCustom = 4 # Path extends past final vertices with a rectangle of length
# defined by path.cap_extensions # # defined by path.cap_extensions
class Path(Shape): class Path(Shape, metaclass=AutoSlots):
""" """
A path, consisting of a bunch of vertices (Nx2 ndarray), a width, an end-cap shape, A path, consisting of a bunch of vertices (Nx2 ndarray), a width, an end-cap shape,
and an offset. and an offset.
@ -100,7 +103,7 @@ class Path(Shape):
@vertices.setter @vertices.setter
def vertices(self, val: numpy.ndarray): def vertices(self, val: numpy.ndarray):
val = numpy.array(val, dtype=float) #TODO document that these might not be copied val = numpy.array(val, dtype=float) # TODO document that these might not be copied
if len(val.shape) < 2 or val.shape[1] != 2: if len(val.shape) < 2 or val.shape[1] != 2:
raise PatternError('Vertices must be an Nx2 array') raise PatternError('Vertices must be an Nx2 array')
if val.shape[0] < 2: if val.shape[0] < 2:
@ -140,6 +143,7 @@ class Path(Shape):
def __init__(self, def __init__(self,
vertices: numpy.ndarray, vertices: numpy.ndarray,
width: float = 0.0, width: float = 0.0,
*,
cap: PathCap = PathCap.Flush, cap: PathCap = PathCap.Flush,
cap_extensions: numpy.ndarray = None, cap_extensions: numpy.ndarray = None,
offset: vector2 = (0.0, 0.0), offset: vector2 = (0.0, 0.0),
@ -147,39 +151,55 @@ class Path(Shape):
mirrored: Sequence[bool] = (False, False), mirrored: Sequence[bool] = (False, False),
layer: layer_t = 0, layer: layer_t = 0,
dose: float = 1.0, dose: float = 1.0,
repetition: Optional[Repetition] = None,
annotations: Optional[annotations_t] = None,
locked: bool = False, locked: bool = False,
raw: bool = False,
): ):
object.__setattr__(self, 'locked', False) LockableImpl.unlock(self)
self._cap_extensions = None # Since .cap setter might access it self._cap_extensions = None # Since .cap setter might access it
self.identifier = () self.identifier = ()
self.offset = offset if raw:
self.layer = layer self._vertices = vertices
self.dose = dose self._offset = offset
self.vertices = vertices self._repetition = repetition
self.width = width self._annotations = annotations if annotations is not None else {}
self.cap = cap self._layer = layer
if cap_extensions is not None: self._dose = dose
self._width = width
self._cap = cap
self._cap_extensions = cap_extensions
else:
self.vertices = vertices
self.offset = offset
self.repetition = repetition
self.annotations = annotations if annotations is not None else {}
self.layer = layer
self.dose = dose
self.width = width
self.cap = cap
self.cap_extensions = cap_extensions self.cap_extensions = cap_extensions
self.rotate(rotation) self.rotate(rotation)
[self.mirror(a) for a, do in enumerate(mirrored) if do] [self.mirror(a) for a, do in enumerate(mirrored) if do]
self.locked = locked self.set_locked(locked)
def __deepcopy__(self, memo: Dict = None) -> 'Path': def __deepcopy__(self, memo: Dict = None) -> 'Path':
memo = {} if memo is None else memo memo = {} if memo is None else memo
new = copy.copy(self).unlock() new = copy.copy(self).unlock()
new._offset = self._offset.copy() new._offset = self._offset.copy()
new._vertices = self._vertices.copy() new._vertices = self._vertices.copy()
new._cap = copy.deepcopy(self._cap, memo) new._cap = copy.deepcopy(self._cap, memo)
new._cap_extensions = copy.deepcopy(self._cap_extensions, memo) new._cap_extensions = copy.deepcopy(self._cap_extensions, memo)
new.locked = self.locked new._annotations = copy.deepcopy(self._annotations)
new.set_locked(self.locked)
return new return new
@staticmethod @staticmethod
def travel(travel_pairs: Tuple[Tuple[float, float]], def travel(travel_pairs: Tuple[Tuple[float, float]],
width: float = 0.0, width: float = 0.0,
cap: PathCap = PathCap.Flush, cap: PathCap = PathCap.Flush,
cap_extensions = None, cap_extensions: Optional[Tuple[float, float]] = None,
offset: vector2 = (0.0, 0.0), offset: vector2 = (0.0, 0.0),
rotation: float = 0, rotation: float = 0,
mirrored: Sequence[bool] = (False, False), mirrored: Sequence[bool] = (False, False),
@ -255,9 +275,9 @@ class Path(Shape):
intersection_p = v[:-2] + rp * dv[:-1] + perp[:-1] intersection_p = v[:-2] + rp * dv[:-1] + perp[:-1]
intersection_n = v[:-2] + rn * dv[:-1] - perp[:-1] intersection_n = v[:-2] + rn * dv[:-1] - perp[:-1]
towards_perp = (dv[1:] * perp[:-1]).sum(axis=1) > 0 # path bends towards previous perp? towards_perp = (dv[1:] * perp[:-1]).sum(axis=1) > 0 # path bends towards previous perp?
# straight = (dv[1:] * perp[:-1]).sum(axis=1) == 0 # path is straight # straight = (dv[1:] * perp[:-1]).sum(axis=1) == 0 # path is straight
acute = (dv[1:] * dv[:-1]).sum(axis=1) < 0 # angle is acute? acute = (dv[1:] * dv[:-1]).sum(axis=1) < 0 # angle is acute?
# Build vertices # Build vertices
o0 = [v[0] + perp[0]] o0 = [v[0] + perp[0]]
@ -309,12 +329,13 @@ class Path(Shape):
bounds[0, :] = numpy.minimum(bounds[0, :], poly_bounds[0, :]) bounds[0, :] = numpy.minimum(bounds[0, :], poly_bounds[0, :])
bounds[1, :] = numpy.maximum(bounds[1, :], poly_bounds[1, :]) bounds[1, :] = numpy.maximum(bounds[1, :], poly_bounds[1, :])
else: else:
raise PatternError('get_bounds() not implemented for endcaps: {}'.format(self.cap)) raise PatternError(f'get_bounds() not implemented for endcaps: {self.cap}')
return bounds return bounds
def rotate(self, theta: float) -> 'Path': def rotate(self, theta: float) -> 'Path':
self.vertices = numpy.dot(rotation_matrix_2d(theta), self.vertices.T).T if theta != 0:
self.vertices = numpy.dot(rotation_matrix_2d(theta), self.vertices.T).T
return self return self
def mirror(self, axis: int) -> 'Path': def mirror(self, axis: int) -> 'Path':
@ -349,10 +370,10 @@ class Path(Shape):
width0 = self.width / norm_value width0 = self.width / norm_value
return (type(self), reordered_vertices.data.tobytes(), width0, self.cap, self.layer), \ return ((type(self), reordered_vertices.data.tobytes(), width0, self.cap, self.layer),
(offset, scale/norm_value, rotation, False, self.dose), \ (offset, scale / norm_value, rotation, False, self.dose),
lambda: Path(reordered_vertices*norm_value, width=self.width*norm_value, lambda: Path(reordered_vertices * norm_value, width=self.width * norm_value,
cap=self.cap, layer=self.layer) cap=self.cap, layer=self.layer))
def clean_vertices(self) -> 'Path': def clean_vertices(self) -> 'Path':
""" """
@ -388,7 +409,7 @@ class Path(Shape):
if self.cap == PathCap.Square: if self.cap == PathCap.Square:
extensions = numpy.full(2, self.width / 2) extensions = numpy.full(2, self.width / 2)
elif self.cap == PathCap.SquareCustom: elif self.cap == PathCap.SquareCustom:
extensions = self.cap_extensions extensions = self.cap_extensions
else: else:
# Flush or Circle # Flush or Circle
extensions = numpy.zeros(2) extensions = numpy.zeros(2)
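Sketch of the keyword-only `Path` constructor (not part of this commit; assumes `PathCap` is re-exported from `masque.shapes` alongside `Path`):

```python
from masque.shapes import Path, PathCap

# L-shaped wire of width 0.5 with square end caps
wire = Path(vertices=[[0, 0], [10, 0], [10, 10]], width=0.5,
            cap=PathCap.Square, layer=(2, 0))
polys = wire.to_polygons()    # expand the centerline + width into polygons
print(wire.get_bounds())      # includes the width/2 cap extensions for Square caps
```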

View File

@ -1,15 +1,18 @@
from typing import List, Tuple, Dict, Optional, Sequence from typing import List, Dict, Optional, Sequence
import copy import copy
import numpy
import numpy # type: ignore
from numpy import pi from numpy import pi
from . import Shape, normalized_shape_tuple from . import Shape, normalized_shape_tuple
from .. import PatternError from .. import PatternError
from ..utils import is_scalar, rotation_matrix_2d, vector2, layer_t from ..repetition import Repetition
from ..utils import remove_colinear_vertices, remove_duplicate_vertices from ..utils import is_scalar, rotation_matrix_2d, vector2, layer_t, AutoSlots
from ..utils import remove_colinear_vertices, remove_duplicate_vertices, annotations_t
from ..traits import LockableImpl
class Polygon(Shape): class Polygon(Shape, metaclass=AutoSlots):
""" """
A polygon, consisting of a bunch of vertices (Nx2 ndarray) which specify an A polygon, consisting of a bunch of vertices (Nx2 ndarray) which specify an
implicitly-closed boundary, and an offset. implicitly-closed boundary, and an offset.
@ -17,6 +20,7 @@ class Polygon(Shape):
A `normalized_form(...)` is available, but can be quite slow with lots of vertices. A `normalized_form(...)` is available, but can be quite slow with lots of vertices.
""" """
__slots__ = ('_vertices',) __slots__ = ('_vertices',)
_vertices: numpy.ndarray _vertices: numpy.ndarray
""" Nx2 ndarray of vertices `[[x0, y0], [x1, y1], ...]` """ """ Nx2 ndarray of vertices `[[x0, y0], [x1, y1], ...]` """
@ -30,7 +34,7 @@ class Polygon(Shape):
@vertices.setter @vertices.setter
def vertices(self, val: numpy.ndarray): def vertices(self, val: numpy.ndarray):
val = numpy.array(val, dtype=float) #TODO document that these might not be copied val = numpy.array(val, dtype=float) # TODO document that these might not be copied
if len(val.shape) < 2 or val.shape[1] != 2: if len(val.shape) < 2 or val.shape[1] != 2:
raise PatternError('Vertices must be an Nx2 array') raise PatternError('Vertices must be an Nx2 array')
if val.shape[0] < 3: if val.shape[0] < 3:
@ -69,29 +73,44 @@ class Polygon(Shape):
def __init__(self, def __init__(self,
vertices: numpy.ndarray, vertices: numpy.ndarray,
*,
offset: vector2 = (0.0, 0.0), offset: vector2 = (0.0, 0.0),
rotation: float = 0.0, rotation: float = 0.0,
mirrored: Sequence[bool] = (False, False), mirrored: Sequence[bool] = (False, False),
layer: layer_t = 0, layer: layer_t = 0,
dose: float = 1.0, dose: float = 1.0,
repetition: Optional[Repetition] = None,
annotations: Optional[annotations_t] = None,
locked: bool = False, locked: bool = False,
raw: bool = False,
): ):
object.__setattr__(self, 'locked', False) LockableImpl.unlock(self)
self.identifier = () self.identifier = ()
self.layer = layer if raw:
self.dose = dose self._vertices = vertices
self.vertices = vertices self._offset = offset
self.offset = offset self._repetition = repetition
self._annotations = annotations if annotations is not None else {}
self._layer = layer
self._dose = dose
else:
self.vertices = vertices
self.offset = offset
self.repetition = repetition
self.annotations = annotations if annotations is not None else {}
self.layer = layer
self.dose = dose
self.rotate(rotation) self.rotate(rotation)
[self.mirror(a) for a, do in enumerate(mirrored) if do] [self.mirror(a) for a, do in enumerate(mirrored) if do]
self.locked = locked self.set_locked(locked)
def __deepcopy__(self, memo: Optional[Dict] = None) -> 'Polygon': def __deepcopy__(self, memo: Optional[Dict] = None) -> 'Polygon':
memo = {} if memo is None else memo memo = {} if memo is None else memo
new = copy.copy(self).unlock() new = copy.copy(self).unlock()
new._offset = self._offset.copy() new._offset = self._offset.copy()
new._vertices = self._vertices.copy() new._vertices = self._vertices.copy()
new.locked = self.locked new._annotations = copy.deepcopy(self._annotations)
new.set_locked(self.locked)
return new return new
@staticmethod @staticmethod
@ -250,7 +269,6 @@ class Polygon(Shape):
layer=layer, dose=dose) layer=layer, dose=dose)
return poly return poly
def to_polygons(self, def to_polygons(self,
poly_num_points: int = None, # unused poly_num_points: int = None, # unused
poly_max_arclen: float = None, # unused poly_max_arclen: float = None, # unused
@ -262,7 +280,8 @@ class Polygon(Shape):
self.offset + numpy.max(self.vertices, axis=0))) self.offset + numpy.max(self.vertices, axis=0)))
def rotate(self, theta: float) -> 'Polygon': def rotate(self, theta: float) -> 'Polygon':
self.vertices = numpy.dot(rotation_matrix_2d(theta), self.vertices.T).T if theta != 0:
self.vertices = numpy.dot(rotation_matrix_2d(theta), self.vertices.T).T
return self return self
def mirror(self, axis: int) -> 'Polygon': def mirror(self, axis: int) -> 'Polygon':
@ -296,9 +315,9 @@ class Polygon(Shape):
# TODO: normalize mirroring? # TODO: normalize mirroring?
return (type(self), reordered_vertices.data.tobytes(), self.layer), \ return ((type(self), reordered_vertices.data.tobytes(), self.layer),
(offset, scale/norm_value, rotation, False, self.dose), \ (offset, scale / norm_value, rotation, False, self.dose),
lambda: Polygon(reordered_vertices*norm_value, layer=self.layer) lambda: Polygon(reordered_vertices * norm_value, layer=self.layer))
def clean_vertices(self) -> 'Polygon': def clean_vertices(self) -> 'Polygon':
""" """

View File

@ -1,10 +1,12 @@
from typing import List, Tuple, Callable, TypeVar, Optional, TYPE_CHECKING from typing import List, Tuple, Callable, TypeVar, Optional, TYPE_CHECKING
from abc import ABCMeta, abstractmethod from abc import ABCMeta, abstractmethod
import copy
import numpy
from ..error import PatternError, PatternLockedError import numpy # type: ignore
from ..utils import is_scalar, rotation_matrix_2d, vector2, layer_t
from ..traits import (PositionableImpl, LayerableImpl, DoseableImpl,
Rotatable, Mirrorable, Copyable, Scalable,
PivotableImpl, LockableImpl, RepeatableImpl,
AnnotatableImpl)
if TYPE_CHECKING: if TYPE_CHECKING:
from . import Polygon from . import Polygon
@ -23,36 +25,20 @@ DEFAULT_POLY_NUM_POINTS = 24
T = TypeVar('T', bound='Shape') T = TypeVar('T', bound='Shape')
class Shape(metaclass=ABCMeta): class Shape(PositionableImpl, LayerableImpl, DoseableImpl, Rotatable, Mirrorable, Copyable, Scalable,
PivotableImpl, RepeatableImpl, LockableImpl, AnnotatableImpl, metaclass=ABCMeta):
""" """
Abstract class specifying functions common to all shapes. Abstract class specifying functions common to all shapes.
""" """
__slots__ = ('_offset', '_layer', '_dose', 'identifier', 'locked') __slots__ = () # Children should use AutoSlots
_offset: numpy.ndarray
""" `[x_offset, y_offset]` """
_layer: layer_t
""" Layer (integer >= 0 or tuple) """
_dose: float
""" Dose """
identifier: Tuple identifier: Tuple
""" An arbitrary identifier for the shape, usually empty but used by `Pattern.flatten()` """ """ An arbitrary identifier for the shape, usually empty but used by `Pattern.flatten()` """
locked: bool
""" If `True`, any changes to the shape will raise a `PatternLockedError` """
def __setattr__(self, name, value):
if self.locked and name != 'locked':
raise PatternLockedError()
object.__setattr__(self, name, value)
def __copy__(self) -> 'Shape': def __copy__(self) -> 'Shape':
cls = self.__class__ cls = self.__class__
new = cls.__new__(cls) new = cls.__new__(cls)
for name in Shape.__slots__ + self.__slots__: for name in self.__slots__: # type: str
object.__setattr__(new, name, getattr(self, name)) object.__setattr__(new, name, getattr(self, name))
return new return new
@ -79,53 +65,6 @@ class Shape(metaclass=ABCMeta):
""" """
pass pass
@abstractmethod
def get_bounds(self) -> numpy.ndarray:
"""
Returns `[[x_min, y_min], [x_max, y_max]]` which specify a minimal bounding box for the shape.
"""
pass
@abstractmethod
def rotate(self: T, theta: float) -> T:
"""
Rotate the shape around its origin (0, 0), ignoring its offset.
Args:
theta: Angle to rotate by (counterclockwise, radians)
Returns:
self
"""
pass
@abstractmethod
def mirror(self: T, axis: int) -> T:
"""
Mirror the shape across an axis.
Args:
axis: Axis to mirror across.
(0: mirror across x axis, 1: mirror across y axis)
Returns:
self
"""
pass
@abstractmethod
def scale_by(self: T, c: float) -> T:
"""
Scale the shape's size (eg. radius, for a circle) by a constant factor.
Args:
c: Factor to scale by
Returns:
self
"""
pass
@abstractmethod @abstractmethod
def normalized_form(self: T, norm_value: int) -> normalized_shape_tuple: def normalized_form(self: T, norm_value: int) -> normalized_shape_tuple:
""" """
@ -150,97 +89,9 @@ class Shape(metaclass=ABCMeta):
""" """
pass pass
'''
---- Non-abstract properties
'''
# offset property
@property
def offset(self) -> numpy.ndarray:
"""
[x, y] offset
"""
return self._offset
@offset.setter
def offset(self, val: vector2):
if not isinstance(val, numpy.ndarray):
val = numpy.array(val, dtype=float)
if val.size != 2:
raise PatternError('Offset must be convertible to size-2 ndarray')
self._offset = val.flatten()
# layer property
@property
def layer(self) -> layer_t:
"""
Layer number or name (int, tuple of ints, or string)
"""
return self._layer
@layer.setter
def layer(self, val: layer_t):
self._layer = val
# dose property
@property
def dose(self) -> float:
"""
Dose (float >= 0)
"""
return self._dose
@dose.setter
def dose(self, val: float):
if not is_scalar(val):
raise PatternError('Dose must be a scalar')
if not val >= 0:
raise PatternError('Dose must be non-negative')
self._dose = val
''' '''
---- Non-abstract methods ---- Non-abstract methods
''' '''
def copy(self: T) -> T:
"""
Returns a deep copy of the shape.
Returns:
copy.deepcopy(self)
"""
return copy.deepcopy(self)
def translate(self: T, offset: vector2) -> T:
"""
Translate the shape by the given offset
Args:
offset: [x_offset, y,offset]
Returns:
self
"""
self.offset += offset
return self
def rotate_around(self: T, pivot: vector2, rotation: float) -> T:
"""
Rotate the shape around a point.
Args:
pivot: Point (x, y) to rotate around
rotation: Angle to rotate by (counterclockwise, radians)
Returns:
self
"""
pivot = numpy.array(pivot, dtype=float)
self.translate(-pivot)
self.rotate(rotation)
self.offset = numpy.dot(rotation_matrix_2d(rotation), self.offset)
self.translate(+pivot)
return self
def manhattanize_fast(self, def manhattanize_fast(self,
grid_x: numpy.ndarray, grid_x: numpy.ndarray,
grid_y: numpy.ndarray, grid_y: numpy.ndarray,
@ -288,7 +139,6 @@ class Shape(metaclass=ABCMeta):
if err_xmax >= 0.5: if err_xmax >= 0.5:
gxi_max += 1 gxi_max += 1
if abs(dv[0]) < 1e-20: if abs(dv[0]) < 1e-20:
# Vertical line, don't calculate slope # Vertical line, don't calculate slope
xi = [gxi_min, gxi_max - 1] xi = [gxi_min, gxi_max - 1]
@ -301,8 +151,9 @@ class Shape(metaclass=ABCMeta):
vertex_lists.append(segment) vertex_lists.append(segment)
continue continue
m = dv[1]/dv[0] m = dv[1] / dv[0]
def get_grid_inds(xes):
def get_grid_inds(xes: numpy.ndarray) -> numpy.ndarray:
ys = m * (xes - v[0]) + v[1] ys = m * (xes - v[0]) + v[1]
# (inds - 1) is the index of the y-grid line below the edge's intersection with the x-grid # (inds - 1) is the index of the y-grid line below the edge's intersection with the x-grid
@ -324,7 +175,7 @@ class Shape(metaclass=ABCMeta):
xs2 = (xs[:-1] + xs[1:]) / 2 xs2 = (xs[:-1] + xs[1:]) / 2
inds2 = get_grid_inds(xs2) inds2 = get_grid_inds(xs2)
xinds = numpy.round(numpy.arange(gxi_min, gxi_max - 0.99, 1/3)).astype(int) xinds = numpy.round(numpy.arange(gxi_min, gxi_max - 0.99, 1 / 3)).astype(int)
# interleave the results # interleave the results
yinds = xinds.copy() yinds = xinds.copy()
@ -348,7 +199,6 @@ class Shape(metaclass=ABCMeta):
return manhattan_polygons return manhattan_polygons
def manhattanize(self, def manhattanize(self,
grid_x: numpy.ndarray, grid_x: numpy.ndarray,
grid_y: numpy.ndarray grid_y: numpy.ndarray
@ -392,7 +242,7 @@ class Shape(metaclass=ABCMeta):
List of `Polygon` objects with grid-aligned edges. List of `Polygon` objects with grid-aligned edges.
""" """
from . import Polygon from . import Polygon
import skimage.measure import skimage.measure # type: ignore
import float_raster import float_raster
grid_x = numpy.unique(grid_x) grid_x = numpy.unique(grid_x)
@ -442,37 +292,12 @@ class Shape(metaclass=ABCMeta):
return manhattan_polygons return manhattan_polygons
def set_layer(self: T, layer: layer_t) -> T:
"""
Chainable method for changing the layer.
Args:
layer: new value for self.layer
Returns:
self
"""
self.layer = layer
return self
def lock(self: T) -> T: def lock(self: T) -> T:
""" PositionableImpl._lock(self)
Lock the Shape, disallowing further changes LockableImpl.lock(self)
Returns:
self
"""
self.offset.flags.writeable = False
object.__setattr__(self, 'locked', True)
return self return self
def unlock(self: T) -> T: def unlock(self: T) -> T:
""" LockableImpl.unlock(self)
Unlock the Shape PositionableImpl._unlock(self)
Returns:
self
"""
object.__setattr__(self, 'locked', False)
self.offset.flags.writeable = True
return self return self

View File

@ -1,27 +1,32 @@
from typing import List, Tuple, Dict, Sequence, Optional, MutableSequence from typing import List, Tuple, Dict, Sequence, Optional
import copy import copy
import numpy
import numpy # type: ignore
from numpy import pi, inf from numpy import pi, inf
from . import Shape, Polygon, normalized_shape_tuple from . import Shape, Polygon, normalized_shape_tuple
from .. import PatternError from .. import PatternError
from ..utils import is_scalar, vector2, get_bit, normalize_mirror, layer_t from ..repetition import Repetition
from ..traits import RotatableImpl
from ..utils import is_scalar, vector2, get_bit, normalize_mirror, layer_t, AutoSlots
from ..utils import annotations_t
from ..traits import LockableImpl
# Loaded on use: # Loaded on use:
# from freetype import Face # from freetype import Face
# from matplotlib.path import Path # from matplotlib.path import Path
class Text(Shape): class Text(RotatableImpl, Shape, metaclass=AutoSlots):
""" """
Text (to be printed e.g. as a set of polygons). Text (to be printed e.g. as a set of polygons).
This is distinct from non-printed Label objects. This is distinct from non-printed Label objects.
""" """
__slots__ = ('_string', '_height', '_rotation', '_mirrored', 'font_path') __slots__ = ('_string', '_height', '_mirrored', 'font_path')
_string: str _string: str
_height: float _height: float
_rotation: float _mirrored: numpy.ndarray # ndarray[bool]
_mirrored: numpy.ndarray #ndarray[bool]
font_path: str font_path: str
# vertices property # vertices property
@ -33,17 +38,6 @@ class Text(Shape):
def string(self, val: str): def string(self, val: str):
self._string = val self._string = val
# Rotation property
@property
def rotation(self) -> float:
return self._rotation
@rotation.setter
def rotation(self, val: float):
if not is_scalar(val):
raise PatternError('Rotation must be a scalar')
self._rotation = val % (2 * pi)
# Height property # Height property
@property @property
def height(self) -> float: def height(self) -> float:
@ -57,7 +51,7 @@ class Text(Shape):
# Mirrored property # Mirrored property
@property @property
def mirrored(self) -> numpy.ndarray: #ndarray[bool] def mirrored(self) -> numpy.ndarray: # ndarray[bool]
return self._mirrored return self._mirrored
@mirrored.setter @mirrored.setter
@ -70,31 +64,49 @@ class Text(Shape):
string: str, string: str,
height: float, height: float,
font_path: str, font_path: str,
*,
offset: vector2 = (0.0, 0.0), offset: vector2 = (0.0, 0.0),
rotation: float = 0.0, rotation: float = 0.0,
mirrored: Tuple[bool, bool] = (False, False), mirrored: Tuple[bool, bool] = (False, False),
layer: layer_t = 0, layer: layer_t = 0,
dose: float = 1.0, dose: float = 1.0,
repetition: Optional[Repetition] = None,
annotations: Optional[annotations_t] = None,
locked: bool = False, locked: bool = False,
raw: bool = False,
): ):
object.__setattr__(self, 'locked', False) LockableImpl.unlock(self)
self.identifier = () self.identifier = ()
self.offset = offset if raw:
self.layer = layer self._offset = offset
self.dose = dose self._layer = layer
self.string = string self._dose = dose
self.height = height self._string = string
self.rotation = rotation self._height = height
self._rotation = rotation
self._mirrored = mirrored
self._repetition = repetition
self._annotations = annotations if annotations is not None else {}
else:
self.offset = offset
self.layer = layer
self.dose = dose
self.string = string
self.height = height
self.rotation = rotation
self.mirrored = mirrored
self.repetition = repetition
self.annotations = annotations if annotations is not None else {}
self.font_path = font_path self.font_path = font_path
self.mirrored = mirrored self.set_locked(locked)
self.locked = locked
def __deepcopy__(self, memo: Dict = None) -> 'Text': def __deepcopy__(self, memo: Dict = None) -> 'Text':
memo = {} if memo is None else memo memo = {} if memo is None else memo
new = copy.copy(self).unlock() new = copy.copy(self).unlock()
new._offset = self._offset.copy() new._offset = self._offset.copy()
new._mirrored = copy.deepcopy(self._mirrored, memo) new._mirrored = copy.deepcopy(self._mirrored, memo)
new.locked = self.locked new._annotations = copy.deepcopy(self._annotations)
new.set_locked(self.locked)
return new return new
def to_polygons(self, def to_polygons(self,
@ -120,10 +132,6 @@ class Text(Shape):
return all_polygons return all_polygons
def rotate(self, theta: float) -> 'Text':
self.rotation += theta
return self
def mirror(self, axis: int) -> 'Text': def mirror(self, axis: int) -> 'Text':
self.mirrored[axis] = not self.mirrored[axis] self.mirrored[axis] = not self.mirrored[axis]
return self return self
@ -136,14 +144,14 @@ class Text(Shape):
mirror_x, rotation = normalize_mirror(self.mirrored) mirror_x, rotation = normalize_mirror(self.mirrored)
rotation += self.rotation rotation += self.rotation
rotation %= 2 * pi rotation %= 2 * pi
return (type(self), self.string, self.font_path, self.layer), \ return ((type(self), self.string, self.font_path, self.layer),
(self.offset, self.height / norm_value, rotation, mirror_x, self.dose), \ (self.offset, self.height / norm_value, rotation, mirror_x, self.dose),
lambda: Text(string=self.string, lambda: Text(string=self.string,
height=self.height * norm_value, height=self.height * norm_value,
font_path=self.font_path, font_path=self.font_path,
rotation=rotation, rotation=rotation,
mirrored=(mirror_x, False), mirrored=(mirror_x, False),
layer=self.layer) layer=self.layer))
def get_bounds(self) -> numpy.ndarray: def get_bounds(self) -> numpy.ndarray:
# rotation makes this a huge pain when using slot.advance and glyph.bbox(), so # rotation makes this a huge pain when using slot.advance and glyph.bbox(), so
@ -160,10 +168,10 @@ class Text(Shape):
def get_char_as_polygons(font_path: str, def get_char_as_polygons(font_path: str,
char: str, char: str,
resolution: float = 48*64, resolution: float = 48 * 64,
) -> Tuple[List[List[List[float]]], float]: ) -> Tuple[List[List[List[float]]], float]:
from freetype import Face from freetype import Face # type: ignore
from matplotlib.path import Path from matplotlib.path import Path # type: ignore
""" """
Get a list of polygons representing a single character. Get a list of polygons representing a single character.

121
masque/snaps/snapper.py Normal file
View File

@ -0,0 +1,121 @@
callback_t = Callable[[T, numpy.ndarray, Dict[str, int]], None]
class Device:
pattern: Pattern
ports: numpy.ndarray
port_names: Dict[str, int]
callback: Optional[callback_t] = None
def __init__(self,
pattern: Optional[Pattern] = None,
ports: Optional[numpy.ndarray] = None,
names: Optional[Union[Dict[str, int], List[Optional[str]]]] = None,
callback: Optional[callback_t] = None,
) -> None:
self.pattern = Pattern() if pattern is None else pattern
if ports is None:
# Default: two ports at the origin, facing +x and -x (rows are [x, y, angle, type])
self.ports = numpy.array([[0, 0, 0, 0],
[0, 0, pi, 0]], dtype=float)
else:
self.ports = numpy.array(ports, dtype=float)
if names is None:
self.port_names = {}
elif isinstance(names, dict):
self.port_names = names
else:
self.port_names = {nn: ii for ii, nn in enumerate(names) if nn is not None}
if callback:
self.callback = callback
self.callback(self, self.ports, self.port_names)
def __getitem__(self, key) -> numpy.ndarray:
if isinstance(key, str):
inds = [self.port_names[key]]
elif hasattr(key, '__iter__'):
inds = [self.port_names.get(k, k) for k in key]
else:
inds = [self.port_names.get(key, key)]
return self.ports[inds]
def build(self: T,
name: str,
other: Device,
map_in: Dict[port_t, port_t],
map_out: Dict[port_t, port_t],
mirror: Sequence[bool] = (False, False),
) -> T:
translation, rotation, pivot = self.find_transform(other, map_in, map_out, mirror)
pat = Pattern(name)
pat.addsp(self.pattern)
new = Device(pat, ports=self.ports, names=self.port_names, callback=self.callback)
return new
def plug(self, other, map_in, map_out, mirror):
translation, rotation, pivot = self.find_transform(other, map_in, map_out, mirror)
sp = SubPattern(other.pattern, mirrored=mirror)
sp.rotate_around(pivot, rotation)
sp.translate(translation)
self.pattern.subpatterns.append(sp)
# get rid of plugged ports
# insert remaining device ports into router port list
with numpy.errstate(invalid='ignore'):
self.ports[:, 2] %= 2 * pi
if self.callback:
self.callback(...) # TODO: notify about the changed port list
def find_transform(self, other, map_in, map_out, mirror):
s_ports = self[map_in.keys()]
o_ports = other[map_in.values()]
if mirror[0]:
o_ports[:, 1] *= -1
o_ports[:, 2] += pi
if mirror[1]:
o_ports[:, 0] *= -1
o_ports[:, 2] += pi
s_offsets = s_ports[:, :2]
s_angles = s_ports[:, 2]
s_types = s_ports[:, 3]
o_offsets = o_ports[:, :2]
o_angles = o_ports[:, 2]
o_types = o_ports[:, 3]
type_conflicts = (s_types != o_types) & (s_types != 0) & (o_types != 0)
if type_conflicts.any():
pass # TODO warn about mismatched port types
rotations = numpy.mod(s_angles - o_angles - pi, 2 * pi)
if not numpy.allclose(rotations[:1], rotations):
pass # TODO warn about rotation mismatch
pivot = o_offsets[0].copy()
rot_ports = rotate_ports_around(o_ports, pivot, rotations[0]) #TODO also rotate unplugged device ports
translations = s_offsets - rot_ports[:, :2]
if not numpy.allclose(translations[:1], translations):
pass # TODO warn about translation mismatch
return translations[0], rotations[0], pivot
def as_pattern(self, name) -> Pattern:
return self.pattern.copy().rename(name)
def as_device(self, name):
# TODO: pass along only the remaining (non-NaN) ports
return Device(self.as_pattern(name), ports=self.ports, names=self.port_names, callback=self.callback)
def rotate_ports_around(ports: numpy.ndarray, pivot: numpy.ndarray, angle: float) -> numpy.ndarray:
ports[:, :2] -= pivot
ports[:, :2] = (rotation_matrix_2d(angle) @ ports[:, :2].T).T
ports[:, :2] += pivot
ports[:, 2] += angle
return ports
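The `find_transform` prototype above computes, per mated port pair, the rotation and translation that bring the other device's port onto this device's port, facing the opposite direction. A self-contained sketch of that math for a single pair (ports as `[x, y, angle, type]` rows; this is one plausible reading of the prototype, not its final form):

```python
import numpy
from numpy import pi

def rotation_matrix_2d(theta: float) -> numpy.ndarray:
    return numpy.array([[numpy.cos(theta), -numpy.sin(theta)],
                        [numpy.sin(theta),  numpy.cos(theta)]])

def align_port(s_port: numpy.ndarray, o_port: numpy.ndarray):
    """Rotation (about the origin) and translation that mate o_port onto s_port."""
    rotation = (s_port[2] - o_port[2] - pi) % (2 * pi)   # mated ports face each other
    rotated_xy = rotation_matrix_2d(rotation) @ o_port[:2]
    translation = s_port[:2] - rotated_xy
    return translation, rotation

# A port at (5, 0) facing +x mated with a port at (0, 0) also facing +x:
t, r = align_port(numpy.array([5, 0, 0, 0.]), numpy.array([0, 0, 0, 0.]))
# r == pi (flip the incoming device), t == [5, 0]
```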

View File

@ -4,68 +4,61 @@
""" """
#TODO more top-level documentation #TODO more top-level documentation
from typing import Union, List, Dict, Tuple, Optional, Sequence, TYPE_CHECKING, Any from typing import Dict, Tuple, Optional, Sequence, TYPE_CHECKING, Any, TypeVar
import copy import copy
import numpy import numpy # type: ignore
from numpy import pi from numpy import pi
from .error import PatternError, PatternLockedError from .error import PatternError
from .utils import is_scalar, rotation_matrix_2d, vector2 from .utils import is_scalar, vector2, AutoSlots, annotations_t
from .repetition import GridRepetition from .repetition import Repetition
from .traits import (PositionableImpl, DoseableImpl, RotatableImpl, ScalableImpl,
Mirrorable, PivotableImpl, Copyable, LockableImpl, RepeatableImpl,
AnnotatableImpl)
if TYPE_CHECKING: if TYPE_CHECKING:
from . import Pattern from . import Pattern
class SubPattern: S = TypeVar('S', bound='SubPattern')
class SubPattern(PositionableImpl, DoseableImpl, RotatableImpl, ScalableImpl, Mirrorable,
PivotableImpl, Copyable, RepeatableImpl, LockableImpl, AnnotatableImpl,
metaclass=AutoSlots):
""" """
SubPattern provides basic support for nesting Pattern objects within each other, by adding SubPattern provides basic support for nesting Pattern objects within each other, by adding
offset, rotation, scaling, and associated methods. offset, rotation, scaling, and associated methods.
""" """
__slots__ = ('_pattern', __slots__ = ('_pattern',
'_offset',
'_rotation',
'_dose',
'_scale',
'_mirrored', '_mirrored',
'identifier', 'identifier',
'locked') )
_pattern: Optional['Pattern'] _pattern: Optional['Pattern']
""" The `Pattern` being instanced """ """ The `Pattern` being instanced """
_offset: numpy.ndarray
""" (x, y) offset for the instance """
_rotation: float
""" rotation for the instance, radians counterclockwise """
_dose: float
""" dose factor for the instance """
_scale: float
""" scale factor for the instance """
_mirrored: numpy.ndarray # ndarray[bool] _mirrored: numpy.ndarray # ndarray[bool]
""" Whether to mirror the instanc across the x and/or y axes. """ """ Whether to mirror the instance across the x and/or y axes. """
identifier: Tuple[Any, ...] identifier: Tuple[Any, ...]
""" Arbitrary identifier, used internally by some `masque` functions. """ """ Arbitrary identifier, used internally by some `masque` functions. """
locked: bool
""" If `True`, disallows changes to the GridRepetition """
def __init__(self, def __init__(self,
pattern: Optional['Pattern'], pattern: Optional['Pattern'],
*,
offset: vector2 = (0.0, 0.0), offset: vector2 = (0.0, 0.0),
rotation: float = 0.0, rotation: float = 0.0,
mirrored: Optional[Sequence[bool]] = None, mirrored: Optional[Sequence[bool]] = None,
dose: float = 1.0, dose: float = 1.0,
scale: float = 1.0, scale: float = 1.0,
repetition: Optional[Repetition] = None,
annotations: Optional[annotations_t] = None,
locked: bool = False, locked: bool = False,
identifier: Tuple[Any, ...] = ()): identifier: Tuple[Any, ...] = (),
) -> None:
""" """
Args: Args:
pattern: Pattern to reference. pattern: Pattern to reference.
@ -74,10 +67,11 @@ class SubPattern:
mirrored: Whether to mirror the referenced pattern across its x and y axes. mirrored: Whether to mirror the referenced pattern across its x and y axes.
dose: Scaling factor applied to the dose. dose: Scaling factor applied to the dose.
scale: Scaling factor applied to the pattern's geometry. scale: Scaling factor applied to the pattern's geometry.
repetition: TODO
locked: Whether the `SubPattern` is locked after initialization. locked: Whether the `SubPattern` is locked after initialization.
identifier: Arbitrary tuple, used internally by some `masque` functions. identifier: Arbitrary tuple, used internally by some `masque` functions.
""" """
object.__setattr__(self, 'locked', False) LockableImpl.unlock(self)
self.identifier = identifier self.identifier = identifier
self.pattern = pattern self.pattern = pattern
self.offset = offset self.offset = offset
@ -87,28 +81,29 @@ class SubPattern:
if mirrored is None: if mirrored is None:
mirrored = [False, False] mirrored = [False, False]
self.mirrored = mirrored self.mirrored = mirrored
self.locked = locked self.repetition = repetition
self.annotations = annotations if annotations is not None else {}
self.set_locked(locked)
def __setattr__(self, name, value): def __copy__(self: S) -> S:
if self.locked and name != 'locked':
raise PatternLockedError()
object.__setattr__(self, name, value)
def __copy__(self) -> 'SubPattern':
new = SubPattern(pattern=self.pattern, new = SubPattern(pattern=self.pattern,
offset=self.offset.copy(), offset=self.offset.copy(),
rotation=self.rotation, rotation=self.rotation,
dose=self.dose, dose=self.dose,
scale=self.scale, scale=self.scale,
mirrored=self.mirrored.copy(), mirrored=self.mirrored.copy(),
repetition=copy.deepcopy(self.repetition),
annotations=copy.deepcopy(self.annotations),
locked=self.locked) locked=self.locked)
return new return new
def __deepcopy__(self, memo: Dict = None) -> 'SubPattern': def __deepcopy__(self: S, memo: Dict = None) -> S:
memo = {} if memo is None else memo memo = {} if memo is None else memo
new = copy.copy(self).unlock() new = copy.copy(self).unlock()
new.pattern = copy.deepcopy(self.pattern, memo) new.pattern = copy.deepcopy(self.pattern, memo)
new.locked = self.locked new.repetition = copy.deepcopy(self.repetition, memo)
new.annotations = copy.deepcopy(self.annotations, memo)
new.set_locked(self.locked)
return new return new
# pattern property # pattern property
@ -120,60 +115,9 @@ class SubPattern:
def pattern(self, val: Optional['Pattern']): def pattern(self, val: Optional['Pattern']):
from .pattern import Pattern from .pattern import Pattern
if val is not None and not isinstance(val, Pattern): if val is not None and not isinstance(val, Pattern):
raise PatternError('Provided pattern {} is not a Pattern object or None!'.format(val)) raise PatternError(f'Provided pattern {val} is not a Pattern object or None!')
self._pattern = val self._pattern = val
# offset property
@property
def offset(self) -> numpy.ndarray:
return self._offset
@offset.setter
def offset(self, val: vector2):
if not isinstance(val, numpy.ndarray):
val = numpy.array(val, dtype=float)
if val.size != 2:
raise PatternError('Offset must be convertible to size-2 ndarray')
self._offset = val.flatten().astype(float)
# dose property
@property
def dose(self) -> float:
return self._dose
@dose.setter
def dose(self, val: float):
if not is_scalar(val):
raise PatternError('Dose must be a scalar')
if not val >= 0:
raise PatternError('Dose must be non-negative')
self._dose = val
# scale property
@property
def scale(self) -> float:
return self._scale
@scale.setter
def scale(self, val: float):
if not is_scalar(val):
raise PatternError('Scale must be a scalar')
if not val > 0:
raise PatternError('Scale must be positive')
self._scale = val
# Rotation property [ccw]
@property
def rotation(self) -> float:
return self._rotation
@rotation.setter
def rotation(self, val: float):
if not is_scalar(val):
raise PatternError('Rotation must be a scalar')
self._rotation = val % (2 * pi)
# Mirrored property # Mirrored property
@property @property
def mirrored(self) -> numpy.ndarray: # ndarray[bool] def mirrored(self) -> numpy.ndarray: # ndarray[bool]
@ -198,64 +142,28 @@ class SubPattern:
pattern.rotate_around((0.0, 0.0), self.rotation) pattern.rotate_around((0.0, 0.0), self.rotation)
pattern.translate_elements(self.offset) pattern.translate_elements(self.offset)
pattern.scale_element_doses(self.dose) pattern.scale_element_doses(self.dose)
if self.repetition is not None:
combined = type(pattern)(name='__repetition__')
for dd in self.repetition.displacements:
temp_pat = pattern.deepcopy()
temp_pat.translate_elements(dd)
combined.append(temp_pat)
pattern = combined
return pattern return pattern
def translate(self, offset: vector2) -> 'SubPattern': def rotate(self: S, rotation: float) -> S:
"""
Translate by the given offset
Args:
offset: Offset `[x, y]` to translate by
Returns:
self
"""
self.offset += offset
return self
def rotate_around(self, pivot: vector2, rotation: float) -> 'SubPattern':
"""
Rotate around a point
Args:
pivot: Point `[x, y]` to rotate around
rotation: Angle to rotate by (counterclockwise, radians)
Returns:
self
"""
pivot = numpy.array(pivot, dtype=float)
self.translate(-pivot)
self.offset = numpy.dot(rotation_matrix_2d(rotation), self.offset)
self.rotate(rotation)
self.translate(+pivot)
return self
def rotate(self, rotation: float) -> 'SubPattern':
"""
Rotate the instance around it's origin
Args:
rotation: Angle to rotate by (counterclockwise, radians)
Returns:
self
"""
self.rotation += rotation self.rotation += rotation
if self.repetition is not None:
self.repetition.rotate(rotation)
return self return self
def mirror(self, axis: int) -> 'SubPattern': def mirror(self: S, axis: int) -> S:
"""
Mirror the subpattern across an axis.
Args:
axis: Axis to mirror across.
Returns:
self
"""
self.mirrored[axis] = not self.mirrored[axis] self.mirrored[axis] = not self.mirrored[axis]
self.rotation *= -1 self.rotation *= -1
if self.repetition is not None:
self.repetition.mirror(axis)
return self return self
def get_bounds(self) -> Optional[numpy.ndarray]: def get_bounds(self) -> Optional[numpy.ndarray]:
@ -271,62 +179,31 @@ class SubPattern:
return None return None
return self.as_pattern().get_bounds() return self.as_pattern().get_bounds()
def scale_by(self, c: float) -> 'SubPattern': def lock(self: S) -> S:
"""
Scale the subpattern by a factor
Args:
c: scaling factor
Returns:
self
"""
self.scale *= c
return self
def copy(self) -> 'SubPattern':
"""
Return a shallow copy of the subpattern.
Returns:
`copy.copy(self)`
"""
return copy.copy(self)
def deepcopy(self) -> 'SubPattern':
"""
Return a deep copy of the subpattern.
Returns:
`copy.deepcopy(self)`
"""
return copy.deepcopy(self)
def lock(self) -> 'SubPattern':
""" """
Lock the SubPattern, disallowing changes Lock the SubPattern, disallowing changes
Returns: Returns:
self self
""" """
self.offset.flags.writeable = False
self.mirrored.flags.writeable = False self.mirrored.flags.writeable = False
object.__setattr__(self, 'locked', True) PositionableImpl._lock(self)
LockableImpl.lock(self)
return self return self
def unlock(self) -> 'SubPattern': def unlock(self: S) -> S:
""" """
Unlock the SubPattern Unlock the SubPattern
Returns: Returns:
self self
""" """
self.offset.flags.writeable = True LockableImpl.unlock(self)
PositionableImpl._unlock(self)
self.mirrored.flags.writeable = True self.mirrored.flags.writeable = True
object.__setattr__(self, 'locked', False)
return self return self
def deeplock(self) -> 'SubPattern': def deeplock(self: S) -> S:
""" """
Recursively lock the SubPattern and its contained pattern Recursively lock the SubPattern and its contained pattern
@ -338,7 +215,7 @@ class SubPattern:
self.pattern.deeplock() self.pattern.deeplock()
return self return self
def deepunlock(self) -> 'SubPattern': def deepunlock(self: S) -> S:
""" """
Recursively unlock the SubPattern and its contained pattern Recursively unlock the SubPattern and its contained pattern
@ -361,6 +238,3 @@ class SubPattern:
dose = f' d{self.dose:g}' if self.dose != 1 else '' dose = f' d{self.dose:g}' if self.dose != 1 else ''
locked = ' L' if self.locked else '' locked = ' L' if self.locked else ''
return f'<SubPattern "{name}" at {self.offset}{rotation}{scale}{mirrored}{dose}{locked}>' return f'<SubPattern "{name}" at {self.offset}{rotation}{scale}{mirrored}{dose}{locked}>'
subpattern_t = Union[SubPattern, GridRepetition]
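For reference, a minimal sketch of constructing a `SubPattern` with the new keyword-only arguments; the `Grid` arguments are assumed (check `masque.repetition.Grid` for the exact signature) and the cell contents are omitted:

```python
import numpy
from masque import Pattern, SubPattern
from masque.repetition import Grid

cell = Pattern('unit_cell')      # contents omitted here
top = Pattern('top')

# Keyword-only placement arguments, plus the new repetition/annotations fields.
inst = SubPattern(cell,
                  offset=(100, 0),
                  rotation=numpy.pi / 2,
                  repetition=Grid(a_vector=(200, 0), a_count=10),
                  annotations={'note': ['example property']})
top.subpatterns.append(inst)
```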

13
masque/traits/__init__.py Normal file
View File

@ -0,0 +1,13 @@
"""
Traits (mixins) and default implementations
"""
from .positionable import Positionable, PositionableImpl
from .layerable import Layerable, LayerableImpl
from .doseable import Doseable, DoseableImpl
from .rotatable import Rotatable, RotatableImpl, Pivotable, PivotableImpl
from .repeatable import Repeatable, RepeatableImpl
from .scalable import Scalable, ScalableImpl
from .mirrorable import Mirrorable
from .copyable import Copyable
from .lockable import Lockable, LockableImpl
from .annotatable import Annotatable, AnnotatableImpl

View File

@ -0,0 +1,55 @@
from typing import TypeVar
#from types import MappingProxyType
from abc import ABCMeta, abstractmethod
from ..utils import annotations_t
from ..error import PatternError
T = TypeVar('T', bound='Annotatable')
I = TypeVar('I', bound='AnnotatableImpl')
class Annotatable(metaclass=ABCMeta):
"""
Abstract class for all annotatable entities
Annotations correspond to GDS/OASIS "properties"
"""
__slots__ = ()
'''
---- Properties
'''
@property
@abstractmethod
def annotations(self) -> annotations_t:
"""
Dictionary mapping annotation names to values
"""
pass
class AnnotatableImpl(Annotatable, metaclass=ABCMeta):
"""
Simple implementation of `Annotatable`.
"""
__slots__ = ()
_annotations: annotations_t
""" Dictionary storing annotation name/value pairs """
'''
---- Non-abstract properties
'''
@property
def annotations(self) -> annotations_t:
return self._annotations
# # TODO: Find a way to make sure the subclass implements Lockable without dealing with diamond inheritance or this extra hasattr
# if hasattr(self, 'is_locked') and self.is_locked():
# return MappingProxyType(self._annotations)
@annotations.setter
def annotations(self, annotations: annotations_t):
if not isinstance(annotations, dict):
raise PatternError(f'annotations expected dict, got {type(annotations)}')
self._annotations = annotations
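Since annotations are a plain `dict` mapping names to lists of values (see `annotations_t` in `masque.utils`), usage on any `AnnotatableImpl` subclass is straightforward; a short sketch using `SubPattern`, with made-up names and values:

```python
from masque import Pattern, SubPattern

inst = SubPattern(Pattern('cell'))

# annotations_t is Dict[str, List[Union[int, float, str]]]
inst.annotations = {'vendor_id': [42], 'comment': ['test structure']}
inst.annotations['dose_note'] = [1.5, 'relative']

# Anything other than a dict is rejected by the setter:
# inst.annotations = 'oops'    # would raise PatternError
```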

34
masque/traits/copyable.py Normal file
View File

@ -0,0 +1,34 @@
from typing import TypeVar
from abc import ABCMeta
import copy
T = TypeVar('T', bound='Copyable')
class Copyable(metaclass=ABCMeta):
"""
Abstract class which adds .copy() and .deepcopy()
"""
__slots__ = ()
'''
---- Non-abstract methods
'''
def copy(self: T) -> T:
"""
Return a shallow copy of the object.
Returns:
`copy.copy(self)`
"""
return copy.copy(self)
def deepcopy(self: T) -> T:
"""
Return a deep copy of the object.
Returns:
`copy.deepcopy(self)`
"""
return copy.deepcopy(self)

76
masque/traits/doseable.py Normal file
View File

@ -0,0 +1,76 @@
from typing import TypeVar
from abc import ABCMeta, abstractmethod
from ..error import PatternError
T = TypeVar('T', bound='Doseable')
I = TypeVar('I', bound='DoseableImpl')
class Doseable(metaclass=ABCMeta):
"""
Abstract class for all doseable entities
"""
__slots__ = ()
'''
---- Properties
'''
@property
@abstractmethod
def dose(self) -> float:
"""
Dose (float >= 0)
"""
pass
# @dose.setter
# @abstractmethod
# def dose(self, val: float):
# pass
'''
---- Methods
'''
    @abstractmethod
    def set_dose(self: T, dose: float) -> T:
"""
Set the dose
Args:
dose: new value for dose
Returns:
self
"""
pass
class DoseableImpl(Doseable, metaclass=ABCMeta):
"""
Simple implementation of Doseable
"""
__slots__ = ()
_dose: float
""" Dose """
'''
---- Non-abstract properties
'''
@property
def dose(self) -> float:
return self._dose
@dose.setter
def dose(self, val: float):
if not val >= 0:
raise PatternError('Dose must be non-negative')
self._dose = val
'''
---- Non-abstract methods
'''
def set_dose(self: I, dose: float) -> I:
self.dose = dose
return self

View File

@ -0,0 +1,73 @@
from typing import TypeVar
from abc import ABCMeta, abstractmethod
from ..utils import layer_t
T = TypeVar('T', bound='Layerable')
I = TypeVar('I', bound='LayerableImpl')
class Layerable(metaclass=ABCMeta):
"""
Abstract class for all layerable entities
"""
__slots__ = ()
'''
---- Properties
'''
@property
@abstractmethod
def layer(self) -> layer_t:
"""
Layer number or name (int, tuple of ints, or string)
"""
pass
# @layer.setter
# @abstractmethod
# def layer(self, val: layer_t):
# pass
'''
---- Methods
'''
    @abstractmethod
    def set_layer(self: T, layer: layer_t) -> T:
"""
Set the layer
Args:
layer: new value for layer
Returns:
self
"""
pass
class LayerableImpl(Layerable, metaclass=ABCMeta):
"""
Simple implementation of Layerable
"""
__slots__ = ()
_layer: layer_t
""" Layer number, pair, or name """
'''
---- Non-abstract properties
'''
@property
def layer(self) -> layer_t:
return self._layer
@layer.setter
def layer(self, val: layer_t):
self._layer = val
'''
---- Non-abstract methods
'''
def set_layer(self: I, layer: layer_t) -> I:
self.layer = layer
return self

93
masque/traits/lockable.py Normal file
View File

@ -0,0 +1,93 @@
from typing import TypeVar
from abc import ABCMeta, abstractmethod
from ..error import PatternLockedError
T = TypeVar('T', bound='Lockable')
I = TypeVar('I', bound='LockableImpl')
class Lockable(metaclass=ABCMeta):
"""
Abstract class for all lockable entities
"""
__slots__ = ()
'''
---- Methods
'''
@abstractmethod
def lock(self: T) -> T:
"""
Lock the object, disallowing further changes
Returns:
self
"""
pass
@abstractmethod
def unlock(self: T) -> T:
"""
Unlock the object, reallowing changes
Returns:
self
"""
pass
@abstractmethod
def is_locked(self) -> bool:
"""
Returns:
True if the object is locked
"""
pass
def set_locked(self: T, locked: bool) -> T:
"""
Locks or unlocks based on the argument.
No action if already in the requested state.
Args:
locked: State to set.
Returns:
self
"""
if locked != self.is_locked():
if locked:
self.lock()
else:
self.unlock()
return self
class LockableImpl(Lockable, metaclass=ABCMeta):
"""
Simple implementation of Lockable
"""
__slots__ = ()
locked: bool
""" If `True`, disallows changes to the object """
'''
---- Non-abstract methods
'''
def __setattr__(self, name, value):
if self.locked and name != 'locked':
raise PatternLockedError()
object.__setattr__(self, name, value)
def lock(self: I) -> I:
object.__setattr__(self, 'locked', True)
return self
def unlock(self: I) -> I:
object.__setattr__(self, 'locked', False)
return self
def is_locked(self) -> bool:
return self.locked
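A short sketch of the locking behaviour this mixin provides, using `SubPattern` (a `LockableImpl` subclass) as the concrete type; once locked, any attribute write other than `locked` raises `PatternLockedError`:

```python
from masque import Pattern, SubPattern
from masque.error import PatternLockedError

inst = SubPattern(Pattern('cell'))
inst.lock()
assert inst.is_locked()

try:
    inst.dose = 2.0            # blocked: writes go through LockableImpl.__setattr__
except PatternLockedError:
    pass

inst.set_locked(False)         # equivalent to inst.unlock() here
inst.dose = 2.0                # allowed again
```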

View File

@ -0,0 +1,58 @@
from typing import TypeVar
from abc import ABCMeta, abstractmethod
T = TypeVar('T', bound='Mirrorable')
#I = TypeVar('I', bound='MirrorableImpl')
class Mirrorable(metaclass=ABCMeta):
"""
Abstract class for all mirrorable entities
"""
__slots__ = ()
'''
---- Abstract methods
'''
@abstractmethod
def mirror(self: T, axis: int) -> T:
"""
Mirror the entity across an axis.
Args:
axis: Axis to mirror across.
Returns:
self
"""
pass
#class MirrorableImpl(Mirrorable, metaclass=ABCMeta):
# """
# Simple implementation of `Mirrorable`
# """
# __slots__ = ()
#
# _mirrored: numpy.ndarray # ndarray[bool]
# """ Whether to mirror the instance across the x and/or y axes. """
#
# '''
# ---- Properties
# '''
# # Mirrored property
# @property
# def mirrored(self) -> numpy.ndarray: # ndarray[bool]
# """ Whether to mirror across the [x, y] axes, respectively """
# return self._mirrored
#
# @mirrored.setter
# def mirrored(self, val: Sequence[bool]):
# if is_scalar(val):
# raise PatternError('Mirrored must be a 2-element list of booleans')
# self._mirrored = numpy.array(val, dtype=bool, copy=True)
#
# '''
# ---- Methods
# '''

View File

@ -0,0 +1,132 @@
# TODO top-level comment about how traits should set __slots__ = (), and how to use AutoSlots
from typing import TypeVar
from abc import ABCMeta, abstractmethod
import numpy # type: ignore
from ..error import PatternError
from ..utils import vector2
T = TypeVar('T', bound='Positionable')
I = TypeVar('I', bound='PositionableImpl')
class Positionable(metaclass=ABCMeta):
"""
Abstract class for all positionable entities
"""
__slots__ = ()
'''
---- Abstract properties
'''
@property
@abstractmethod
def offset(self) -> numpy.ndarray:
"""
[x, y] offset
"""
pass
# @offset.setter
# @abstractmethod
# def offset(self, val: vector2):
# pass
'''
--- Abstract methods
'''
@abstractmethod
def get_bounds(self) -> numpy.ndarray:
"""
Returns `[[x_min, y_min], [x_max, y_max]]` which specify a minimal bounding box for the entity.
"""
pass
@abstractmethod
def set_offset(self: T, offset: vector2) -> T:
"""
Set the offset
Args:
            offset: [x_offset, y_offset]
Returns:
self
"""
pass
@abstractmethod
def translate(self: T, offset: vector2) -> T:
"""
Translate the entity by the given offset
Args:
            offset: [x_offset, y_offset]
Returns:
self
"""
pass
class PositionableImpl(Positionable, metaclass=ABCMeta):
"""
Simple implementation of Positionable
"""
__slots__ = ()
_offset: numpy.ndarray
""" `[x_offset, y_offset]` """
'''
---- Properties
'''
# offset property
@property
def offset(self) -> numpy.ndarray:
"""
[x, y] offset
"""
return self._offset
@offset.setter
def offset(self, val: vector2):
if not isinstance(val, numpy.ndarray) or val.dtype != numpy.float64:
val = numpy.array(val, dtype=float)
if val.size != 2:
raise PatternError('Offset must be convertible to size-2 ndarray')
self._offset = val.flatten()
'''
---- Methods
'''
def set_offset(self: I, offset: vector2) -> I:
self.offset = offset
return self
def translate(self: I, offset: vector2) -> I:
self._offset += offset
return self
def _lock(self: I) -> I:
"""
Lock the entity, disallowing further changes
Returns:
self
"""
self._offset.flags.writeable = False
return self
def _unlock(self: I) -> I:
"""
Unlock the entity
Returns:
self
"""
self._offset.flags.writeable = True
return self
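A brief sketch of the offset handling above: the setter coerces any 2-element sequence to a float ndarray, and `_lock()` freezes it by clearing the array's writeable flag. `SubPattern` is used here as a concrete `PositionableImpl`:

```python
from masque import Pattern, SubPattern

inst = SubPattern(Pattern('cell'), offset=(10, 20))
print(inst.offset)             # -> [10. 20.]  (coerced to a float64 ndarray)

inst.translate((5, 0))         # in place: offset becomes [15. 20.]

inst.lock()                    # SubPattern.lock() calls PositionableImpl._lock()
try:
    inst.offset[0] = 0         # the underlying array is now read-only
except ValueError:
    pass
```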

View File

@ -0,0 +1,82 @@
from typing import TypeVar, Optional, TYPE_CHECKING
from abc import ABCMeta, abstractmethod
from ..error import PatternError
if TYPE_CHECKING:
from ..repetition import Repetition
T = TypeVar('T', bound='Repeatable')
I = TypeVar('I', bound='RepeatableImpl')
class Repeatable(metaclass=ABCMeta):
"""
Abstract class for all repeatable entities
"""
__slots__ = ()
'''
---- Properties
'''
@property
@abstractmethod
def repetition(self) -> Optional['Repetition']:
"""
Repetition object, or None (single instance only)
"""
pass
# @repetition.setter
# @abstractmethod
# def repetition(self, repetition: Optional['Repetition']):
# pass
'''
---- Methods
'''
@abstractmethod
def set_repetition(self: T, repetition: Optional['Repetition']) -> T:
"""
Set the repetition
Args:
repetition: new value for repetition, or None (single instance)
Returns:
self
"""
pass
class RepeatableImpl(Repeatable, metaclass=ABCMeta):
"""
Simple implementation of `Repeatable`
"""
__slots__ = ()
_repetition: Optional['Repetition']
""" Repetition object, or None (single instance only) """
'''
---- Non-abstract properties
'''
@property
def repetition(self) -> Optional['Repetition']:
return self._repetition
@repetition.setter
def repetition(self, repetition: Optional['Repetition']):
from ..repetition import Repetition
if repetition is not None and not isinstance(repetition, Repetition):
raise PatternError(f'{repetition} is not a valid Repetition object!')
self._repetition = repetition
'''
---- Non-abstract methods
'''
def set_repetition(self: I, repetition: Optional['Repetition']) -> I:
self.repetition = repetition
return self

119
masque/traits/rotatable.py Normal file
View File

@ -0,0 +1,119 @@
from typing import TypeVar
from abc import ABCMeta, abstractmethod
import numpy # type: ignore
from numpy import pi
#from .positionable import Positionable
from ..error import PatternError
from ..utils import is_scalar, rotation_matrix_2d, vector2
T = TypeVar('T', bound='Rotatable')
I = TypeVar('I', bound='RotatableImpl')
P = TypeVar('P', bound='Pivotable')
J = TypeVar('J', bound='PivotableImpl')
class Rotatable(metaclass=ABCMeta):
"""
Abstract class for all rotatable entities
"""
__slots__ = ()
'''
---- Abstract methods
'''
@abstractmethod
def rotate(self: T, theta: float) -> T:
"""
Rotate the shape around its origin (0, 0), ignoring its offset.
Args:
theta: Angle to rotate by (counterclockwise, radians)
Returns:
self
"""
pass
class RotatableImpl(Rotatable, metaclass=ABCMeta):
"""
Simple implementation of `Rotatable`
"""
__slots__ = ()
_rotation: float
""" rotation for the object, radians counterclockwise """
'''
---- Properties
'''
@property
def rotation(self) -> float:
""" Rotation, radians counterclockwise """
return self._rotation
@rotation.setter
def rotation(self, val: float):
if not is_scalar(val):
raise PatternError('Rotation must be a scalar')
self._rotation = val % (2 * pi)
'''
---- Methods
'''
def rotate(self: I, rotation: float) -> I:
self.rotation += rotation
return self
def set_rotation(self: I, rotation: float) -> I:
"""
Set the rotation to a value
Args:
rotation: radians ccw
Returns:
self
"""
self.rotation = rotation
return self
class Pivotable(metaclass=ABCMeta):
"""
    Abstract class for entities which can be rotated around a point.
This requires that they are `Positionable` but not necessarily `Rotatable` themselves.
"""
__slots__ = ()
@abstractmethod
def rotate_around(self: P, pivot: vector2, rotation: float) -> P:
"""
Rotate the object around a point.
Args:
pivot: Point (x, y) to rotate around
rotation: Angle to rotate by (counterclockwise, radians)
Returns:
self
"""
pass
class PivotableImpl(Pivotable, metaclass=ABCMeta):
"""
Implementation of `Pivotable` for objects which are `Rotatable`
"""
__slots__ = ()
def rotate_around(self: J, pivot: vector2, rotation: float) -> J:
pivot = numpy.array(pivot, dtype=float)
self.translate(-pivot)
self.rotate(rotation)
self.offset = numpy.dot(rotation_matrix_2d(rotation), self.offset)
self.translate(+pivot)
return self
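The `rotate_around` sequence above (translate into the pivot frame, rotate the entity, rotate its offset, translate back) can be sanity-checked numerically, again using `SubPattern`:

```python
import numpy
from numpy import pi
from masque import Pattern, SubPattern

inst = SubPattern(Pattern('cell'), offset=(10, 0))

# Rotate 90 degrees CCW about the origin: the offset moves to (0, 10)
# and the instance's own rotation advances by pi/2.
inst.rotate_around((0, 0), pi / 2)

print(numpy.round(inst.offset, 6))    # -> [ 0. 10.]
print(inst.rotation)                  # -> 1.5707963... (pi/2)
```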

77
masque/traits/scalable.py Normal file
View File

@ -0,0 +1,77 @@
from typing import TypeVar
from abc import ABCMeta, abstractmethod
from ..error import PatternError
from ..utils import is_scalar
T = TypeVar('T', bound='Scalable')
I = TypeVar('I', bound='ScalableImpl')
class Scalable(metaclass=ABCMeta):
"""
Abstract class for all scalable entities
"""
__slots__ = ()
'''
---- Abstract methods
'''
@abstractmethod
def scale_by(self: T, c: float) -> T:
"""
Scale the entity by a factor
Args:
c: scaling factor
Returns:
self
"""
pass
class ScalableImpl(Scalable, metaclass=ABCMeta):
"""
Simple implementation of Scalable
"""
__slots__ = ()
_scale: float
""" scale factor for the entity """
'''
---- Properties
'''
@property
def scale(self) -> float:
return self._scale
@scale.setter
def scale(self, val: float):
if not is_scalar(val):
raise PatternError('Scale must be a scalar')
if not val > 0:
raise PatternError('Scale must be positive')
self._scale = val
'''
---- Methods
'''
def scale_by(self: I, c: float) -> I:
self.scale *= c
return self
def set_scale(self: I, scale: float) -> I:
"""
        Set the scale to a value
Args:
scale: absolute scale factor
Returns:
self
"""
self.scale = scale
return self

View File

@ -1,14 +1,16 @@
""" """
Various helper functions Various helper functions
""" """
from typing import Any, Union, Tuple, Sequence, Dict, List
from abc import ABCMeta
from typing import Any, Union, Tuple, Sequence import numpy # type: ignore
import numpy
# Type definitions # Type definitions
vector2 = Union[numpy.ndarray, Tuple[float, float], Sequence[float]] vector2 = Union[numpy.ndarray, Tuple[float, float], Sequence[float]]
layer_t = Union[int, Tuple[int, int], str] layer_t = Union[int, Tuple[int, int], str]
annotations_t = Dict[str, List[Union[int, float, str]]]
def is_scalar(var: Any) -> bool: def is_scalar(var: Any) -> bool:
@ -82,7 +84,7 @@ def normalize_mirror(mirrored: Sequence[bool]) -> Tuple[bool, float]:
""" """
mirrored_x, mirrored_y = mirrored mirrored_x, mirrored_y = mirrored
mirror_x = (mirrored_x != mirrored_y) #XOR mirror_x = (mirrored_x != mirrored_y) # XOR
angle = numpy.pi if mirrored_y else 0 angle = numpy.pi if mirrored_y else 0
return mirror_x, angle return mirror_x, angle
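The decomposition above turns a two-axis mirror flag into a single mirror plus a rotation; a tiny worked check:

```python
from masque.utils import normalize_mirror

# Mirroring across both axes is the same as a 180-degree rotation:
print(normalize_mirror((True, True)))     # -> (False, pi)

# A single mirror plus a possible rotation covers the remaining cases:
print(normalize_mirror((True, False)))    # -> (True, 0)
print(normalize_mirror((False, True)))    # -> (True, pi)
```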
@ -122,8 +124,8 @@ def remove_colinear_vertices(vertices: numpy.ndarray, closed_path: bool = True)
# Check for dx0/dy0 == dx1/dy1 # Check for dx0/dy0 == dx1/dy1
dv = numpy.roll(vertices, -1, axis=0) - vertices # [y1-y0, y2-y1, ...] dv = numpy.roll(vertices, -1, axis=0) - vertices # [y1-y0, y2-y1, ...]
dxdy = dv * numpy.roll(dv, 1, axis=0)[:, ::-1] #[[dx0*(dy_-1), (dx_-1)*dy0], dx1*dy0, dy1*dy0]] dxdy = dv * numpy.roll(dv, 1, axis=0)[:, ::-1] # [[dx0*(dy_-1), (dx_-1)*dy0], dx1*dy0, dy1*dy0]]
dxdy_diff = numpy.abs(numpy.diff(dxdy, axis=1))[:, 0] dxdy_diff = numpy.abs(numpy.diff(dxdy, axis=1))[:, 0]
err_mult = 2 * numpy.abs(dxdy).sum(axis=1) + 1e-40 err_mult = 2 * numpy.abs(dxdy).sum(axis=1) + 1e-40
@ -133,3 +135,30 @@ def remove_colinear_vertices(vertices: numpy.ndarray, closed_path: bool = True)
slopes_equal[[0, -1]] = False slopes_equal[[0, -1]] = False
return vertices[~slopes_equal] return vertices[~slopes_equal]
class AutoSlots(ABCMeta):
"""
Metaclass for automatically generating __slots__ based on superclass type annotations.
Superclasses must set `__slots__ = ()` to make this work properly.
This is a workaround for the fact that non-empty `__slots__` can't be used
with multiple inheritance. Since we only use multiple inheritance with abstract
classes, they can have empty `__slots__` and their attribute type annotations
can be used to generate a full `__slots__` for the concrete class.
"""
def __new__(cls, name, bases, dctn):
parents = set()
for base in bases:
parents |= set(base.mro())
slots = tuple(dctn.get('__slots__', tuple()))
for parent in parents:
if not hasattr(parent, '__annotations__'):
continue
slots += tuple(getattr(parent, '__annotations__').keys())
dctn['__slots__'] = slots
return super().__new__(cls, name, bases, dctn)
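A minimal sketch of what `AutoSlots` does: attribute annotations declared on the (empty-`__slots__`) parent classes are gathered into the concrete class's `__slots__`, so instances get slots instead of a `__dict__`. The toy classes here are illustrative only:

```python
from abc import ABCMeta
from masque.utils import AutoSlots

class HasWidth(metaclass=ABCMeta):
    __slots__ = ()          # empty, so multiple inheritance stays legal
    _width: float

class HasName(metaclass=ABCMeta):
    __slots__ = ()
    _name: str

class Wire(HasWidth, HasName, metaclass=AutoSlots):
    pass

print(Wire.__slots__)       # contains '_width' and '_name', gathered from the annotations
w = Wire()
w._width = 1.5              # backed by a slot
# w.color = 'red'           # would raise AttributeError: no __dict__ was created
```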

View File

@ -2,6 +2,7 @@
from setuptools import setup, find_packages from setuptools import setup, find_packages
with open('README.md', 'r') as f: with open('README.md', 'r') as f:
long_description = f.read() long_description = f.read()
@ -18,15 +19,20 @@ setup(name='masque',
url='https://mpxd.net/code/jan/masque', url='https://mpxd.net/code/jan/masque',
packages=find_packages(), packages=find_packages(),
package_data={ package_data={
'masque': ['VERSION'] 'masque': ['VERSION',
'py.typed',
]
}, },
install_requires=[ install_requires=[
'numpy', 'numpy',
], ],
extras_require={ extras_require={
'visualization': ['matplotlib'],
'gdsii': ['python-gdsii'], 'gdsii': ['python-gdsii'],
'klamath': ['klamath'],
'oasis': ['fatamorgana>=0.7'],
'dxf': ['ezdxf'],
'svg': ['svgwrite'], 'svg': ['svgwrite'],
'visualization': ['matplotlib'],
'text': ['freetype-py', 'matplotlib'], 'text': ['freetype-py', 'matplotlib'],
}, },
classifiers=[ classifiers=[
@ -37,10 +43,42 @@ setup(name='masque',
'Intended Audience :: Manufacturing', 'Intended Audience :: Manufacturing',
'Intended Audience :: Science/Research', 'Intended Audience :: Science/Research',
'License :: OSI Approved :: GNU Affero General Public License v3', 'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: POSIX :: Linux',
'Operating System :: Microsoft :: Windows',
'Topic :: Scientific/Engineering :: Electronic Design Automation (EDA)', 'Topic :: Scientific/Engineering :: Electronic Design Automation (EDA)',
'Topic :: Scientific/Engineering :: Visualization', 'Topic :: Scientific/Engineering :: Visualization',
], ],
keywords=[
'layout',
'design',
'CAD',
'EDA',
'electronics',
'photonics',
'IC',
'mask',
'pattern',
'drawing',
'lithography',
'litho',
'geometry',
'geometric',
'polygon',
'curve',
'ellipse',
'oas',
'gds',
'dxf',
'svg',
'OASIS',
'gdsii',
'gds2',
'convert',
'stream',
'custom',
'visualize',
'vector',
'freeform',
'manhattan',
'angle',
],
) )