snapshot 2020-05-18 04:34:55.303040
commit e08c754b35
.gitignore (vendored) | 13
@@ -1,6 +1,19 @@
*.pyc
__pycache__

*.idea

build/
dist/
*.egg-info/
.mypy_cache/

*.swp
*.swo

*.gds
*.gds.gz
*.svg
*.oas
*.dxf
*.dxf.gz
@@ -1,2 +1,3 @@
include README.md
include LICENSE.md
include masque/VERSION
README.md | 14

@@ -15,15 +15,15 @@ E-beam doses, and the ability to output to multiple formats.

Requirements:
* python >= 3.5 (written and tested with 3.6)
* numpy
* matplotlib (optional, used for visualization functions and text)
* python-gdsii (optional, used for gdsii i/o)
* svgwrite (optional, used for svg output)
* freetype (optional, used for text)
* matplotlib (optional, used for `visualization` functions and `text`)
* python-gdsii (optional, used for `gdsii` i/o)
* svgwrite (optional, used for `svg` output)
* freetype (optional, used for `text`)

Install with pip:
```bash
pip3 install masque
pip3 install 'masque[visualization,gdsii,svg,text]'
```

Alternatively, install from git

@@ -33,11 +33,7 @@ pip3 install git+https://mpxd.net/code/jan/masque.git@release

## TODO

* Mirroring
* Polygon de-embedding

### Maybe

* Construct from bitmap
* Boolean operations on polygons (using pyclipper)
* Output to OASIS (using fatamorgana)
@@ -4,6 +4,7 @@ import numpy

import masque
import masque.file.gdsii
import masque.file.dxf
from masque import shapes


@@ -13,20 +14,22 @@ def main():
        pat.shapes.append(shapes.Arc(
            radii=(rmin, rmin),
            width=0.1,
            angles=(-numpy.pi/4, numpy.pi/4)
            angles=(-numpy.pi/4, numpy.pi/4),
            layer=(0, 0),
            ))

    pat.labels.append(masque.Label(string='grating centerline', offset=(1, 0), layer=(1, 2)))

    pat.scale_by(1000)
    # pat.visualize()
    pat2 = masque.Pattern(name='p2')
    pat2.name = 'ellip_grating'
    pat2 = pat.copy()
    pat2.name = 'grating2'

    pat2.subpatterns += [
        masque.SubPattern(pattern=pat, offset=(20e3, 0)),
        masque.SubPattern(pattern=pat, offset=(0, 20e3)),
        ]
    masque.file.gdsii.writefile((pat, pat2), 'out.gds.gz', 1e-9, 1e-3)

    masque.file.gdsii.write_dose2dtype((pat, pat2, pat2.copy(), pat2.copy()), 'out.gds', 1e-9, 1e-3)
    masque.file.dxf.writefile(pat, 'out.dxf.gz')
    dxf, info = masque.file.dxf.readfile('out.dxf.gz')
    masque.file.dxf.writefile(dxf, 'reout.dxf.gz')


if __name__ == '__main__':
examples/test_rep.py | 96 (new file)

@@ -0,0 +1,96 @@
import numpy
from numpy import pi

import masque
import masque.file.gdsii
import masque.file.dxf
from masque import shapes, Pattern, SubPattern, GridRepetition

from pprint import pprint


def main():
    pat = masque.Pattern(name='ellip_grating')
    for rmin in numpy.arange(10, 15, 0.5):
        pat.shapes.append(shapes.Arc(
            radii=(rmin, rmin),
            width=0.1,
            angles=(0*-numpy.pi/4, numpy.pi/4)
            ))

    pat.scale_by(1000)
    pat.visualize()
    pat2 = pat.copy()
    pat2.name = 'grating2'

    pat3 = Pattern('sref_test')
    pat3.subpatterns = [
        SubPattern(pat, offset=(1e5, 3e5)),
        SubPattern(pat, offset=(2e5, 3e5), rotation=pi/3),
        SubPattern(pat, offset=(3e5, 3e5), rotation=pi/2),
        SubPattern(pat, offset=(4e5, 3e5), rotation=pi),
        SubPattern(pat, offset=(5e5, 3e5), rotation=3*pi/2),
        SubPattern(pat, mirrored=(True, False), offset=(1e5, 4e5)),
        SubPattern(pat, mirrored=(True, False), offset=(2e5, 4e5), rotation=pi/3),
        SubPattern(pat, mirrored=(True, False), offset=(3e5, 4e5), rotation=pi/2),
        SubPattern(pat, mirrored=(True, False), offset=(4e5, 4e5), rotation=pi),
        SubPattern(pat, mirrored=(True, False), offset=(5e5, 4e5), rotation=3*pi/2),
        SubPattern(pat, mirrored=(False, True), offset=(1e5, 5e5)),
        SubPattern(pat, mirrored=(False, True), offset=(2e5, 5e5), rotation=pi/3),
        SubPattern(pat, mirrored=(False, True), offset=(3e5, 5e5), rotation=pi/2),
        SubPattern(pat, mirrored=(False, True), offset=(4e5, 5e5), rotation=pi),
        SubPattern(pat, mirrored=(False, True), offset=(5e5, 5e5), rotation=3*pi/2),
        SubPattern(pat, mirrored=(True, True), offset=(1e5, 6e5)),
        SubPattern(pat, mirrored=(True, True), offset=(2e5, 6e5), rotation=pi/3),
        SubPattern(pat, mirrored=(True, True), offset=(3e5, 6e5), rotation=pi/2),
        SubPattern(pat, mirrored=(True, True), offset=(4e5, 6e5), rotation=pi),
        SubPattern(pat, mirrored=(True, True), offset=(5e5, 6e5), rotation=3*pi/2),
        ]

    pprint(pat3)
    pprint(pat3.subpatterns)
    pprint(pat.shapes)

    args = {
        'pattern': pat,
        'a_vector': [1e4, 0],
        'b_vector': [0, 1.5e4],
        'a_count': 3,
        'b_count': 2,
        }
    pat4 = Pattern('aref_test')
    pat4.subpatterns = [
        GridRepetition(**args, offset=(1e5, 3e5)),
        GridRepetition(**args, offset=(2e5, 3e5), rotation=pi/3),
        GridRepetition(**args, offset=(3e5, 3e5), rotation=pi/2),
        GridRepetition(**args, offset=(4e5, 3e5), rotation=pi),
        GridRepetition(**args, offset=(5e5, 3e5), rotation=3*pi/2),
        GridRepetition(**args, mirrored=(True, False), offset=(1e5, 4e5)),
        GridRepetition(**args, mirrored=(True, False), offset=(2e5, 4e5), rotation=pi/3),
        GridRepetition(**args, mirrored=(True, False), offset=(3e5, 4e5), rotation=pi/2),
        GridRepetition(**args, mirrored=(True, False), offset=(4e5, 4e5), rotation=pi),
        GridRepetition(**args, mirrored=(True, False), offset=(5e5, 4e5), rotation=3*pi/2),
        GridRepetition(**args, mirrored=(False, True), offset=(1e5, 5e5)),
        GridRepetition(**args, mirrored=(False, True), offset=(2e5, 5e5), rotation=pi/3),
        GridRepetition(**args, mirrored=(False, True), offset=(3e5, 5e5), rotation=pi/2),
        GridRepetition(**args, mirrored=(False, True), offset=(4e5, 5e5), rotation=pi),
        GridRepetition(**args, mirrored=(False, True), offset=(5e5, 5e5), rotation=3*pi/2),
        GridRepetition(**args, mirrored=(True, True), offset=(1e5, 6e5)),
        GridRepetition(**args, mirrored=(True, True), offset=(2e5, 6e5), rotation=pi/3),
        GridRepetition(**args, mirrored=(True, True), offset=(3e5, 6e5), rotation=pi/2),
        GridRepetition(**args, mirrored=(True, True), offset=(4e5, 6e5), rotation=pi),
        GridRepetition(**args, mirrored=(True, True), offset=(5e5, 6e5), rotation=3*pi/2),
        ]

    masque.file.gdsii.writefile((pat, pat2, pat3, pat4), 'rep.gds.gz', 1e-9, 1e-3)

    cells = list(masque.file.gdsii.readfile('rep.gds.gz')[0].values())
    masque.file.gdsii.writefile(cells, 'rerep.gds.gz', 1e-9, 1e-3)

    masque.file.dxf.writefile(pat4, 'rep.dxf.gz')
    dxf, info = masque.file.dxf.readfile('rep.dxf.gz')
    masque.file.dxf.writefile(dxf, 'rerep.dxf.gz')


if __name__ == '__main__':
    main()
masque/VERSION | 1 (new file)

@@ -0,0 +1 @@
1.3
@@ -6,31 +6,38 @@
with some vectorized element types (e.g. circles, not just polygons), better support for
E-beam doses, and the ability to output to multiple formats.

Pattern is a basic object containing a 2D lithography mask, composed of a list of Shape
objects and a list of SubPattern objects.
`Pattern` is a basic object containing a 2D lithography mask, composed of a list of `Shape`
objects, a list of `Label` objects, and a list of references to other `Patterns` (using
`SubPattern` and `GridRepetition`).

SubPattern provides basic support for nesting Pattern objects within each other, by adding
`SubPattern` provides basic support for nesting `Pattern` objects within each other, by adding
offset, rotation, scaling, and other such properties to a Pattern reference.

`GridRepetition` provides support for nesting regular arrays of `Pattern` objects.

Note that the methods for these classes try to avoid copying wherever possible, so unless
otherwise noted, assume that arguments are stored by-reference.


Dependencies:
- numpy
- matplotlib [Pattern.visualize(...)]
- python-gdsii [masque.file.gdsii]
- svgwrite [masque.file.svg]
- `numpy`
- `matplotlib` [Pattern.visualize(...)]
- `python-gdsii` [masque.file.gdsii]
- `svgwrite` [masque.file.svg]
"""

from .error import PatternError
import pathlib

from .error import PatternError, PatternLockedError
from .shapes import Shape
from .label import Label
from .subpattern import SubPattern
from .subpattern import SubPattern, subpattern_t
from .repetition import GridRepetition
from .pattern import Pattern


__author__ = 'Jan Petykiewicz'

version = '0.5'
with open(pathlib.Path(__file__).parent / 'VERSION', 'r') as f:
    __version__ = f.read().strip()
version = __version__
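The composition model described in the module docstring above (a `Pattern` holding `Shape`s and `Label`s, referenced from other patterns through `SubPattern` and `GridRepetition`, with arguments stored by reference) can be sketched as follows. This is an illustration assembled from the APIs exercised elsewhere in this commit (`Polygon`, `Label`, `SubPattern`, `GridRepetition`), not part of the diff itself:

```python
import numpy
import masque
from masque import Pattern, SubPattern, GridRepetition
from masque.shapes import Polygon

# A leaf pattern with one shape and one label.
leaf = Pattern(name='leaf')
leaf.shapes.append(Polygon(vertices=numpy.array([[0, 0], [10, 0], [10, 10]]), layer=(1, 0)))
leaf.labels.append(masque.Label(string='leaf marker', offset=(0, 0), layer=(1, 2)))

# A parent pattern referencing `leaf` once directly and once as a 3x2 grid.
# Both references store `leaf` by reference, per the docstring above.
top = Pattern(name='top')
top.subpatterns += [
    SubPattern(pattern=leaf, offset=(100, 0), rotation=numpy.pi / 2),
    GridRepetition(pattern=leaf, a_vector=[20, 0], b_vector=[0, 30],
                   a_count=3, b_count=2, offset=(0, 100)),
]
```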
@@ -7,3 +7,11 @@ class PatternError(Exception):

    def __str__(self):
        return repr(self.value)


class PatternLockedError(PatternError):
    """
    Exception raised when trying to modify a locked pattern
    """
    def __init__(self):
        PatternError.__init__(self, 'Tried to modify a locked Pattern, subpattern, or shape')
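A hedged sketch of how callers might see the new exception. Only the exception class, its message, and the existence of `deepunlock()` come from this diff; `lock()` and the exact operation that raises are assumptions for illustration:

```python
from masque import Pattern, PatternError, PatternLockedError

pat = Pattern(name='frozen')
pat.lock()   # assumed counterpart of the deepunlock() calls used by the file writers in this commit

try:
    pat.name = 'renamed'   # attempted modification of a locked pattern (assumed to raise)
except PatternLockedError as err:
    # PatternLockedError subclasses PatternError, so `except PatternError` would also catch it.
    print(err)             # 'Tried to modify a locked Pattern, subpattern, or shape'
```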
masque/file/dxf.py | 382 (new file)

@@ -0,0 +1,382 @@
"""
DXF file format readers and writers
"""
from typing import List, Any, Dict, Tuple, Callable, Union, Sequence, Iterable, Optional
import re
import io
import copy
import base64
import struct
import logging
import pathlib
import gzip
import numpy
from numpy import pi

import ezdxf

from .utils import mangle_name, make_dose_table
from .. import Pattern, SubPattern, GridRepetition, PatternError, Label, Shape, subpattern_t
from ..shapes import Polygon, Path
from ..utils import rotation_matrix_2d, get_bit, set_bit, vector2, is_scalar, layer_t
from ..utils import remove_colinear_vertices, normalize_mirror


logger = logging.getLogger(__name__)

logger.warning('DXF support is experimental and only slightly tested!')


DEFAULT_LAYER = 'DEFAULT'


def write(pattern: Pattern,
          stream: io.TextIOBase,
          modify_originals: bool = False,
          dxf_version='AC1024',
          disambiguate_func: Callable[[Iterable[Pattern]], None] = None):
    """
    Write a `Pattern` to a DXF file, by first calling `.polygonize()` to change the shapes
    into polygons, and then writing patterns as DXF `Block`s, polygons as `LWPolyline`s,
    and subpatterns as `Insert`s.

    The top level pattern's name is not written to the DXF file. Nested patterns keep their
    names.

    Layer numbers are translated as follows:
        int: 1 -> '1'
        tuple: (1, 2) -> '1.2'
        str: '1.2' -> '1.2' (no change)

    It is often a good idea to run `pattern.subpatternize()` prior to calling this function,
    especially if calling `.polygonize()` will result in very many vertices.

    If you want the pattern polygonized with non-default arguments, just call `pattern.polygonize()`
    prior to calling this function.

    Only `GridRepetition` objects with manhattan basis vectors are preserved as arrays. Since DXF
    rotations apply to basis vectors while `masque`'s rotations do not, the basis vectors of an
    array with rotated instances must be manhattan _after_ having a compensating rotation applied.

    Args:
        pattern: A `Pattern` to write to the stream.
        stream: Stream object to write to.
        modify_originals: If `True`, the original pattern is modified as part of the writing
            process. Otherwise, a copy is made and `deepunlock()`-ed.
            Default `False`.
        disambiguate_func: Function which takes a list of patterns and alters them
            to make their names valid and unique. Default is `disambiguate_pattern_names`.
            WARNING: No additional error checking is performed on the results.
    """
    #TODO consider supporting DXF arcs?
    if disambiguate_func is None:
        disambiguate_func = disambiguate_pattern_names

    if not modify_originals:
        pattern = pattern.deepcopy().deepunlock()

    # Get a dict of id(pattern) -> pattern
    patterns_by_id = pattern.referenced_patterns_by_id()
    disambiguate_func(patterns_by_id.values())

    # Create library
    lib = ezdxf.new(dxf_version, setup=True)
    msp = lib.modelspace()
    _shapes_to_elements(msp, pattern.shapes)
    _labels_to_texts(msp, pattern.labels)
    _subpatterns_to_refs(msp, pattern.subpatterns)

    # Now create a block for each referenced pattern, and add in any shapes
    for pat in patterns_by_id.values():
        assert(pat is not None)
        block = lib.blocks.new(name=pat.name)

        _shapes_to_elements(block, pat.shapes)
        _labels_to_texts(block, pat.labels)
        _subpatterns_to_refs(block, pat.subpatterns)

    lib.write(stream)


def writefile(pattern: Pattern,
              filename: Union[str, pathlib.Path],
              *args,
              **kwargs,
              ):
    """
    Wrapper for `dxf.write()` that takes a filename or path instead of a stream.

    Will automatically compress the file if it has a .gz suffix.

    Args:
        pattern: `Pattern` to save
        filename: Filename to save to.
        *args: passed to `dxf.write`
        **kwargs: passed to `dxf.write`
    """
    path = pathlib.Path(filename)
    if path.suffix == '.gz':
        open_func: Callable = gzip.open
    else:
        open_func = open

    with open_func(path, mode='wt') as stream:
        results = write(pattern, stream, *args, **kwargs)
    return results
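A short usage sketch for the wrapper above, mirroring the calls in `examples/`; the layer string written to the DXF follows the int/tuple/str translation described in `write()`'s docstring (implemented by `_mlayer2dxf` further down in this file):

```python
import masque
import masque.file.dxf
from masque.shapes import Polygon

pat = masque.Pattern(name='dxf_demo')
pat.shapes.append(Polygon(vertices=[[0, 0], [1000, 0], [1000, 1000]], layer=(1, 2)))  # becomes DXF layer '1.2'

# .gz suffix -> the file is transparently wrapped in gzip.open()
masque.file.dxf.writefile(pat, 'demo.dxf.gz')
dxf_pat, info = masque.file.dxf.readfile('demo.dxf.gz')   # (top-level Pattern, library info dict)
```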
def readfile(filename: Union[str, pathlib.Path],
             *args,
             **kwargs,
             ) -> Tuple[Dict[str, Pattern], Dict[str, Any]]:
    """
    Wrapper for `dxf.read()` that takes a filename or path instead of a stream.

    Will automatically decompress files with a .gz suffix.

    Args:
        filename: Filename to save to.
        *args: passed to `dxf.read`
        **kwargs: passed to `dxf.read`
    """
    path = pathlib.Path(filename)
    if path.suffix == '.gz':
        open_func: Callable = gzip.open
    else:
        open_func = open

    with open_func(path, mode='rt') as stream:
        results = read(stream, *args, **kwargs)
    return results


def read(stream: io.TextIOBase,
         clean_vertices: bool = True,
         ) -> Tuple[Dict[str, Pattern], Dict[str, Any]]:
    """
    Read a dxf file and translate it into a dict of `Pattern` objects. DXF `Block`s are
    translated into `Pattern` objects; `LWPolyline`s are translated into polygons, and `Insert`s
    are translated into `SubPattern` objects.

    If an object has no layer it is set to this module's `DEFAULT_LAYER` ("DEFAULT").

    Args:
        stream: Stream to read from.
        clean_vertices: If `True`, remove any redundant vertices when loading polygons.
            The cleaning process removes any polygons with zero area or <3 vertices.
            Default `True`.

    Returns:
        - Top level pattern
    """
    lib = ezdxf.read(stream)
    msp = lib.modelspace()

    pat = _read_block(msp, clean_vertices)
    patterns = [pat] + [_read_block(bb, clean_vertices) for bb in lib.blocks if bb.name != '*Model_Space']

    # Create a dict of {pattern.name: pattern, ...}, then fix up all subpattern.pattern entries
    # according to the subpattern.identifier (which is deleted after use).
    patterns_dict = dict(((p.name, p) for p in patterns))
    for p in patterns_dict.values():
        for sp in p.subpatterns:
            sp.pattern = patterns_dict[sp.identifier[0]]
            del sp.identifier

    library_info = {
        'layers': [ll.dxfattribs() for ll in lib.layers]
        }

    return pat, library_info


def _read_block(block, clean_vertices):
    pat = Pattern(block.name)
    for element in block:
        eltype = element.dxftype()
        if eltype in ('POLYLINE', 'LWPOLYLINE'):
            if eltype == 'LWPOLYLINE':
                points = numpy.array(element.lwpoints)
            else:
                points = numpy.array(element.points)
            attr = element.dxfattribs()
            args = {'layer': attr.get('layer', DEFAULT_LAYER),
                   }

            if points.shape[1] == 2:
                shape = Polygon(**args)
            elif points.shape[1] > 2:
                if (points[0, 2] != points[:, 2]).any():
                    raise PatternError('PolyLine has non-constant width (not yet representable in masque!)')
                elif points.shape[1] == 4 and (points[:, 3] != 0).any():
                    raise PatternError('LWPolyLine has bulge (not yet representable in masque!)')
                else:
                    width = points[0, 2]
                    if width == 0:
                        width = attr.get('const_width', 0)

                    if width == 0 and numpy.array_equal(points[0], points[-1]):
                        shape = Polygon(**args, vertices=points[:-1, :2])
                    else:
                        shape = Path(**args, width=width, vertices=points[:, :2])

            if clean_vertices:
                try:
                    shape.clean_vertices()
                except PatternError:
                    continue

            pat.shapes.append(shape)

        elif eltype in ('TEXT',):
            args = {'offset': element.get_pos()[1][:2],
                    'layer': element.dxfattribs().get('layer', DEFAULT_LAYER),
                   }
            string = element.dxfattribs().get('text', '')
            height = element.dxfattribs().get('height', 0)
            if height != 0:
                logger.warning('Interpreting DXF TEXT as a label despite nonzero height. '
                               'This could be changed in the future by setting a font path in the masque DXF code.')
            pat.labels.append(Label(string=string, **args))
            # else:
            #     pat.shapes.append(Text(string=string, height=height, font_path=????))
        elif eltype in ('INSERT',):
            attr = element.dxfattribs()
            xscale = attr.get('xscale', 1)
            yscale = attr.get('yscale', 1)
            if abs(xscale) != abs(yscale):
                logger.warning('Masque does not support per-axis scaling; using x-scaling only!')
            scale = abs(xscale)
            mirrored = (yscale < 0, xscale < 0)
            rotation = attr.get('rotation', 0) * pi/180

            offset = attr.get('insert', (0, 0, 0))[:2]

            args = {
                'offset': offset,
                'scale': scale,
                'mirrored': mirrored,
                'rotation': rotation,
                'pattern': None,
                'identifier': (attr.get('name', None),),
                }

            if 'column_count' in attr:
                args['a_vector'] = (attr['column_spacing'], 0)
                args['b_vector'] = (0, attr['row_spacing'])
                args['a_count'] = attr['column_count']
                args['b_count'] = attr['row_count']
                pat.subpatterns.append(GridRepetition(**args))
            else:
                pat.subpatterns.append(SubPattern(**args))
        else:
            logger.warning(f'Ignoring DXF element {element.dxftype()} (not implemented).')
    return pat


def _subpatterns_to_refs(block: Union[ezdxf.layouts.BlockLayout, ezdxf.layouts.Modelspace],
                         subpatterns: List[subpattern_t]):
    for subpat in subpatterns:
        if subpat.pattern is None:
            continue
        encoded_name = subpat.pattern.name

        rotation = (subpat.rotation * 180 / numpy.pi) % 360
        attribs = {
            'xscale': subpat.scale * (-1 if subpat.mirrored[1] else 1),
            'yscale': subpat.scale * (-1 if subpat.mirrored[0] else 1),
            'rotation': rotation,
            }

        if isinstance(subpat, GridRepetition):
            a = subpat.a_vector
            b = subpat.b_vector if subpat.b_vector is not None else numpy.zeros(2)
            rotated_a = rotation_matrix_2d(-subpat.rotation) @ a
            rotated_b = rotation_matrix_2d(-subpat.rotation) @ b
            if rotated_a[1] == 0 and rotated_b[0] == 0:
                attribs['column_count'] = subpat.a_count
                attribs['row_count'] = subpat.b_count
                attribs['column_spacing'] = rotated_a[0]
                attribs['row_spacing'] = rotated_b[1]
                block.add_blockref(encoded_name, subpat.offset, dxfattribs=attribs)
            elif rotated_a[0] == 0 and rotated_b[1] == 0:
                attribs['column_count'] = subpat.b_count
                attribs['row_count'] = subpat.a_count
                attribs['column_spacing'] = rotated_b[0]
                attribs['row_spacing'] = rotated_a[1]
                block.add_blockref(encoded_name, subpat.offset, dxfattribs=attribs)
            else:
                #NOTE: We could still do non-manhattan (but still orthogonal) grids by getting
                #      creative with counter-rotated nested patterns, but probably not worth it.
                #      Instead, just break apart the grid into individual elements:
                for aa in numpy.arange(subpat.a_count):
                    for bb in numpy.arange(subpat.b_count):
                        block.add_blockref(encoded_name, subpat.offset + aa * a + bb * b, dxfattribs=attribs)
        else:
            block.add_blockref(encoded_name, subpat.offset, dxfattribs=attribs)


def _shapes_to_elements(block: Union[ezdxf.layouts.BlockLayout, ezdxf.layouts.Modelspace],
                        shapes: List[Shape],
                        polygonize_paths: bool = False):
    # Add `LWPolyline`s for each shape.
    # Could instead do paths with a width setting, but need to consider endcaps.
    for shape in shapes:
        attribs = {'layer': _mlayer2dxf(shape.layer)}
        for polygon in shape.to_polygons():
            xy_open = polygon.vertices + polygon.offset
            xy_closed = numpy.vstack((xy_open, xy_open[0, :]))
            block.add_lwpolyline(xy_closed, dxfattribs=attribs)


def _labels_to_texts(block: Union[ezdxf.layouts.BlockLayout, ezdxf.layouts.Modelspace],
                     labels: List[Label]):
    for label in labels:
        attribs = {'layer': _mlayer2dxf(label.layer)}
        xy = label.offset
        block.add_text(label.string, dxfattribs=attribs).set_pos(xy, align='BOTTOM_LEFT')


def _mlayer2dxf(layer: layer_t) -> str:
    if isinstance(layer, str):
        return layer
    if isinstance(layer, int):
        return str(layer)
    if isinstance(layer, tuple):
        return f'{layer[0]}.{layer[1]}'
    raise PatternError(f'Unknown layer type: {layer} ({type(layer)})')


def disambiguate_pattern_names(patterns,
                               max_name_length: int = 32,
                               suffix_length: int = 6,
                               dup_warn_filter: Callable[[str,], bool] = None, # If returns False, don't warn about this name
                               ):
    used_names = []
    for pat in patterns:
        sanitized_name = re.compile('[^A-Za-z0-9_\?\$]').sub('_', pat.name)

        i = 0
        suffixed_name = sanitized_name
        while suffixed_name in used_names or suffixed_name == '':
            suffix = base64.b64encode(struct.pack('>Q', i), b'$?').decode('ASCII')

            suffixed_name = sanitized_name + '$' + suffix[:-1].lstrip('A')
            i += 1

        if sanitized_name == '':
            logger.warning('Empty pattern name saved as "{}"'.format(suffixed_name))
        elif suffixed_name != sanitized_name:
            if dup_warn_filter is None or dup_warn_filter(pat.name):
                logger.warning('Pattern name "{}" ({}) appears multiple times;\n renaming to "{}"'.format(
                    pat.name, sanitized_name, suffixed_name))

        if len(suffixed_name) == 0:
            # Should never happen since zero-length names are replaced
            raise PatternError('Zero-length name after sanitize,\n originally "{}"'.format(pat.name))
        if len(suffixed_name) > max_name_length:
            raise PatternError('Pattern name "{!r}" length > {} after encode,\n originally "{}"'.format(suffixed_name, max_name_length, pat.name))

        pat.name = suffixed_name
        used_names.append(suffixed_name)
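To make the sanitize-and-suffix scheme in `disambiguate_pattern_names()` above (and its near-identical GDSII counterpart later in this commit) concrete, here is a small standalone trace using the same `re`/`base64`/`struct` recipe; it is an illustration only, not part of the diff:

```python
import re
import base64
import struct

def suffix_for(i: int) -> str:
    # Same recipe as disambiguate_pattern_names(): base64 of a big-endian uint64
    # with altchars '$?', dropping the final padding character and leading 'A's (zero bytes).
    b64 = base64.b64encode(struct.pack('>Q', i), b'$?').decode('ASCII')
    return b64[:-1].lstrip('A')

print(re.compile('[^A-Za-z0-9_\?\$]').sub('_', 'my cell (v2)'))  # -> 'my_cell__v2_'
print(suffix_for(0), suffix_for(1), suffix_for(2))               # -> '' 'E' 'I'
# So the first duplicate of 'my_cell__v2_' would be renamed 'my_cell__v2_$E', the next '...$I', etc.
```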
@@ -1,226 +1,188 @@
"""
GDSII file format readers and writers

Note that GDSII references follow the same convention as `masque`,
with this order of operations:
    1. Mirroring
    2. Rotation
    3. Scaling
    4. Offset and array expansion (no mirroring/rotation/scaling applied to offsets)

Scaling, rotation, and mirroring apply to individual instances, not grid
vectors or offsets.
"""
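A small numeric sketch of the per-instance transform order listed above (mirror, then rotate, then scale, then offset), using plain numpy. The local `rotation_matrix_2d` mirrors the helper imported from `masque.utils`; the example is illustrative and not part of the diff:

```python
import numpy
from numpy import pi

def rotation_matrix_2d(theta):
    return numpy.array([[numpy.cos(theta), -numpy.sin(theta)],
                        [numpy.sin(theta),  numpy.cos(theta)]])

def place(point, mirror_across_x=False, rotation=0.0, scale=1.0, offset=(0, 0)):
    p = numpy.array(point, dtype=float)
    if mirror_across_x:                     # 1. mirroring (across the x axis: y -> -y)
        p[1] = -p[1]
    p = rotation_matrix_2d(rotation) @ p    # 2. rotation
    p = p * scale                           # 3. scaling
    return p + offset                       # 4. offset (never mirrored/rotated/scaled)

print(place((0, 1), mirror_across_x=True, rotation=pi / 2, scale=2, offset=(100, 0)))  # ~ (102, 0)
print(place((0, 1), mirror_across_x=False, rotation=pi / 2, scale=2, offset=(100, 0))) # ~ (98, 0)
```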
from typing import List, Any, Dict, Tuple, Callable, Union, Sequence, Iterable, Optional
import re
import io
import copy
import numpy
import base64
import struct
import logging
import pathlib
import gzip

# python-gdsii
import gdsii.library
import gdsii.structure
import gdsii.elements

from typing import List, Any, Dict, Tuple
import re
import numpy
import base64
import struct
import logging

from .utils import mangle_name, make_dose_table
from .. import Pattern, SubPattern, GridRepetition, PatternError, Label, Shape
from .utils import mangle_name, make_dose_table, dose2dtype, dtype2dose
from .. import Pattern, SubPattern, GridRepetition, PatternError, Label, Shape, subpattern_t
from ..shapes import Polygon, Path
from ..utils import rotation_matrix_2d, get_bit, set_bit, vector2, is_scalar
from ..utils import remove_colinear_vertices
from ..utils import rotation_matrix_2d, get_bit, set_bit, vector2, is_scalar, layer_t
from ..utils import remove_colinear_vertices, normalize_mirror


__author__ = 'Jan Petykiewicz'
#TODO absolute positioning


logger = logging.getLogger(__name__)


def write(patterns: Pattern or List[Pattern],
          filename: str,
path_cap_map = {
                None: Path.Cap.Flush,
                0: Path.Cap.Flush,
                1: Path.Cap.Circle,
                2: Path.Cap.Square,
                4: Path.Cap.SquareCustom,
               }


def write(patterns: Union[Pattern, List[Pattern]],
          stream: io.BufferedIOBase,
          meters_per_unit: float,
          logical_units_per_unit: float = 1,
          library_name: str = 'masque-gdsii-write'):
          library_name: str = 'masque-gdsii-write',
          modify_originals: bool = False,
          disambiguate_func: Callable[[Iterable[Pattern]], None] = None):
    """
    Write a Pattern or list of patterns to a GDSII file, by first calling
    .polygonize() to change the shapes into polygons, and then writing patterns
    Write a `Pattern` or list of patterns to a GDSII file, by first calling
    `.polygonize()` to change the shapes into polygons, and then writing patterns
    as GDSII structures, polygons as boundary elements, and subpatterns as structure
    references (sref).

    For each shape,
        layer is chosen to be equal to shape.layer if it is an int,
            or shape.layer[0] if it is a tuple
        datatype is chosen to be shape.layer[1] if available,
            otherwise 0
        layer is chosen to be equal to `shape.layer` if it is an int,
            or `shape.layer[0]` if it is a tuple
        datatype is chosen to be `shape.layer[1]` if available,
            otherwise `0`

    Note that this function modifies the Pattern.
    It is often a good idea to run `pattern.subpatternize()` prior to calling this function,
    especially if calling `.polygonize()` will result in very many vertices.

    It is often a good idea to run pattern.subpatternize() prior to calling this function,
    especially if calling .polygonize() will result in very many vertices.

    If you want the pattern polygonized with non-default arguments, just call pattern.polygonize()
    If you want the pattern polygonized with non-default arguments, just call `pattern.polygonize()`
    prior to calling this function.

    :param patterns: A Pattern or list of patterns to write to file. Modified by this function.
    :param filename: Filename to write to.
    :param meters_per_unit: Written into the GDSII file, meters per (database) length unit.
        All distances are assumed to be an integer multiple of this unit, and are stored as such.
    :param logical_units_per_unit: Written into the GDSII file. Allows the GDSII to specify a
        "logical" unit which is different from the "database" unit, for display purposes.
        Default 1.
    :param library_name: Library name written into the GDSII file.
        Default 'masque-gdsii-write'.
    Args:
        patterns: A Pattern or list of patterns to write to the stream.
        stream: Stream object to write to.
        meters_per_unit: Written into the GDSII file, meters per (database) length unit.
            All distances are assumed to be an integer multiple of this unit, and are stored as such.
        logical_units_per_unit: Written into the GDSII file. Allows the GDSII to specify a
            "logical" unit which is different from the "database" unit, for display purposes.
            Default `1`.
        library_name: Library name written into the GDSII file.
            Default 'masque-gdsii-write'.
        modify_originals: If `True`, the original pattern is modified as part of the writing
            process. Otherwise, a copy is made and `deepunlock()`-ed.
            Default `False`.
        disambiguate_func: Function which takes a list of patterns and alters them
            to make their names valid and unique. Default is `disambiguate_pattern_names`, which
            attempts to adhere to the GDSII standard as well as possible.
            WARNING: No additional error checking is performed on the results.
    """
    if isinstance(patterns, Pattern):
        patterns = [patterns]

    if disambiguate_func is None:
        disambiguate_func = disambiguate_pattern_names

    if not modify_originals:
        patterns = [p.deepunlock() for p in copy.deepcopy(patterns)]

    # Create library
    lib = gdsii.library.Library(version=600,
                                name=library_name.encode('ASCII'),
                                logical_unit=logical_units_per_unit,
                                physical_unit=meters_per_unit)

    if isinstance(patterns, Pattern):
        patterns = [patterns]

    # Get a dict of id(pattern) -> pattern
    patterns_by_id = {id(pattern): pattern for pattern in patterns}
    for pattern in patterns:
        patterns_by_id.update(pattern.referenced_patterns_by_id())
        for i, p in pattern.referenced_patterns_by_id().items():
            patterns_by_id[i] = p

    _disambiguate_pattern_names(patterns_by_id.values())
    disambiguate_func(patterns_by_id.values())

    # Now create a structure for each pattern, and add in any Boundary and SREF elements
    for pat in patterns_by_id.values():
        structure = gdsii.structure.Structure(name=pat.name)
        lib.append(structure)

        # Add a Boundary element for each shape
        structure += _shapes_to_boundaries(pat.shapes)

        structure += _shapes_to_elements(pat.shapes)
        structure += _labels_to_texts(pat.labels)

        # Add an SREF / AREF for each subpattern entry
        structure += _subpatterns_to_refs(pat.subpatterns)

    with open(filename, mode='wb') as stream:
        lib.save(stream)
    lib.save(stream)
    return


def write_dose2dtype(patterns: Pattern or List[Pattern],
                     filename: str,
                     meters_per_unit: float,
                     *args,
                     **kwargs,
                     ) -> List[float]:
def writefile(patterns: Union[List[Pattern], Pattern],
              filename: Union[str, pathlib.Path],
              *args,
              **kwargs,
              ):
    """
    Write a Pattern or list of patterns to a GDSII file, by first calling
    .polygonize() to change the shapes into polygons, and then writing patterns
    as GDSII structures, polygons as boundary elements, and subpatterns as structure
    references (sref).
    Wrapper for `gdsii.write()` that takes a filename or path instead of a stream.

    For each shape,
        layer is chosen to be equal to shape.layer if it is an int,
            or shape.layer[0] if it is a tuple
        datatype is chosen arbitrarily, based on calculated dose for each shape.
            Shapes with equal calculated dose will have the same datatype.
            A list of doses is returned, providing a mapping between datatype
            (list index) and dose (list entry).
    Will automatically compress the file if it has a .gz suffix.

    Note that this function modifies the Pattern(s).

    It is often a good idea to run pattern.subpatternize() prior to calling this function,
    especially if calling .polygonize() will result in very many vertices.

    If you want the pattern polygonized with non-default arguments, just call pattern.polygonize()
    prior to calling this function.

    :param patterns: A Pattern or list of patterns to write to file. Modified by this function.
    :param filename: Filename to write to.
    :param meters_per_unit: Written into the GDSII file, meters per (database) length unit.
        All distances are assumed to be an integer multiple of this unit, and are stored as such.
    :param args: passed to masque.file.gdsii.write().
    :param kwargs: passed to masque.file.gdsii.write().
    :returns: A list of doses, providing a mapping between datatype (int, list index)
        and dose (float, list entry).
    Args:
        patterns: `Pattern` or list of patterns to save
        filename: Filename to save to.
        *args: passed to `gdsii.write`
        **kwargs: passed to `gdsii.write`
    """
    patterns, dose_vals = dose2dtype(patterns)
    write(patterns, filename, meters_per_unit, *args, **kwargs)
    return dose_vals
    path = pathlib.Path(filename)
    if path.suffix == '.gz':
        open_func: Callable = gzip.open
    else:
        open_func = open

    with io.BufferedWriter(open_func(path, mode='wb')) as stream:
        results = write(patterns, stream, *args, **kwargs)
    return results


def dose2dtype(patterns: Pattern or List[Pattern],
               ) -> Tuple[List[Pattern], List[float]]:
def readfile(filename: Union[str, pathlib.Path],
             *args,
             **kwargs,
             ) -> Tuple[Dict[str, Pattern], Dict[str, Any]]:
    """
    For each shape in each pattern, set shape.layer to the tuple
    (base_layer, datatype), where:
        layer is chosen to be equal to the original shape.layer if it is an int,
            or shape.layer[0] if it is a tuple
        datatype is chosen arbitrarily, based on calculated dose for each shape.
            Shapes with equal calculated dose will have the same datatype.
            A list of doses is returned, providing a mapping between datatype
            (list index) and dose (list entry).
    Wrapper for `gdsii.read()` that takes a filename or path instead of a stream.

    Note that this function modifies the input Pattern(s).
    Will automatically decompress files with a .gz suffix.

    :param patterns: A Pattern or list of patterns to write to file. Modified by this function.
    :returns: (patterns, dose_list)
        patterns: modified input patterns
        dose_list: A list of doses, providing a mapping between datatype (int, list index)
            and dose (float, list entry).
    Args:
        filename: Filename to save to.
        *args: passed to `gdsii.read`
        **kwargs: passed to `gdsii.read`
    """
    if isinstance(patterns, Pattern):
        patterns = [patterns]
    path = pathlib.Path(filename)
    if path.suffix == '.gz':
        open_func: Callable = gzip.open
    else:
        open_func = open

    # Get a dict of id(pattern) -> pattern
    patterns_by_id = {id(pattern): pattern for pattern in patterns}
    for pattern in patterns:
        patterns_by_id.update(pattern.referenced_patterns_by_id())

    # Get a table of (id(pat), written_dose) for each pattern and subpattern
    sd_table = make_dose_table(patterns)

    # Figure out all the unique doses necessary to write this pattern
    # This means going through each row in sd_table and adding the dose values needed to write
    # that subpattern at that dose level
    dose_vals = set()
    for pat_id, pat_dose in sd_table:
        pat = patterns_by_id[pat_id]
        [dose_vals.add(shape.dose * pat_dose) for shape in pat.shapes]

    if len(dose_vals) > 256:
        raise PatternError('Too many dose values: {}, maximum 256 when using dtypes.'.format(len(dose_vals)))

    # Create a new pattern for each non-1-dose entry in the dose table
    # and update the shapes to reflect their new dose
    new_pats = {} # (id, dose) -> new_pattern mapping
    for pat_id, pat_dose in sd_table:
        if pat_dose == 1:
            new_pats[(pat_id, pat_dose)] = patterns_by_id[pat_id]
            continue

        pat = patterns_by_id[pat_id].deepcopy()

        encoded_name = mangle_name(pat, pat_dose).encode('ASCII')
        if len(encoded_name) == 0:
            raise PatternError('Zero-length name after mangle+encode, originally "{}"'.format(pat.name))

        for shape in pat.shapes:
            data_type = dose_vals_list.index(shape.dose * pat_dose)
            if is_scalar(shape.layer):
                layer = (shape.layer, data_type)
            else:
                layer = (shape.layer[0], data_type)

        new_pats[(pat_id, pat_dose)] = pat

    # Go back through all the dose-specific patterns and fix up their subpattern entries
    for (pat_id, pat_dose), pat in new_pats.items():
        for subpat in pat.subpatterns:
            dose_mult = subpat.dose * pat_dose
            subpat.pattern = new_pats[(id(subpat.pattern), dose_mult)]

    return patterns, list(dose_vals)
    with io.BufferedReader(open_func(path, mode='rb')) as stream:
        results = read(stream, *args, **kwargs)
    return results

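A toy illustration of the dose-to-datatype mapping implemented by the (now relocated) `dose2dtype` logic above: each distinct written dose (`shape.dose * pattern_dose`) gets a datatype index, and the returned list maps datatype back to dose. The ordering below is illustrative only, since the function collects the doses into a set itself:

```python
written_doses = [1.0, 0.5, 1.0, 2.0]                      # shape.dose * pattern dose, per shape

dose_list = sorted(set(written_doses))                    # -> [0.5, 1.0, 2.0]  (datatype index -> dose)
datatypes = [dose_list.index(d) for d in written_doses]   # -> [1, 0, 1, 2]     (per-shape datatype)
```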
def read_dtype2dose(filename: str) -> (List[Pattern], Dict[str, Any]):
    """
    Alias for read(filename, use_dtype_as_dose=True)
    """
    return read(filename, use_dtype_as_dose=True)


def read(filename: str,
def read(stream: io.BufferedIOBase,
         use_dtype_as_dose: bool = False,
         clean_vertices: bool = True,
         ) -> (Dict[str, Pattern], Dict[str, Any]):
         ) -> Tuple[Dict[str, Pattern], Dict[str, Any]]:
    """
    Read a gdsii file and translate it into a dict of Pattern objects. GDSII structures are
    translated into Pattern objects; boundaries are translated into polygons, and srefs and arefs

@@ -232,18 +194,23 @@ def read(filename: str,
    'logical_units_per_unit': number of "logical" units displayed by layout tools (typically microns)
        per database unit

    :param filename: Filename specifying a GDSII file to read from.
    :param use_dtype_as_dose: If false, set each polygon's layer to (gds_layer, gds_datatype).
        If true, set the layer to gds_layer and the dose to gds_datatype.
        Default False.
    :param clean_vertices: If true, remove any redundant vertices when loading polygons.
    Args:
        stream: Stream to read from.
        use_dtype_as_dose: If `False`, set each polygon's layer to `(gds_layer, gds_datatype)`.
            If `True`, set the layer to `gds_layer` and the dose to `gds_datatype`.
            Default `False`.
            NOTE: This will be deprecated in the future in favor of
            `pattern.apply(masque.file.utils.dtype2dose)`.
        clean_vertices: If `True`, remove any redundant vertices when loading polygons.
            The cleaning process removes any polygons with zero area or <3 vertices.
            Default True.
    :return: Tuple: (Dict of pattern_name:Patterns generated from GDSII structures, Dict of GDSII library info)
            Default `True`.

    Returns:
        - Dict of pattern_name:Patterns generated from GDSII structures
        - Dict of GDSII library info
    """

    with open(filename, mode='rb') as stream:
        lib = gdsii.library.Library.load(stream)
    lib = gdsii.library.Library.load(stream)

    library_info = {'name': lib.name.decode('ASCII'),
                    'meters_per_unit': lib.physical_unit,

@@ -256,46 +223,48 @@ def read(filename: str,
        for element in structure:
            # Switch based on element type:
            if isinstance(element, gdsii.elements.Boundary):
                if use_dtype_as_dose:
                    shape = Polygon(vertices=element.xy[:-1],
                                    dose=element.data_type,
                                    layer=element.layer)
                else:
                    shape = Polygon(vertices=element.xy[:-1],
                                    layer=(element.layer, element.data_type))
                args = {'vertices': element.xy[:-1],
                        'layer': (element.layer, element.data_type),
                       }

                poly = Polygon(**args)

                if clean_vertices:
                    try:
                        shape.clean_vertices()
                        poly.clean_vertices()
                    except PatternError:
                        continue

                pat.shapes.append(shape)
                pat.shapes.append(poly)

            if isinstance(element, gdsii.elements.Path):
                cap_map = {0: Path.Cap.Flush,
                           1: Path.Cap.Circle,
                           2: Path.Cap.Square,
                           #3: custom?
                          }
                if element.path_type in cap_map:
                    cap = cap_map[element.path_type]
                if element.path_type in path_cap_map:
                    cap = path_cap_map[element.path_type]
                else:
                    raise PatternError('Unrecognized path type: {}'.format(element.path_type))

                if use_dtype_as_dose:
                    shape = Path(vertices=element.xy,
                                 dose=element.data_type,
                                 layer=element.layer)
                else:
                    shape = Path(vertices=element.xy,
                                 layer=(element.layer, element.data_type))
                args = {'vertices': element.xy,
                        'layer': (element.layer, element.data_type),
                        'width': element.width if element.width is not None else 0.0,
                        'cap': cap,
                       }

                if cap == Path.Cap.SquareCustom:
                    args['cap_extensions'] = numpy.zeros(2)
                    if element.bgn_extn is not None:
                        args['cap_extensions'][0] = element.bgn_extn
                    if element.end_extn is not None:
                        args['cap_extensions'][1] = element.end_extn

                path = Path(**args)

                if clean_vertices:
                    try:
                        shape.clean_vertices()
                        path.clean_vertices()
                    except PatternError as err:
                        continue

                pat.shapes.append(shape)
                pat.shapes.append(path)

            elif isinstance(element, gdsii.elements.Text):
                label = Label(offset=element.xy,

@@ -309,41 +278,51 @@ def read(filename: str,
            elif isinstance(element, gdsii.elements.ARef):
                pat.subpatterns.append(_aref_to_gridrep(element))

        if use_dtype_as_dose:
            logger.warning('use_dtype_as_dose will be removed in the future!')
            pat = dose2dtype(pat)

        patterns.append(pat)

    # Create a dict of {pattern.name: pattern, ...}, then fix up all subpattern.pattern entries
    # according to the subpattern.ref_name (which is deleted after use).
    # according to the subpattern.identifier (which is deleted after use).
    patterns_dict = dict(((p.name, p) for p in patterns))
    for p in patterns_dict.values():
        for sp in p.subpatterns:
            sp.pattern = patterns_dict[sp.ref_name.decode('ASCII')]
            del sp.ref_name
            sp.pattern = patterns_dict[sp.identifier[0].decode('ASCII')]
            del sp.identifier

    return patterns_dict, library_info


def _mlayer2gds(mlayer):
    if is_scalar(mlayer):
def _mlayer2gds(mlayer: layer_t) -> Tuple[int, int]:
    """ Helper to turn a layer tuple-or-int into a layer and datatype"""
    if isinstance(mlayer, int):
        layer = mlayer
        data_type = 0
    else:
    elif isinstance(mlayer, tuple):
        layer = mlayer[0]
        if len(mlayer) > 1:
            data_type = mlayer[1]
        else:
            data_type = 0
    else:
        raise PatternError(f'Invalid layer for gdsii: {layer}. Note that gdsii layers cannot be strings.')
    return layer, data_type


def _sref_to_subpat(element: gdsii.elements.SRef) -> SubPattern:
    # Helper function to create a SubPattern from an SREF. Sets subpat.pattern to None
    # and sets the instance attribute .ref_name to the struct_name.
    #
    # BUG: "Absolute" means not affected by parent elements.
    #      That's not currently supported by masque at all, so need to either tag it and
    #      undo the parent transformations, or implement it in masque.
    """
    Helper function to create a SubPattern from an SREF. Sets subpat.pattern to None
    and sets the instance .identifier to (struct_name,).

    BUG:
        "Absolute" means not affected by parent elements.
        That's not currently supported by masque at all, so need to either tag it and
        undo the parent transformations, or implement it in masque.
    """
    subpat = SubPattern(pattern=None, offset=element.xy)
    subpat.ref_name = element.struct_name
    subpat.identifier = (element.struct_name,)
    if element.strans is not None:
        if element.mag is not None:
            subpat.scale = element.mag

@@ -359,22 +338,24 @@ def _sref_to_subpat(element: gdsii.elements.SRef) -> SubPattern:
            raise PatternError('Absolute rotation is not implemented yet!')
        # Bit 0 means mirror x-axis
        if get_bit(element.strans, 15 - 0):
            subpat.mirror(axis=0)
            subpat.mirrored[0] = 1
    return subpat


def _aref_to_gridrep(element: gdsii.elements.ARef) -> GridRepetition:
    # Helper function to create a GridRepetition from an AREF. Sets gridrep.pattern to None
    # and sets the instance attribute .ref_name to the struct_name.
    #
    # BUG: "Absolute" means not affected by parent elements.
    #      That's not currently supported by masque at all, so need to either tag it and
    #      undo the parent transformations, or implement it in masque.
    """
    Helper function to create a GridRepetition from an AREF. Sets gridrep.pattern to None
    and sets the instance .identifier to (struct_name,).

    BUG:
        "Absolute" means not affected by parent elements.
        That's not currently supported by masque at all, so need to either tag it and
        undo the parent transformations, or implement it in masque.
    """
    rotation = 0
    offset = numpy.array(element.xy[0])
    scale = 1
    mirror_signs = numpy.ones(2)
    mirror_across_x = False

    if element.strans is not None:
        if element.mag is not None:

@@ -389,15 +370,11 @@ def _aref_to_gridrep(element: gdsii.elements.ARef) -> GridRepetition:
            raise PatternError('Absolute rotation is not implemented yet!')
        # Bit 0 means mirror x-axis
        if get_bit(element.strans, 15 - 0):
            mirror_signs[0] = -1
            mirror_across_x = True

    counts = [element.cols, element.rows]
    vec_a0 = element.xy[1] - offset
    vec_b0 = element.xy[2] - offset

    a_vector = numpy.dot(rotation_matrix_2d(-rotation), vec_a0 / scale / counts[0]) * mirror_signs
    b_vector = numpy.dot(rotation_matrix_2d(-rotation), vec_b0 / scale / counts[1]) * mirror_signs

    a_vector = (element.xy[1] - offset) / counts[0]
    b_vector = (element.xy[2] - offset) / counts[1]

    gridrep = GridRepetition(pattern=None,
                             a_vector=a_vector,

@@ -407,25 +384,28 @@ def _aref_to_gridrep(element: gdsii.elements.ARef) -> GridRepetition:
                             offset=offset,
                             rotation=rotation,
                             scale=scale,
                             mirrored=(mirror_signs == -1))
    gridrep.ref_name = element.struct_name
                             mirrored=(mirror_across_x, False))
    gridrep.identifier = (element.struct_name,)

    return gridrep


def _subpatterns_to_refs(subpatterns: List[SubPattern or GridRepetition]
                         ) -> List[gdsii.elements.ARef or gdsii.elements.SRef]:
    # strans must be set for angle and mag to take effect
def _subpatterns_to_refs(subpatterns: List[subpattern_t]
                         ) -> List[Union[gdsii.elements.ARef, gdsii.elements.SRef]]:
    refs = []
    for subpat in subpatterns:
        if subpat.pattern is None:
            continue
        encoded_name = subpat.pattern.name

        # Note: GDS mirrors first and rotates second
        mirror_across_x, extra_angle = normalize_mirror(subpat.mirrored)
        ref: Union[gdsii.elements.SRef, gdsii.elements.ARef]
        if isinstance(subpat, GridRepetition):
            mirror_signs = (-1) ** numpy.array(subpat.mirrored)
            xy = numpy.array(subpat.offset) + [
                [0, 0],
                numpy.dot(rotation_matrix_2d(subpat.rotation), subpat.a_vector * mirror_signs) * subpat.scale * subpat.a_count,
                numpy.dot(rotation_matrix_2d(subpat.rotation), subpat.b_vector * mirror_signs) * subpat.scale * subpat.b_count,
                subpat.a_vector * subpat.a_count,
                subpat.b_vector * subpat.b_count,
                ]
            ref = gdsii.elements.ARef(struct_name=encoded_name,
                                      xy=numpy.round(xy).astype(int),

@@ -435,36 +415,40 @@ def _subpatterns_to_refs(subpatterns: List[SubPattern or GridRepetition]
            ref = gdsii.elements.SRef(struct_name=encoded_name,
                                      xy=numpy.round([subpat.offset]).astype(int))

        ref.strans = 0
        ref.angle = subpat.rotation * 180 / numpy.pi
        mirror_x, mirror_y = subpat.mirrored
        if mirror_x and mirror_y:
            ref.angle += 180
        elif mirror_x:
            ref.strans = set_bit(ref.strans, 15 - 0, True)
        elif mirror_y:
            ref.angle += 180
            ref.strans = set_bit(ref.strans, 15 - 0, True)
        ref.angle %= 360
        ref.angle = ((subpat.rotation + extra_angle) * 180 / numpy.pi) % 360
        # strans must be non-None for angle and mag to take effect
        ref.strans = set_bit(0, 15 - 0, mirror_across_x)
        ref.mag = subpat.scale

        refs.append(ref)
    return refs

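The old and new branches above encode the same identity: mirroring across both axes is equivalent to a 180 degree rotation, so any `mirrored` pair can be reduced to at most one mirror (across x) plus an extra rotation, which is what `normalize_mirror()` is used for here. A quick numpy check of that identity (illustration only, not part of the diff):

```python
import numpy

R180 = numpy.array([[-1.0,  0.0],
                    [ 0.0, -1.0]])         # rotation by pi
M_x = numpy.diag([ 1.0, -1.0])             # mirror across the x axis (y -> -y)
M_y = numpy.diag([-1.0,  1.0])             # mirror across the y axis (x -> -x)

assert numpy.allclose(M_x @ M_y, R180)     # mirror across both axes == rotate by 180
assert numpy.allclose(M_y, R180 @ M_x)     # mirror across y == mirror across x, then rotate by 180
```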
def _shapes_to_boundaries(shapes: List[Shape]
                          ) -> List[gdsii.elements.Boundary]:
    # Add a Boundary element for each shape
    boundaries = []
def _shapes_to_elements(shapes: List[Shape],
                        polygonize_paths: bool = False
                        ) -> List[Union[gdsii.elements.Boundary, gdsii.elements.Path]]:
    elements: List[Union[gdsii.elements.Boundary, gdsii.elements.Path]] = []
    # Add a Boundary element for each shape, and Path elements if necessary
    for shape in shapes:
        layer, data_type = _mlayer2gds(shape.layer)
        for polygon in shape.to_polygons():
            xy_open = numpy.round(polygon.vertices + polygon.offset).astype(int)
            xy_closed = numpy.vstack((xy_open, xy_open[0, :]))
            boundaries.append(gdsii.elements.Boundary(layer=layer,
                                                      data_type=data_type,
                                                      xy=xy_closed))
    return boundaries
        if isinstance(shape, Path) and not polygonize_paths:
            xy = numpy.round(shape.vertices + shape.offset).astype(int)
            width = numpy.round(shape.width).astype(int)
            path_type = next(k for k, v in path_cap_map.items() if v == shape.cap) #reverse lookup
            path = gdsii.elements.Path(layer=layer,
                                       data_type=data_type,
                                       xy=xy)
            path.path_type = path_type
            path.width = width
            elements.append(path)
        else:
            for polygon in shape.to_polygons():
                xy_open = numpy.round(polygon.vertices + polygon.offset).astype(int)
                xy_closed = numpy.vstack((xy_open, xy_open[0, :]))
                elements.append(gdsii.elements.Boundary(layer=layer,
                                                        data_type=data_type,
                                                        xy=xy_closed))
    return elements


def _labels_to_texts(labels: List[Label]) -> List[gdsii.elements.Text]:

@@ -479,10 +463,21 @@ def _labels_to_texts(labels: List[Label]) -> List[gdsii.elements.Text]:
    return texts


def _disambiguate_pattern_names(patterns):
def disambiguate_pattern_names(patterns,
                               max_name_length: int = 32,
                               suffix_length: int = 6,
                               dup_warn_filter: Callable[[str,], bool] = None, # If returns False, don't warn about this name
                               ):
    used_names = []
    for pat in patterns:
        sanitized_name = re.compile('[^A-Za-z0-9_\?\$]').sub('_', pat.name)
        if len(pat.name) > max_name_length:
            shortened_name = pat.name[:max_name_length - suffix_length]
            logger.warning('Pattern name "{}" is too long ({}/{} chars),\n'.format(pat.name, len(pat.name), max_name_length) +
                           ' shortening to "{}" before generating suffix'.format(shortened_name))
        else:
            shortened_name = pat.name

        sanitized_name = re.compile('[^A-Za-z0-9_\?\$]').sub('_', shortened_name)

        i = 0
        suffixed_name = sanitized_name

@@ -495,14 +490,16 @@ def _disambiguate_pattern_names(patterns):
        if sanitized_name == '':
            logger.warning('Empty pattern name saved as "{}"'.format(suffixed_name))
        elif suffixed_name != sanitized_name:
            logger.warning('Pattern name "{}" appears multiple times; renaming to "{}"'.format(pat.name, suffixed_name))
            if dup_warn_filter is None or dup_warn_filter(pat.name):
                logger.warning('Pattern name "{}" ({}) appears multiple times;\n renaming to "{}"'.format(
                    pat.name, sanitized_name, suffixed_name))

        encoded_name = suffixed_name.encode('ASCII')
        if len(encoded_name) == 0:
            # Should never happen since zero-length names are replaced
            raise PatternError('Zero-length name after sanitize+encode, originally "{}"'.format(pat.name))
        if len(encoded_name) > 32:
            raise PatternError('Pattern name "{}" length > 32 after encode, originally "{}"'.format(encoded_name, pat.name))
            raise PatternError('Zero-length name after sanitize+encode,\n originally "{}"'.format(pat.name))
        if len(encoded_name) > max_name_length:
            raise PatternError('Pattern name "{!r}" length > {} after encode,\n originally "{}"'.format(encoded_name, max_name_length, pat.name))

        pat.name = encoded_name
        used_names.append(suffixed_name)
441
masque/file/oasis.py
Normal file
441
masque/file/oasis.py
Normal file
@ -0,0 +1,441 @@
|
||||
"""
|
||||
OASIS file format readers and writers
|
||||
|
||||
Note that OASIS references follow the same convention as `masque`,
|
||||
with this order of operations:
|
||||
1. Mirroring
|
||||
2. Rotation
|
||||
3. Scaling
|
||||
4. Offset and array expansion (no mirroring/rotation/scaling applied to offsets)
|
||||
|
||||
Scaling, rotation, and mirroring apply to individual instances, not grid
|
||||
vectors or offsets.
|
||||
"""
|
||||
from typing import List, Any, Dict, Tuple, Callable, Union, Sequence, Iterable, Optional
|
||||
import re
|
||||
import io
|
||||
import copy
|
||||
import numpy
|
||||
import base64
|
||||
import struct
|
||||
import logging
|
||||
import pathlib
|
||||
import gzip
|
||||
|
||||
import fatamorgana
|
||||
import fatamorgana.records as fatrec
|
||||
from fatamorgana.basic import PathExtensionScheme
|
||||
|
||||
from .utils import mangle_name, make_dose_table
|
||||
from .. import Pattern, SubPattern, GridRepetition, PatternError, Label, Shape, subpattern_t
|
||||
from ..shapes import Polygon, Path
|
||||
from ..utils import rotation_matrix_2d, get_bit, set_bit, vector2, is_scalar, layer_t
|
||||
from ..utils import remove_colinear_vertices, normalize_mirror
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
path_cap_map = {
|
||||
PathExtensionScheme.Flush: Path.Cap.Flush,
|
||||
PathExtensionScheme.HalfWidth: Path.Cap.Square,
|
||||
PathExtensionScheme.Arbitrary: Path.Cap.SquareCustom,
|
||||
}
|
||||
|
||||
|
||||
def write(patterns: Union[Pattern, List[Pattern]],
|
||||
stream: io.BufferedIOBase,
|
||||
units_per_micron: int,
|
||||
layer_map: Optional[Dict[str, Union[int, Tuple[int, int]]]] = None,
|
||||
modify_originals: bool = False,
|
||||
disambiguate_func: Optional[Callable[[Iterable[Pattern]], None]] = None):
|
||||
"""
|
||||
Write a `Pattern` or list of patterns to an OASIS file, writing patterns
|
||||
as OASIS cells, polygons as Polygon records, and subpatterns as Placement
|
||||
records. Other shape types may be converted to polygons if no equivalent
|
||||
record type exists (or the conversion is not implemented here yet). #TODO
|
||||
|
||||
For each shape,
|
||||
layer is chosen to be equal to `shape.layer` if it is an int,
|
||||
or `shape.layer[0]` if it is a tuple
|
||||
datatype is chosen to be `shape.layer[1]` if available,
|
||||
otherwise `0`
|
||||
If a layer map is provided, layer strings will be converted
|
||||
automatically, and layer names will be written to the file.
|
||||
|
||||
If you want pattern polygonized with non-default arguments, just call `pattern.polygonize()`
|
||||
prior to calling this function.
|
||||
|
||||
Args:
|
||||
patterns: A Pattern or list of patterns to write to file.
|
||||
stream: Stream object to write to.
|
||||
units_per_micron: Written into the OASIS file, number of grid steps per micrometer.
|
||||
All distances are assumed to be an integer multiple of the grid step, and are stored as such.
|
||||
layer_map: Dictionary which translates layer names (strings) into layer numbers (int or (int, int) tuples). Entries are also written into the file as layer-name records.
|
||||
modify_originals: If `True`, the original pattern is modified as part of the writing
|
||||
process. Otherwise, a copy is made and `deepunlock()`-ed.
|
||||
Default `False`.
|
||||
disambiguate_func: Function which takes a list of patterns and alters them
|
||||
to make their names valid and unique. Default is `disambiguate_pattern_names`.
|
||||
"""
|
||||
if isinstance(patterns, Pattern):
|
||||
patterns = [patterns]
|
||||
|
||||
if layer_map is None:
|
||||
layer_map = {}
|
||||
|
||||
if disambiguate_func is None:
|
||||
disambiguate_func = disambiguate_pattern_names
|
||||
|
||||
if not modify_originals:
|
||||
patterns = [p.deepunlock() for p in copy.deepcopy(patterns)]
|
||||
|
||||
# Create library
|
||||
lib = fatamorgana.OasisLayout(unit=units_per_micron, validation=None)
|
||||
|
||||
for name, layer_num in layer_map.items():
|
||||
layer, data_type = _mlayer2oas(layer_num)
|
||||
lib.layer_names.append( #TODO figure out how to deal with text layers
|
||||
fatrec.LayerName(nstring=name,
|
||||
layer_interval=(layer, layer),
|
||||
type_interval=(data_type, data_type),
|
||||
is_textlayer=False))
|
||||
|
||||
def layer2oas(layer: layer_t) -> Tuple[int, int]:
|
||||
layer_num = layer_map[layer] if isinstance(layer, str) else layer
|
||||
return _mlayer2oas(layer_num)
|
||||
|
||||
# Get a dict of id(pattern) -> pattern
|
||||
patterns_by_id = {id(pattern): pattern for pattern in patterns}
|
||||
for pattern in patterns:
|
||||
for i, p in pattern.referenced_patterns_by_id().items():
|
||||
patterns_by_id[i] = p
|
||||
|
||||
disambiguate_func(patterns_by_id.values())
|
||||
|
||||
# Now create a structure for each pattern
|
||||
for pat in patterns_by_id.values():
|
||||
structure = fatamorgana.Cell(name=NString(pat.name))
|
||||
lib.cells.append(structure)
|
||||
|
||||
structure.geometry += _shapes_to_elements(pat.shapes, layer2oas)
|
||||
structure.geometry += _labels_to_texts(pat.labels, layer2oas)
|
||||
structure.placements += _subpatterns_to_refs(pat.subpatterns)
|
||||
|
||||
lib.write(stream)
|
||||
return
|
||||
|
||||
|
||||
def writefile(patterns: Union[List[Pattern], Pattern],
|
||||
filename: Union[str, pathlib.Path],
|
||||
*args,
|
||||
**kwargs,
|
||||
):
|
||||
"""
|
||||
Wrapper for `oasis.write()` that takes a filename or path instead of a stream.
|
||||
|
||||
Will automatically compress the file if it has a .gz suffix.
|
||||
|
||||
Args:
|
||||
patterns: `Pattern` or list of patterns to save
|
||||
filename: Filename to save to.
|
||||
*args: passed to `oasis.write`
|
||||
**kwargs: passed to `oasis.write`
|
||||
"""
|
||||
path = pathlib.Path(filename)
|
||||
if path.suffix == '.gz':
|
||||
open_func: Callable = gzip.open
|
||||
else:
|
||||
open_func = open
|
||||
|
||||
with io.BufferedWriter(open_func(path, mode='wb')) as stream:
|
||||
results = write(patterns, stream, *args, **kwargs)
|
||||
return results
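# Illustrative usage (assumes `pat` is a Pattern laid out in nanometers, i.e. 1000
# database units per micron):
#   with open('out.oas', 'wb') as stream:
#       write(pat, stream, units_per_micron=1000)
#   # or, compressed, via the filename wrapper:
#   writefile(pat, 'out.oas.gz', 1000)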
|
||||
|
||||
|
||||
def readfile(filename: Union[str, pathlib.Path],
|
||||
*args,
|
||||
**kwargs,
|
||||
) -> Tuple[Dict[str, Pattern], Dict[str, Any]]:
|
||||
"""
|
||||
Wrapper for `oasis.read()` that takes a filename or path instead of a stream.
|
||||
|
||||
Will automatically decompress files with a .gz suffix.
|
||||
|
||||
Args:
|
||||
filename: Filename to read from.
|
||||
*args: passed to `oasis.read`
|
||||
**kwargs: passed to `oasis.read`
|
||||
"""
|
||||
path = pathlib.Path(filename)
|
||||
if path.suffix == '.gz':
|
||||
open_func: Callable = gzip.open
|
||||
else:
|
||||
open_func = open
|
||||
|
||||
with io.BufferedReader(open_func(path, mode='rb')) as stream:
|
||||
results = read(stream, *args, **kwargs)
|
||||
return results
|
||||
|
||||
|
||||
def read(stream: io.BufferedIOBase,
|
||||
clean_vertices: bool = True,
|
||||
) -> Tuple[Dict[str, Pattern], Dict[str, Any]]:
|
||||
"""
|
||||
Read an OASIS file and translate it into a dict of Pattern objects. OASIS cells are
|
||||
translated into Pattern objects; Polygons are translated into polygons, and Placements
|
||||
are translated into SubPattern or GridRepetition objects.
|
||||
|
||||
Additional library info is returned in a dict, containing:
|
||||
'units_per_micrometer': number of database units per micrometer (all values are in database units)
|
||||
|
||||
Args:
|
||||
stream: Stream to read from.
|
||||
clean_vertices: If `True`, remove any redundant vertices when loading polygons.
|
||||
The cleaning process removes any polygons with zero area or <3 vertices.
|
||||
Default `True`.
|
||||
|
||||
Returns:
|
||||
- Dict of pattern_name:Patterns generated from OASIS cells
|
||||
- Dict of OASIS library info
|
||||
"""
|
||||
|
||||
lib = fatamorgana.OasisLayout.read(stream)
|
||||
|
||||
library_info = {'units_per_micrometer': lib.unit,
|
||||
}
|
||||
|
||||
patterns = []
|
||||
for cell in lib.cells:
|
||||
pat = Pattern(name=cell.name.string)
|
||||
for element in cell.geometry:
|
||||
if element.repetition is not None:
|
||||
raise PatternError('masque OASIS reader does not implement repetitions for shapes yet')
|
||||
|
||||
# Switch based on element type:
|
||||
if isinstance(element, fatrec.Polygon):
|
||||
args = {'vertices': element.point_list,
|
||||
'layer': (element.layer, element.data_type),
|
||||
'offset': (element.x, element.y),
|
||||
}
|
||||
poly = Polygon(**args)
|
||||
|
||||
if clean_vertices:
|
||||
try:
|
||||
poly.clean_vertices()
|
||||
except PatternError:
|
||||
continue
|
||||
|
||||
pat.shapes.append(poly)
|
||||
|
||||
elif isinstance(element, fatrec.Path):
|
||||
cap_start = path_cap_map[element.extension_start[0]]
|
||||
cap_end = path_cap_map[element.extension_end[0]]
|
||||
if cap_start != cap_end:
|
||||
raise Exception('masque does not support multiple cap types on a single path.') #TODO handle multiple cap types
|
||||
cap = cap_start
|
||||
|
||||
args = {'vertices': element.point_list,
|
||||
'layer': (element.layer, element.data_type),
|
||||
'offset': (element.x, element.y),
|
||||
'width': element.half_width * 2,
|
||||
'cap': cap,
|
||||
}
|
||||
|
||||
if cap == Path.Cap.SquareCustom:
|
||||
args['cap_extensions'] = numpy.array((element.extension_start[1],
|
||||
element.extension_end[1]))
|
||||
path = Path(**args)
|
||||
|
||||
if clean_vertices:
|
||||
try:
|
||||
path.clean_vertices()
|
||||
except PatternError as err:
|
||||
continue
|
||||
|
||||
pat.shapes.append(path)
|
||||
|
||||
elif isinstance(element, fatrec.Text):
|
||||
args = {'layer': (element.layer, element.data_type),
|
||||
'offset': (element.x, element.y),
|
||||
'string': str(element.string),
|
||||
}
|
||||
pat.labels.append(Label(**args))
|
||||
|
||||
for placement in cell.placements:
|
||||
pat.subpatterns += _placement_to_subpats(placement)
|
||||
|
||||
patterns.append(pat)
|
||||
|
||||
# Create a dict of {pattern.name: pattern, ...}, then fix up all subpattern.pattern entries
|
||||
# according to the subpattern.identifier (which is deleted after use).
|
||||
patterns_dict = dict(((p.name, p) for p in patterns))
|
||||
for p in patterns_dict.values():
|
||||
for sp in p.subpatterns:
|
||||
sp.pattern = patterns_dict[sp.identifier[0]]
|
||||
del sp.identifier
|
||||
|
||||
return patterns_dict, library_info
|
||||
|
||||
|
||||
def _mlayer2oas(mlayer: layer_t) -> Tuple[int, int]:
|
||||
""" Helper to turn a layer tuple-or-int into a layer and datatype"""
|
||||
if isinstance(mlayer, int):
|
||||
layer = mlayer
|
||||
data_type = 0
|
||||
elif isinstance(mlayer, tuple):
|
||||
layer = mlayer[0]
|
||||
if len(mlayer) > 1:
|
||||
data_type = mlayer[1]
|
||||
else:
|
||||
data_type = 0
|
||||
else:
|
||||
raise PatternError(f'Invalid layer for OASIS: {mlayer}. Note that OASIS layers cannot be strings.') #TODO allow string layers using layer map def
|
||||
return layer, data_type
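# Examples of the mapping performed above (illustrative; 'metal1' is a hypothetical name):
#   _mlayer2oas(5)        -> (5, 0)
#   _mlayer2oas((3, 2))   -> (3, 2)
#   _mlayer2oas((7,))     -> (7, 0)
#   _mlayer2oas('metal1')  raises PatternError (string layers need a layer_map)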
|
||||
|
||||
|
||||
def _placement_to_subpats(placement: fatrec.Placement) -> List[subpattern_t]:
|
||||
"""
|
||||
Helper function to create a SubPattern from a placement. Sets subpat.pattern to None
|
||||
and sets the instance .identifier to (struct_name,).
|
||||
"""
|
||||
xy = numpy.array((placement.x, placement.y))
|
||||
kwargs = {
|
||||
'pattern': None,
|
||||
'mirrored': (placement.flip, False),
|
||||
'rotation': float(placement.angle * numpy.pi / 180),
|
||||
'scale': placement.magnification,
|
||||
'identifier': (placement.name,),
|
||||
}
|
||||
|
||||
rep = placement.repetition
|
||||
if isinstance(rep, fatamorgana.GridRepetition):
|
||||
subpat = GridRepetition(a_vector=rep.a_vector,
|
||||
b_vector=rep.b_vector,
|
||||
a_count=rep.a_count,
|
||||
b_count=rep.b_count,
|
||||
offset=xy,
|
||||
**kwargs)
|
||||
subpats = [subpat]
|
||||
elif isinstance(rep, fatamorgana.ArbitraryRepetition):
|
||||
subpats = []
|
||||
for rep_offset in numpy.cumsum(numpy.column_stack((rep.x_displacements,
|
||||
rep.y_displacements)), axis=0):
|
||||
subpats.append(SubPattern(offset=xy + rep_offset, **kwargs))
|
||||
elif rep is None:
|
||||
subpats = [SubPattern(offset=xy, **kwargs)]
|
||||
return subpats
|
||||
|
||||
|
||||
def _subpatterns_to_refs(subpatterns: List[subpattern_t]
|
||||
) -> List[fatrec.Placement]:
|
||||
refs = []
|
||||
for subpat in subpatterns:
|
||||
if subpat.pattern is None:
|
||||
continue
|
||||
|
||||
# Note: OASIS mirrors first and rotates second
|
||||
mirror_across_x, extra_angle = normalize_mirror(subpat.mirrored)
|
||||
xy = numpy.round(subpat.offset).astype(int)
|
||||
kwargs = {
|
||||
'x': xy[0],
|
||||
'y': xy[1],
|
||||
}
|
||||
|
||||
if isinstance(subpat, GridRepetition):
|
||||
kwargs['rep'] = fatamorgana.GridRepetition(
|
||||
a_vector=numpy.round(subpat.a_vector).astype(int),
|
||||
b_vector=numpy.round(subpat.b_vector).astype(int),
|
||||
a_count=numpy.round(subpat.a_count).astype(int),
|
||||
b_count=numpy.round(subpat.b_count).astype(int))
|
||||
|
||||
ref = fatrec.Placement(
|
||||
name=subpat.pattern.name,
|
||||
flip=mirror_across_x,
|
||||
angle=((subpat.rotation + extra_angle) * 180 / numpy.pi) % 360,
|
||||
magnification=subpat.scale,
|
||||
**kwargs)
|
||||
|
||||
refs.append(ref)
|
||||
return refs
|
||||
|
||||
|
||||
def _shapes_to_elements(shapes: List[Shape],
|
||||
layer2oas: Callable[[layer_t], Tuple[int, int]],
|
||||
polygonize_paths: bool = False,
|
||||
) -> List[Union[fatrec.Polygon, fatrec.Path]]:
|
||||
# Add a Polygon record for each shape, and Path elements if necessary
|
||||
elements: List[Union[fatrec.Polygon, fatrec.Path]] = []
|
||||
for shape in shapes:
|
||||
layer, data_type = layer2oas(shape.layer)
|
||||
if isinstance(shape, Path) and not polygonize_paths:
|
||||
offset = numpy.round(shape.offset).astype(int)
|
||||
points = numpy.round(shape.vertices).astype(int)
|
||||
half_width = numpy.round(shape.width / 2).astype(int)
|
||||
path_type = next(k for k, v in path_cap_map.items() if v == shape.cap) #reverse lookup
|
||||
path = fatrec.Path(layer=layer,
|
||||
data_type=data_type,
|
||||
point_list=points,
|
||||
half_width=half_width,
|
||||
x=offset[0],
|
||||
y=offset[1],
|
||||
extension_start=path_type, #TODO implement multiple cap types?
|
||||
extension_end=path_type,
|
||||
)
|
||||
elements.append(path)
|
||||
else:
|
||||
for polygon in shape.to_polygons():
|
||||
points = numpy.round(polygon.vertices).astype(int)
|
||||
offset = numpy.round(polygon.offset).astype(int)
|
||||
elements.append(fatrec.Polygon(layer=layer,
|
||||
data_type=data_type,
|
||||
x=offset[0],
|
||||
y=offset[1],
|
||||
point_list=points))
|
||||
return elements
|
||||
|
||||
|
||||
def _labels_to_texts(labels: List[Label],
|
||||
layer2oas: Callable[[layer_t], Tuple[int, int]],
|
||||
) -> List[fatrec.Text]:
|
||||
texts = []
|
||||
for label in labels:
|
||||
layer, text_type = layer2oas(label.layer)
|
||||
xy = numpy.round(label.offset).astype(int)
|
||||
texts.append(fatrec.Text(layer=layer,
|
||||
text_type=text_type,
|
||||
x=xy[0],
|
||||
y=xy[1],
|
||||
string=label.string))
|
||||
return texts
|
||||
|
||||
|
||||
def disambiguate_pattern_names(patterns,
|
||||
dup_warn_filter: Optional[Callable[[str], bool]] = None, # If returns False, don't warn about this name
|
||||
):
|
||||
used_names = []
|
||||
for pat in patterns:
|
||||
sanitized_name = re.compile(r'[^A-Za-z0-9_\?\$]').sub('_', pat.name)
|
||||
|
||||
i = 0
|
||||
suffixed_name = sanitized_name
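# The loop below appends a base64-style suffix whenever a name collides or is empty.
# Worked example (illustrative, for a pattern named 'name'): struct.pack('>Q', 0)
# base64-encodes to 'AAAAAAAAAAA=', which strips down to '', so the first fallback
# is 'name$'; for i=1 the suffix is 'E', giving 'name$E', and so on.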
|
||||
while suffixed_name in used_names or suffixed_name == '':
|
||||
suffix = base64.b64encode(struct.pack('>Q', i), b'$?').decode('ASCII')
|
||||
|
||||
suffixed_name = sanitized_name + '$' + suffix[:-1].lstrip('A')
|
||||
i += 1
|
||||
|
||||
if sanitized_name == '':
|
||||
logger.warning('Empty pattern name saved as "{}"'.format(suffixed_name))
|
||||
elif suffixed_name != sanitized_name:
|
||||
if dup_warn_filter is None or dup_warn_filter(pat.name):
|
||||
logger.warning('Pattern name "{}" ({}) appears multiple times;\n renaming to "{}"'.format(
|
||||
pat.name, sanitized_name, suffixed_name))
|
||||
|
||||
encoded_name = suffixed_name.encode('ASCII')
|
||||
if len(encoded_name) == 0:
|
||||
# Should never happen since zero-length names are replaced
|
||||
raise PatternError('Zero-length name after sanitize+encode,\n originally "{}"'.format(pat.name))
|
||||
|
||||
pat.name = encoded_name
|
||||
used_names.append(suffixed_name)
|
@ -1,18 +1,16 @@
|
||||
"""
|
||||
SVG file format readers and writers
|
||||
"""
|
||||
|
||||
from typing import Dict, Optional
|
||||
import svgwrite
|
||||
import numpy
|
||||
import warnings
|
||||
|
||||
from .utils import mangle_name
|
||||
from .. import Pattern
|
||||
|
||||
|
||||
__author__ = 'Jan Petykiewicz'
|
||||
|
||||
|
||||
def write(pattern: Pattern,
|
||||
def writefile(pattern: Pattern,
|
||||
filename: str,
|
||||
custom_attributes: bool=False):
|
||||
"""
|
||||
@ -23,26 +21,32 @@ def write(pattern: Pattern,
|
||||
|
||||
Note that this function modifies the Pattern.
|
||||
|
||||
If custom_attributes is True, non-standard pattern_layer and pattern_dose attributes
|
||||
If `custom_attributes` is `True`, non-standard `pattern_layer` and `pattern_dose` attributes
|
||||
are written to the relevant elements.
|
||||
|
||||
It is often a good idea to run pattern.subpatternize() on pattern prior to
|
||||
calling this function, especially if calling .polygonize() will result in very
|
||||
It is often a good idea to run `pattern.subpatternize()` on pattern prior to
|
||||
calling this function, especially if calling `.polygonize()` will result in very
|
||||
many vertices.
|
||||
|
||||
If you want pattern polygonized with non-default arguments, just call pattern.polygonize()
|
||||
If you want pattern polygonized with non-default arguments, just call `pattern.polygonize()`
|
||||
prior to calling this function.
|
||||
|
||||
:param pattern: Pattern to write to file. Modified by this function.
|
||||
:param filename: Filename to write to.
|
||||
:param custom_attributes: Whether to write non-standard pattern_layer and
|
||||
pattern_dose attributes to the SVG elements.
|
||||
Args:
|
||||
pattern: Pattern to write to file. Modified by this function.
|
||||
filename: Filename to write to.
|
||||
custom_attributes: Whether to write non-standard `pattern_layer` and
|
||||
`pattern_dose` attributes to the SVG elements.
|
||||
"""
|
||||
|
||||
# Polygonize pattern
|
||||
pattern.polygonize()
|
||||
|
||||
[bounds_min, bounds_max] = pattern.get_bounds()
|
||||
bounds = pattern.get_bounds()
|
||||
if bounds is None:
|
||||
bounds_min, bounds_max = numpy.array([[-1, -1], [1, 1]])
|
||||
warnings.warn('Pattern had no bounds (empty?); setting arbitrary viewbox')
|
||||
else:
|
||||
bounds_min, bounds_max = bounds
|
||||
|
||||
viewbox = numpy.hstack((bounds_min - 1, (bounds_max - bounds_min) + 2))
|
||||
viewbox_string = '{:g} {:g} {:g} {:g}'.format(*viewbox)
|
||||
@ -52,11 +56,13 @@ def write(pattern: Pattern,
|
||||
debug=(not custom_attributes))
|
||||
|
||||
# Get a dict of id(pattern) -> pattern
|
||||
patterns_by_id = {**(pattern.referenced_patterns_by_id()), id(pattern): pattern}
|
||||
patterns_by_id = {**(pattern.referenced_patterns_by_id()), id(pattern): pattern} # type: Dict[int, Optional[Pattern]]
|
||||
|
||||
# Now create a group for each row in sd_table (ie, each pattern + dose combination)
|
||||
# and add in any Boundary and Use elements
|
||||
for pat in patterns_by_id.values():
|
||||
if pat is None:
|
||||
continue
|
||||
svg_group = svg.g(id=mangle_name(pat), fill='blue', stroke='red')
|
||||
|
||||
for shape in pat.shapes:
|
||||
@ -71,6 +77,8 @@ def write(pattern: Pattern,
|
||||
svg_group.add(path)
|
||||
|
||||
for subpat in pat.subpatterns:
|
||||
if subpat.pattern is None:
|
||||
continue
|
||||
transform = 'scale({:g}) rotate({:g}) translate({:g},{:g})'.format(
|
||||
subpat.scale, subpat.rotation, subpat.offset[0], subpat.offset[1])
|
||||
use = svg.use(href='#' + mangle_name(subpat.pattern), transform=transform)
|
||||
@ -83,25 +91,31 @@ def write(pattern: Pattern,
|
||||
svg.save()
|
||||
|
||||
|
||||
def write_inverted(pattern: Pattern, filename: str):
|
||||
def writefile_inverted(pattern: Pattern, filename: str):
|
||||
"""
|
||||
Write an inverted Pattern to an SVG file, by first calling .polygonize() and
|
||||
.flatten() on it to change the shapes into polygons, then drawing a bounding
|
||||
Write an inverted Pattern to an SVG file, by first calling `.polygonize()` and
|
||||
`.flatten()` on it to change the shapes into polygons, then drawing a bounding
|
||||
box and drawing the polygons with reverse vertex order inside it, all within
|
||||
one <path> element.
|
||||
one `<path>` element.
|
||||
|
||||
Note that this function modifies the Pattern.
|
||||
|
||||
If you want pattern polygonized with non-default arguments, just call pattern.polygonize()
|
||||
If you want pattern polygonized with non-default arguments, just call `pattern.polygonize()`
|
||||
prior to calling this function.
|
||||
|
||||
:param pattern: Pattern to write to file. Modified by this function.
|
||||
:param filename: Filename to write to.
|
||||
Args:
|
||||
pattern: Pattern to write to file. Modified by this function.
|
||||
filename: Filename to write to.
|
||||
"""
|
||||
# Polygonize and flatten pattern
|
||||
pattern.polygonize().flatten()
|
||||
|
||||
[bounds_min, bounds_max] = pattern.get_bounds()
|
||||
bounds = pattern.get_bounds()
|
||||
if bounds is None:
|
||||
bounds_min, bounds_max = numpy.array([[-1, -1], [1, 1]])
|
||||
warnings.warn('Pattern had no bounds (empty?); setting arbitrary viewbox')
|
||||
else:
|
||||
bounds_min, bounds_max = bounds
|
||||
|
||||
viewbox = numpy.hstack((bounds_min - 1, (bounds_max - bounds_min) + 2))
|
||||
viewbox_string = '{:g} {:g} {:g} {:g}'.format(*viewbox)
|
||||
@ -129,8 +143,11 @@ def poly2path(vertices: numpy.ndarray) -> str:
|
||||
"""
|
||||
Create an SVG path string from an Nx2 list of vertices.
|
||||
|
||||
:param vertices: Nx2 array of vertices.
|
||||
:return: SVG path-string.
|
||||
Args:
|
||||
vertices: Nx2 array of vertices.
|
||||
|
||||
Returns:
|
||||
SVG path-string.
|
||||
"""
|
||||
commands = 'M{:g},{:g} '.format(vertices[0][0], vertices[0][1])
|
||||
for vertex in vertices[1:]:
|
||||
|
@ -7,16 +7,16 @@ from typing import Set, Tuple, List
|
||||
from masque.pattern import Pattern
|
||||
|
||||
|
||||
__author__ = 'Jan Petykiewicz'
|
||||
|
||||
|
||||
def mangle_name(pattern: Pattern, dose_multiplier: float=1.0) -> str:
|
||||
"""
|
||||
Create a name using pattern.name, id(pattern), and the dose multiplier.
|
||||
Create a name using `pattern.name`, `id(pattern)`, and the dose multiplier.
|
||||
|
||||
:param pattern: Pattern whose name we want to mangle.
|
||||
:param dose_multiplier: Dose multiplier to mangle with.
|
||||
:return: Mangled name.
|
||||
Args:
|
||||
pattern: Pattern whose name we want to mangle.
|
||||
dose_multiplier: Dose multiplier to mangle with.
|
||||
|
||||
Returns:
|
||||
Mangled name.
|
||||
"""
|
||||
expression = re.compile('[^A-Za-z0-9_\?\$]')
|
||||
full_name = '{}_{}_{}'.format(pattern.name, dose_multiplier, id(pattern))
|
||||
@ -26,17 +26,127 @@ def mangle_name(pattern: Pattern, dose_multiplier: float=1.0) -> str:
|
||||
|
||||
def make_dose_table(patterns: List[Pattern], dose_multiplier: float=1.0) -> Set[Tuple[int, float]]:
|
||||
"""
|
||||
Create a set containing (id(pat), written_dose) for each pattern (including subpatterns)
|
||||
Create a set containing `(id(pat), written_dose)` for each pattern (including subpatterns)
|
||||
|
||||
:param pattern: Source Patterns.
|
||||
:param dose_multiplier: Multiplier for all written_dose entries.
|
||||
:return: {(id(subpat.pattern), written_dose), ...}
|
||||
Args:
|
||||
patterns: Source Patterns.
|
||||
dose_multiplier: Multiplier for all written_dose entries.
|
||||
|
||||
Returns:
|
||||
`{(id(subpat.pattern), written_dose), ...}`
|
||||
"""
|
||||
dose_table = {(id(pattern), dose_multiplier) for pattern in patterns}
|
||||
for pattern in patterns:
|
||||
for subpat in pattern.subpatterns:
|
||||
if subpat.pattern is None:
|
||||
continue
|
||||
subpat_dose_entry = (id(subpat.pattern), subpat.dose * dose_multiplier)
|
||||
if subpat_dose_entry not in dose_table:
|
||||
subpat_dose_table = make_dose_table([subpat.pattern], subpat.dose * dose_multiplier)
|
||||
dose_table = dose_table.union(subpat_dose_table)
|
||||
return dose_table
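# Illustrative result: for a pattern `top` containing one subpattern that references
# `child` with dose 0.5, make_dose_table([top]) returns
#   {(id(top), 1.0), (id(child), 0.5)}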
|
||||
|
||||
|
||||
def dtype2dose(pattern: Pattern) -> Pattern:
|
||||
"""
|
||||
For each shape in the pattern, if the layer is a tuple, set the
|
||||
layer to the tuple's first element and set the dose to the
|
||||
tuple's second element.
|
||||
|
||||
Generally intended for use with `Pattern.apply()`.
|
||||
|
||||
Args:
|
||||
pattern: Pattern to modify
|
||||
|
||||
Returns:
|
||||
pattern
|
||||
"""
|
||||
for shape in pattern.shapes:
|
||||
if isinstance(shape.layer, tuple):
|
||||
shape.dose = shape.layer[1]
|
||||
shape.layer = shape.layer[0]
|
||||
return pattern
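# Typical use (illustrative): convert (layer, datatype) tuples back into doses across
# a whole pattern tree by combining this with Pattern.apply():
#   pattern.apply(dtype2dose)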
|
||||
|
||||
|
||||
def dose2dtype(patterns: List[Pattern],
|
||||
) -> Tuple[List[Pattern], List[float]]:
|
||||
"""
|
||||
For each shape in each pattern, set shape.layer to the tuple
|
||||
(base_layer, datatype), where:
|
||||
layer is chosen to be equal to the original shape.layer if it is an int,
|
||||
or shape.layer[0] if it is a tuple. `str` layers raise a PatternError.
|
||||
datatype is chosen arbitrarily, based on calculated dose for each shape.
|
||||
Shapes with equal calculated dose will have the same datatype.
|
||||
A list of doses is returned, providing a mapping between datatype
|
||||
(list index) and dose (list entry).
|
||||
|
||||
Note that this function modifies the input Pattern(s).
|
||||
|
||||
Args:
|
||||
patterns: A `Pattern` or list of patterns to process. Modified by this function.
|
||||
|
||||
Returns:
|
||||
(patterns, dose_list)
|
||||
patterns: modified input patterns
|
||||
dose_list: A list of doses, providing a mapping between datatype (int, list index)
|
||||
and dose (float, list entry).
|
||||
"""
|
||||
# Get a dict of id(pattern) -> pattern
|
||||
patterns_by_id = {id(pattern): pattern for pattern in patterns}
|
||||
for pattern in patterns:
|
||||
for i, p in pattern.referenced_patterns_by_id().items():
|
||||
patterns_by_id[i] = p
|
||||
|
||||
# Get a table of (id(pat), written_dose) for each pattern and subpattern
|
||||
sd_table = make_dose_table(patterns)
|
||||
|
||||
# Figure out all the unique doses necessary to write this pattern
|
||||
# This means going through each row in sd_table and adding the dose values needed to write
|
||||
# that subpattern at that dose level
|
||||
dose_vals = set()
|
||||
for pat_id, pat_dose in sd_table:
|
||||
pat = patterns_by_id[pat_id]
|
||||
for shape in pat.shapes:
|
||||
dose_vals.add(shape.dose * pat_dose)
|
||||
|
||||
if len(dose_vals) > 256:
|
||||
raise PatternError('Too many dose values: {}, maximum 256 when using dtypes.'.format(len(dose_vals)))
|
||||
|
||||
dose_vals_list = list(dose_vals)
|
||||
|
||||
# Create a new pattern for each non-1-dose entry in the dose table
|
||||
# and update the shapes to reflect their new dose
|
||||
new_pats = {} # (id, dose) -> new_pattern mapping
|
||||
for pat_id, pat_dose in sd_table:
|
||||
if pat_dose == 1:
|
||||
new_pats[(pat_id, pat_dose)] = patterns_by_id[pat_id]
|
||||
continue
|
||||
|
||||
old_pat = patterns_by_id[pat_id]
|
||||
pat = old_pat.copy() # keep old subpatterns
|
||||
pat.shapes = copy.deepcopy(old_pat.shapes)
|
||||
pat.labels = copy.deepcopy(old_pat.labels)
|
||||
|
||||
encoded_name = mangle_name(pat, pat_dose)
|
||||
if len(encoded_name) == 0:
|
||||
raise PatternError('Zero-length name after mangle+encode, originally "{}"'.format(pat.name))
|
||||
pat.name = encoded_name
|
||||
|
||||
for shape in pat.shapes:
|
||||
data_type = dose_vals_list.index(shape.dose * pat_dose)
|
||||
if isinstance(shape.layer, int):
|
||||
shape.layer = (shape.layer, data_type)
|
||||
elif isinstance(shape.layer, tuple):
|
||||
shape.layer = (shape.layer[0], data_type)
|
||||
else:
|
||||
raise PatternError(f'Invalid layer for gdsii: {shape.layer}')
|
||||
|
||||
new_pats[(pat_id, pat_dose)] = pat
|
||||
|
||||
# Go back through all the dose-specific patterns and fix up their subpattern entries
|
||||
for (pat_id, pat_dose), pat in new_pats.items():
|
||||
for subpat in pat.subpatterns:
|
||||
dose_mult = subpat.dose * pat_dose
|
||||
subpat.pattern = new_pats[(id(subpat.pattern), dose_mult)]
|
||||
|
||||
return patterns, dose_vals_list
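# Illustrative example: if the input holds shapes with doses 1.0 and 0.5 on layer 1,
#   pats, doses = dose2dtype([pat])
# might leave doses == [1.0, 0.5], with the shapes relabeled to (1, 0) and (1, 1)
# (each datatype is the index of the shape's dose in `doses`; the ordering is arbitrary).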
|
||||
|
123
masque/label.py
@ -1,28 +1,37 @@
|
||||
from typing import List, Tuple
|
||||
from typing import List, Tuple, Dict
|
||||
import copy
|
||||
import numpy
|
||||
from numpy import pi
|
||||
|
||||
from . import PatternError
|
||||
from .utils import is_scalar, vector2, rotation_matrix_2d
|
||||
|
||||
|
||||
__author__ = 'Jan Petykiewicz'
|
||||
from .error import PatternError, PatternLockedError
|
||||
from .utils import is_scalar, vector2, rotation_matrix_2d, layer_t
|
||||
|
||||
|
||||
class Label:
|
||||
"""
|
||||
A circle, which has a position and radius.
|
||||
A text annotation with a position and layer (but no size; it is not drawn)
|
||||
"""
|
||||
__slots__ = ('_offset', '_layer', '_string', 'identifier', 'locked')
|
||||
|
||||
# [x_offset, y_offset]
|
||||
_offset = numpy.array([0.0, 0.0]) # type: numpy.ndarray
|
||||
_offset: numpy.ndarray
|
||||
""" [x_offset, y_offset] """
|
||||
|
||||
# Layer (integer >= 0)
|
||||
_layer = 0 # type: int or Tuple
|
||||
_layer: layer_t
|
||||
""" Layer (integer >= 0, or 2-Tuple of integers) """
|
||||
|
||||
# Label string
|
||||
_string = None # type: str
|
||||
_string: str
|
||||
""" Label string """
|
||||
|
||||
identifier: Tuple
|
||||
""" Arbitrary identifier tuple, useful for keeping track of history when flattening """
|
||||
|
||||
locked: bool
|
||||
""" If `True`, any changes to the label will raise a `PatternLockedError` """
|
||||
|
||||
def __setattr__(self, name, value):
|
||||
if self.locked and name != 'locked':
|
||||
raise PatternLockedError()
|
||||
object.__setattr__(self, name, value)
|
||||
|
||||
# ---- Properties
|
||||
# offset property
|
||||
@ -30,8 +39,6 @@ class Label:
|
||||
def offset(self) -> numpy.ndarray:
|
||||
"""
|
||||
[x, y] offset
|
||||
|
||||
:return: [x_offset, y_offset]
|
||||
"""
|
||||
return self._offset
|
||||
|
||||
@ -42,20 +49,18 @@ class Label:
|
||||
|
||||
if val.size != 2:
|
||||
raise PatternError('Offset must be convertible to size-2 ndarray')
|
||||
self._offset = val.flatten()
|
||||
self._offset = val.flatten().astype(float)
|
||||
|
||||
# layer property
|
||||
@property
|
||||
def layer(self) -> int or Tuple[int]:
|
||||
def layer(self) -> layer_t:
|
||||
"""
|
||||
Layer number (int or tuple of ints)
|
||||
|
||||
:return: Layer
|
||||
Layer number or name (int, tuple of ints, or string)
|
||||
"""
|
||||
return self._layer
|
||||
|
||||
@layer.setter
|
||||
def layer(self, val: int or List[int]):
|
||||
def layer(self, val: layer_t):
|
||||
self._layer = val
|
||||
|
||||
# string property
|
||||
@ -63,8 +68,6 @@ class Label:
|
||||
def string(self) -> str:
|
||||
"""
|
||||
Label string (str)
|
||||
|
||||
:return: string
|
||||
"""
|
||||
return self._string
|
||||
|
||||
@ -74,39 +77,58 @@ class Label:
|
||||
|
||||
def __init__(self,
|
||||
string: str,
|
||||
offset: vector2=(0.0, 0.0),
|
||||
layer: int=0):
|
||||
offset: vector2 = (0.0, 0.0),
|
||||
layer: layer_t = 0,
|
||||
locked: bool = False):
|
||||
object.__setattr__(self, 'locked', False)
|
||||
self.identifier = ()
|
||||
self.string = string
|
||||
self.offset = numpy.array(offset, dtype=float)
|
||||
self.offset = numpy.array(offset, dtype=float, copy=True)
|
||||
self.layer = layer
|
||||
self.locked = locked
|
||||
|
||||
def __copy__(self) -> 'Label':
|
||||
return Label(string=self.string,
|
||||
offset=self.offset.copy(),
|
||||
layer=self.layer,
|
||||
locked=self.locked)
|
||||
|
||||
def __deepcopy__(self, memo: Dict = None) -> 'Label':
|
||||
memo = {} if memo is None else memo
|
||||
new = copy.copy(self).unlock()
|
||||
new._offset = self._offset.copy()
|
||||
new.locked = self.locked
|
||||
return new
|
||||
|
||||
# ---- Non-abstract methods
|
||||
def copy(self) -> 'Label':
|
||||
"""
|
||||
Returns a deep copy of the shape.
|
||||
|
||||
:return: Deep copy of self
|
||||
Returns a deep copy of the label.
|
||||
"""
|
||||
return copy.deepcopy(self)
|
||||
|
||||
def translate(self, offset: vector2) -> 'Label':
|
||||
"""
|
||||
Translate the shape by the given offset
|
||||
Translate the label by the given offset
|
||||
|
||||
:param offset: [x_offset, y,offset]
|
||||
:return: self
|
||||
Args:
|
||||
offset: [x_offset, y_offset]
|
||||
|
||||
Returns:
|
||||
self
|
||||
"""
|
||||
self.offset += offset
|
||||
return self
|
||||
|
||||
def rotate_around(self, pivot: vector2, rotation: float) -> 'Label':
|
||||
"""
|
||||
Rotate the shape around a point.
|
||||
Rotate the label around a point.
|
||||
|
||||
:param pivot: Point (x, y) to rotate around
|
||||
:param rotation: Angle to rotate by (counterclockwise, radians)
|
||||
:return: self
|
||||
Args:
|
||||
pivot: Point (x, y) to rotate around
|
||||
rotation: Angle to rotate by (counterclockwise, radians)
|
||||
|
||||
Returns:
|
||||
self
|
||||
"""
|
||||
pivot = numpy.array(pivot, dtype=float)
|
||||
self.translate(-pivot)
|
||||
@ -122,8 +144,33 @@ class Label:
|
||||
bounds = [self.offset,
|
||||
self.offset]
|
||||
|
||||
:return: Bounds [[xmin, xmax], [ymin, ymax]]
|
||||
Returns:
|
||||
Bounds [[xmin, xmax], [ymin, ymax]]
|
||||
"""
|
||||
return numpy.array([self.offset, self.offset])
|
||||
|
||||
def lock(self) -> 'Label':
|
||||
"""
|
||||
Lock the Label, causing any modifications to raise an exception.
|
||||
|
||||
Returns:
|
||||
self
|
||||
"""
|
||||
self.offset.flags.writeable = False
|
||||
object.__setattr__(self, 'locked', True)
|
||||
return self
|
||||
|
||||
def unlock(self) -> 'Label':
|
||||
"""
|
||||
Unlock the Label, re-allowing changes.
|
||||
|
||||
Returns:
|
||||
self
|
||||
"""
|
||||
object.__setattr__(self, 'locked', False)
|
||||
self.offset.flags.writeable = True
|
||||
return self
|
||||
|
||||
def __repr__(self) -> str:
|
||||
locked = ' L' if self.locked else ''
|
||||
return f'<Label "{self.string}" l{self.layer} o{self.offset}{locked}>'
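# Illustrative usage (not part of the diff):
#   lab = Label(string='port_A', offset=(5.0, 0.0), layer=(1, 2))
#   lab.rotate_around((0, 0), pi / 2)   # offset becomes approximately (0.0, 5.0)
#   lab.lock()                          # further modifications raise PatternLockedError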
|
||||
|
@ -1,58 +1,77 @@
|
||||
"""
|
||||
Base object for containing a lithography mask.
|
||||
Base object representing a lithography mask.
|
||||
"""
|
||||
|
||||
from typing import List, Callable, Tuple, Dict, Union
|
||||
from typing import List, Callable, Tuple, Dict, Union, Set, Sequence, Optional, Type, overload
|
||||
from typing import MutableMapping, Iterable
|
||||
import copy
|
||||
import itertools
|
||||
import pickle
|
||||
from collections import defaultdict
|
||||
|
||||
import numpy
|
||||
from numpy import inf
|
||||
# .visualize imports matplotlib and matplotlib.collections
|
||||
|
||||
from .subpattern import SubPattern
|
||||
from .subpattern import SubPattern, subpattern_t
|
||||
from .repetition import GridRepetition
|
||||
from .shapes import Shape, Polygon
|
||||
from .label import Label
|
||||
from .utils import rotation_matrix_2d, vector2
|
||||
from .error import PatternError
|
||||
from .utils import rotation_matrix_2d, vector2, normalize_mirror
|
||||
from .error import PatternError, PatternLockedError
|
||||
|
||||
__author__ = 'Jan Petykiewicz'
|
||||
|
||||
visitor_function_t = Callable[['Pattern', Tuple['Pattern'], Dict, numpy.ndarray], 'Pattern']
|
||||
|
||||
|
||||
class Pattern:
|
||||
"""
|
||||
2D layout consisting of some set of shapes and references to other Pattern objects
|
||||
(via SubPattern). Shapes are assumed to inherit from .shapes.Shape or provide equivalent
|
||||
functions.
|
||||
|
||||
:var shapes: List of all shapes in this Pattern. Elements in this list are assumed to inherit
|
||||
from Shape or provide equivalent functions.
|
||||
:var subpatterns: List of all SubPattern objects in this Pattern. Multiple SubPattern objects
|
||||
may reference the same Pattern object.
|
||||
:var name: An identifier for this object. Not necessarily unique.
|
||||
2D layout consisting of some set of shapes, labels, and references to other Pattern objects
|
||||
(via SubPattern and GridRepetition). Shapes are assumed to inherit from
|
||||
masque.shapes.Shape or provide equivalent functions.
|
||||
"""
|
||||
shapes = None # type: List[Shape]
|
||||
labels = None # type: List[Labels]
|
||||
subpatterns = None # type: List[SubPattern or GridRepetition]
|
||||
name = None # type: str
|
||||
__slots__ = ('shapes', 'labels', 'subpatterns', 'name', 'locked')
|
||||
|
||||
shapes: List[Shape]
|
||||
""" List of all shapes in this Pattern.
|
||||
Elements in this list are assumed to inherit from Shape or provide equivalent functions.
|
||||
"""
|
||||
|
||||
labels: List[Label]
|
||||
""" List of all labels in this Pattern. """
|
||||
|
||||
subpatterns: List[subpattern_t]
|
||||
""" List of all objects referencing other patterns in this Pattern.
|
||||
Examples are SubPattern (gdsii "instances") or GridRepetition (gdsii "arrays")
|
||||
Multiple objects in this list may reference the same Pattern object
|
||||
(multiple instances of the same object).
|
||||
"""
|
||||
|
||||
name: str
|
||||
""" A name for this pattern """
|
||||
|
||||
locked: bool
|
||||
""" When the pattern is locked, no changes may be made. """
|
||||
|
||||
def __init__(self,
|
||||
shapes: List[Shape]=(),
|
||||
labels: List[Label]=(),
|
||||
subpatterns: List[SubPattern]=(),
|
||||
name: str='',
|
||||
name: str = '',
|
||||
shapes: Sequence[Shape] = (),
|
||||
labels: Sequence[Label] = (),
|
||||
subpatterns: Sequence[subpattern_t] = (),
|
||||
locked: bool = False,
|
||||
):
|
||||
"""
|
||||
Basic init; arguments get assigned to member variables.
|
||||
Non-list inputs for shapes and subpatterns get converted to lists.
|
||||
|
||||
:param shapes: Initial shapes in the Pattern
|
||||
:param labels: Initial labels in the Pattern
|
||||
:param subpatterns: Initial subpatterns in the Pattern
|
||||
:param name: An identifier for the Pattern
|
||||
Args:
|
||||
shapes: Initial shapes in the Pattern
|
||||
labels: Initial labels in the Pattern
|
||||
subpatterns: Initial subpatterns in the Pattern
|
||||
name: An identifier for the Pattern
|
||||
locked: Whether to lock the pattern after construction
|
||||
"""
|
||||
object.__setattr__(self, 'locked', False)
|
||||
if isinstance(shapes, list):
|
||||
self.shapes = shapes
|
||||
else:
|
||||
@ -69,14 +88,39 @@ class Pattern:
|
||||
self.subpatterns = list(subpatterns)
|
||||
|
||||
self.name = name
|
||||
self.locked = locked
|
||||
|
||||
def __setattr__(self, name, value):
|
||||
if self.locked and name != 'locked':
|
||||
raise PatternLockedError()
|
||||
object.__setattr__(self, name, value)
|
||||
|
||||
def __copy__(self, memo: Dict = None) -> 'Pattern':
|
||||
return Pattern(name=self.name,
|
||||
shapes=copy.deepcopy(self.shapes),
|
||||
labels=copy.deepcopy(self.labels),
|
||||
subpatterns=[copy.copy(sp) for sp in self.subpatterns],
|
||||
locked=self.locked)
|
||||
|
||||
def __deepcopy__(self, memo: Dict = None) -> 'Pattern':
|
||||
memo = {} if memo is None else memo
|
||||
new = Pattern(name=self.name,
|
||||
shapes=copy.deepcopy(self.shapes, memo),
|
||||
labels=copy.deepcopy(self.labels, memo),
|
||||
subpatterns=copy.deepcopy(self.subpatterns, memo),
|
||||
locked=self.locked)
|
||||
return new
|
||||
|
||||
def append(self, other_pattern: 'Pattern') -> 'Pattern':
|
||||
"""
|
||||
Appends all shapes, labels and subpatterns from other_pattern to self's shapes,
|
||||
labels, and subpatterns.
|
||||
|
||||
:param other_pattern: The Pattern to append
|
||||
:return: self
|
||||
Args:
|
||||
other_pattern: The Pattern to append
|
||||
|
||||
Returns:
|
||||
self
|
||||
"""
|
||||
self.subpatterns += other_pattern.subpatterns
|
||||
self.shapes += other_pattern.shapes
|
||||
@ -84,28 +128,33 @@ class Pattern:
|
||||
return self
|
||||
|
||||
def subset(self,
|
||||
shapes_func: Callable[[Shape], bool]=None,
|
||||
labels_func: Callable[[Label], bool]=None,
|
||||
subpatterns_func: Callable[[SubPattern], bool]=None,
|
||||
recursive: bool=False,
|
||||
shapes_func: Callable[[Shape], bool] = None,
|
||||
labels_func: Callable[[Label], bool] = None,
|
||||
subpatterns_func: Callable[[subpattern_t], bool] = None,
|
||||
recursive: bool = False,
|
||||
) -> 'Pattern':
|
||||
"""
|
||||
Returns a Pattern containing only the entities (e.g. shapes) for which the
|
||||
given entity_func returns True.
|
||||
Self is _not_ altered, but shapes, labels, and subpatterns are _not_ copied.
|
||||
|
||||
:param shapes_func: Given a shape, returns a boolean denoting whether the shape is a member
|
||||
of the subset. Default always returns False.
|
||||
:param labels_func: Given a label, returns a boolean denoting whether the label is a member
|
||||
of the subset. Default always returns False.
|
||||
:param subpatterns_func: Given a subpattern, returns a boolean denoting if it is a member
|
||||
of the subset. Default always returns False.
|
||||
:param recursive: If True, also calls .subset() recursively on patterns referenced by this
|
||||
pattern.
|
||||
:return: A Pattern containing all the shapes and subpatterns for which the parameter
|
||||
functions return True
|
||||
Args:
|
||||
shapes_func: Given a shape, returns a boolean denoting whether the shape is a member
|
||||
of the subset. Default always returns False.
|
||||
labels_func: Given a label, returns a boolean denoting whether the label is a member
|
||||
of the subset. Default always returns False.
|
||||
subpatterns_func: Given a subpattern, returns a boolean denoting if it is a member
|
||||
of the subset. Default always returns False.
|
||||
recursive: If True, also calls .subset() recursively on patterns referenced by this
|
||||
pattern.
|
||||
|
||||
Returns:
|
||||
A Pattern containing all the shapes and subpatterns for which the parameter
|
||||
functions return True
|
||||
"""
|
||||
def do_subset(src):
|
||||
def do_subset(src: Optional['Pattern']) -> Optional['Pattern']:
|
||||
if src is None:
|
||||
return None
|
||||
pat = Pattern(name=src.name)
|
||||
if shapes_func is not None:
|
||||
pat.shapes = [s for s in src.shapes if shapes_func(s)]
|
||||
@ -119,12 +168,14 @@ class Pattern:
|
||||
pat = self.apply(do_subset)
|
||||
else:
|
||||
pat = do_subset(self)
|
||||
|
||||
assert(pat is not None)
|
||||
return pat
|
||||
|
||||
def apply(self,
|
||||
func: Callable[['Pattern'], 'Pattern'],
|
||||
memo: Dict[int, 'Pattern']=None,
|
||||
) -> 'Pattern':
|
||||
func: Callable[[Optional['Pattern']], Optional['Pattern']],
|
||||
memo: Optional[Dict[int, Optional['Pattern']]] = None,
|
||||
) -> Optional['Pattern']:
|
||||
"""
|
||||
Recursively apply func() to this pattern and any pattern it references.
|
||||
func() is expected to take and return a Pattern.
|
||||
@ -132,12 +183,17 @@ class Pattern:
|
||||
It is only applied to any given pattern once, regardless of how many times it is
|
||||
referenced.
|
||||
|
||||
:param func: Function which accepts a Pattern, and returns a pattern.
|
||||
:param memo: Dictionary used to avoid re-running on multiply-referenced patterns.
|
||||
Stores {id(pattern): func(pattern)} for patterns which have already been processed.
|
||||
Default None (no already-processed patterns).
|
||||
:return: The result of applying func() to this pattern and all subpatterns.
|
||||
:raises: PatternError if called on a pattern containing a circular reference.
|
||||
Args:
|
||||
func: Function which accepts a Pattern, and returns a pattern.
|
||||
memo: Dictionary used to avoid re-running on multiply-referenced patterns.
|
||||
Stores `{id(pattern): func(pattern)}` for patterns which have already been processed.
|
||||
Default `None` (no already-processed patterns).
|
||||
|
||||
Returns:
|
||||
The result of applying func() to this pattern and all subpatterns.
|
||||
|
||||
Raises:
|
||||
PatternError if called on a pattern containing a circular reference.
|
||||
"""
|
||||
if memo is None:
|
||||
memo = {}
|
||||
@ -146,8 +202,12 @@ class Pattern:
|
||||
if pat_id not in memo:
|
||||
memo[pat_id] = None
|
||||
pat = func(self)
|
||||
for subpat in pat.subpatterns:
|
||||
subpat.pattern = subpat.pattern.apply(func, memo)
|
||||
if pat is not None:
|
||||
for subpat in pat.subpatterns:
|
||||
if subpat.pattern is None:
|
||||
subpat.pattern = func(None)
|
||||
else:
|
||||
subpat.pattern = subpat.pattern.apply(func, memo)
|
||||
memo[pat_id] = pat
|
||||
elif memo[pat_id] is None:
|
||||
raise PatternError('.apply() called on pattern with circular reference')
|
||||
@ -155,41 +215,130 @@ class Pattern:
|
||||
pat = memo[pat_id]
|
||||
return pat
|
||||
|
||||
def dfs(self,
|
||||
visit_before: visitor_function_t = None,
|
||||
visit_after: visitor_function_t = None,
|
||||
transform: Union[numpy.ndarray, bool, None] = False,
|
||||
memo: Optional[Dict] = None,
|
||||
hierarchy: Tuple['Pattern', ...] = (),
|
||||
) -> 'Pattern':
|
||||
"""
|
||||
Experimental convenience function.
|
||||
Performs a depth-first traversal of this pattern and its subpatterns.
|
||||
At each pattern in the tree, the following sequence is called:
|
||||
```
|
||||
current_pattern = visit_before(current_pattern, **visit_args)
|
||||
for sp in current_pattern.subpatterns:
|
||||
sp.pattern = sp.pattern.dfs(visit_before, visit_after, updated_transform,
|
||||
memo, (current_pattern,) + hierarchy)
|
||||
current_pattern = visit_after(current_pattern, **visit_args)
|
||||
```
|
||||
where `visit_args` are
|
||||
`hierarchy`: (top_pattern, L1_pattern, L2_pattern, ..., parent_pattern)
|
||||
tuple of all parent-and-higher patterns
|
||||
`transform`: numpy.ndarray containing cumulative
|
||||
[x_offset, y_offset, rotation (rad), mirror_x (0 or 1)]
|
||||
for the instance being visited
|
||||
`memo`: Arbitrary dict (not altered except by visit_*())
|
||||
|
||||
Args:
|
||||
visit_before: Function to call before traversing subpatterns.
|
||||
Should accept a `Pattern` and `**visit_args`, and return the (possibly modified)
|
||||
pattern. Default `None` (not called).
|
||||
visit_after: Function to call after traversing subpatterns.
|
||||
Should accept a Pattern and **visit_args, and return the (possibly modified)
|
||||
pattern. Default `None` (not called).
|
||||
transform: Initial value for `visit_args['transform']`.
|
||||
Can be `False`, in which case the transform is not calculated.
|
||||
`True` or `None` is interpreted as `[0, 0, 0, 0]`.
|
||||
memo: Arbitrary dict for use by `visit_*()` functions. Default `None` (empty dict).
|
||||
hierarchy: Tuple of patterns specifying the hierarchy above the current pattern.
|
||||
Appended to the start of the generated `visit_args['hierarchy']`.
|
||||
Default is an empty tuple.
|
||||
|
||||
Returns:
|
||||
The result, including `visit_before(self, ...)` and `visit_after(self, ...)`.
|
||||
Note that `self` may also be altered!
|
||||
"""
|
||||
if memo is None:
|
||||
memo = {}
|
||||
|
||||
if transform is None or transform is True:
|
||||
transform = numpy.zeros(4)
|
||||
|
||||
if self in hierarchy:
|
||||
raise PatternError('.dfs() called on pattern with circular reference')
|
||||
|
||||
pat = self
|
||||
if visit_before is not None:
|
||||
pat = visit_before(pat, hierarchy=hierarchy, memo=memo, transform=transform) # type: ignore
|
||||
|
||||
for subpattern in self.subpatterns:
|
||||
if transform is not False:
|
||||
sign = numpy.ones(2)
|
||||
if transform[3]:
|
||||
sign[1] = -1
|
||||
xy = numpy.dot(rotation_matrix_2d(transform[2]), subpattern.offset * sign)
|
||||
mirror_x, angle = normalize_mirror(subpattern.mirrored)
|
||||
angle += subpattern.rotation
|
||||
sp_transform = transform + (xy[0], xy[1], angle, mirror_x)
|
||||
sp_transform[3] %= 2
|
||||
else:
|
||||
sp_transform = False
|
||||
|
||||
if subpattern.pattern is not None:
|
||||
subpattern.pattern = subpattern.pattern.dfs(visit_before=visit_before,
|
||||
visit_after=visit_after,
|
||||
transform=sp_transform,
|
||||
memo=memo,
|
||||
hierarchy=hierarchy + (self,))
|
||||
|
||||
if visit_after is not None:
|
||||
pat = visit_after(pat, hierarchy=hierarchy, memo=memo, transform=transform) # type: ignore
|
||||
return pat
|
||||
|
||||
def polygonize(self,
|
||||
poly_num_points: int=None,
|
||||
poly_max_arclen: float=None
|
||||
poly_num_points: Optional[int] = None,
|
||||
poly_max_arclen: Optional[float] = None,
|
||||
) -> 'Pattern':
|
||||
"""
|
||||
Calls .to_polygons(...) on all the shapes in this Pattern and any referenced patterns,
|
||||
Calls `.to_polygons(...)` on all the shapes in this Pattern and any referenced patterns,
|
||||
replacing them with the returned polygons.
|
||||
Arguments are passed directly to shape.to_polygons(...).
|
||||
Arguments are passed directly to `shape.to_polygons(...)`.
|
||||
|
||||
:param poly_num_points: Number of points to use for each polygon. Can be overridden by
|
||||
poly_max_arclen if that results in more points. Optional, defaults to shapes'
|
||||
internal defaults.
|
||||
:param poly_max_arclen: Maximum arclength which can be approximated by a single line
|
||||
Args:
|
||||
poly_num_points: Number of points to use for each polygon. Can be overridden by
|
||||
`poly_max_arclen` if that results in more points. Optional, defaults to shapes'
|
||||
internal defaults.
|
||||
poly_max_arclen: Maximum arclength which can be approximated by a single line
|
||||
segment. Optional, defaults to shapes' internal defaults.
|
||||
:return: self
|
||||
|
||||
Returns:
|
||||
self
|
||||
"""
|
||||
old_shapes = self.shapes
|
||||
self.shapes = list(itertools.chain.from_iterable(
|
||||
(shape.to_polygons(poly_num_points, poly_max_arclen)
|
||||
for shape in old_shapes)))
|
||||
for subpat in self.subpatterns:
|
||||
subpat.pattern.polygonize(poly_num_points, poly_max_arclen)
|
||||
if subpat.pattern is not None:
|
||||
subpat.pattern.polygonize(poly_num_points, poly_max_arclen)
|
||||
return self
|
||||
|
||||
def manhattanize(self,
|
||||
grid_x: numpy.ndarray,
|
||||
grid_y: numpy.ndarray
|
||||
grid_y: numpy.ndarray,
|
||||
) -> 'Pattern':
|
||||
"""
|
||||
Calls .polygonize() and .flatten on the pattern, then calls .manhattanize() on all the
|
||||
Calls `.polygonize()` and `.flatten()` on the pattern, then calls `.manhattanize()` on all the
|
||||
resulting shapes, replacing them with the returned Manhattan polygons.
|
||||
|
||||
:param grid_x: List of allowed x-coordinates for the Manhattanized polygon edges.
|
||||
:param grid_y: List of allowed y-coordinates for the Manhattanized polygon edges.
|
||||
:return: self
|
||||
Args:
|
||||
grid_x: List of allowed x-coordinates for the Manhattanized polygon edges.
|
||||
grid_y: List of allowed y-coordinates for the Manhattanized polygon edges.
|
||||
|
||||
Returns:
|
||||
self
|
||||
"""
|
||||
|
||||
self.polygonize().flatten()
|
||||
@ -199,26 +348,30 @@ class Pattern:
|
||||
return self
|
||||
|
||||
def subpatternize(self,
|
||||
recursive: bool=True,
|
||||
norm_value: int=1e6,
|
||||
exclude_types: Tuple[Shape]=(Polygon,)
|
||||
recursive: bool = True,
|
||||
norm_value: int = int(1e6),
|
||||
exclude_types: Tuple[Type] = (Polygon,)
|
||||
) -> 'Pattern':
|
||||
"""
|
||||
Iterates through this Pattern and all referenced Patterns. Within each Pattern, it iterates
|
||||
over all shapes, calling .normalized_form(norm_value) on them to retrieve a scale-,
|
||||
Iterates through this `Pattern` and all referenced `Pattern`s. Within each `Pattern`, it iterates
|
||||
over all shapes, calling `.normalized_form(norm_value)` on them to retrieve a scale-,
|
||||
offset-, dose-, and rotation-independent form. Each shape whose normalized form appears
|
||||
more than once is removed and re-added using subpattern objects referencing a newly-created
|
||||
Pattern containing only the normalized form of the shape.
|
||||
`Pattern` containing only the normalized form of the shape.
|
||||
|
||||
Note that the default norm_value was chosen to give a reasonable precision when converting
|
||||
to GDSII, which uses integer values for pixel coordinates.
|
||||
Note:
|
||||
The default norm_value was chosen to give a reasonable precision when converting
|
||||
to GDSII, which uses integer values for pixel coordinates.
|
||||
|
||||
:param recursive: Whether to call recursively on self's subpatterns. Default True.
|
||||
:param norm_value: Passed to shape.normalized_form(norm_value). Default 1e6 (see function
|
||||
Args:
|
||||
recursive: Whether to call recursively on self's subpatterns. Default `True`.
|
||||
norm_value: Passed to `shape.normalized_form(norm_value)`. Default `1e6` (see function
|
||||
note about GDSII)
|
||||
:param exclude_types: Shape types passed in this argument are always left untouched, for
|
||||
speed or convenience. Default: (Shapes.Polygon,)
|
||||
:return: self
|
||||
exclude_types: Shape types passed in this argument are always left untouched, for
|
||||
speed or convenience. Default: `(shapes.Polygon,)`
|
||||
|
||||
Returns:
|
||||
self
|
||||
"""
|
||||
|
||||
if exclude_types is None:
|
||||
@ -226,14 +379,16 @@ class Pattern:
|
||||
|
||||
if recursive:
|
||||
for subpat in self.subpatterns:
|
||||
if subpat.pattern is None:
|
||||
continue
|
||||
subpat.pattern.subpatternize(recursive=True,
|
||||
norm_value=norm_value,
|
||||
exclude_types=exclude_types)
|
||||
|
||||
# Create a dict which uses the label tuple from .normalized_form() as a key, and which
|
||||
# stores (function_to_create_normalized_shape, [(index_in_shapes, values), ...]), where
|
||||
# values are the (offset, scale, rotation, dose) values as calculated by .normalized_form()
|
||||
shape_table = defaultdict(lambda: [None, list()])
|
||||
# Create a dict which uses the label tuple from `.normalized_form()` as a key, and which
|
||||
# stores `(function_to_create_normalized_shape, [(index_in_shapes, values), ...])`, where
|
||||
# values are the `(offset, scale, rotation, dose)` values as calculated by `.normalized_form()`
|
||||
shape_table: MutableMapping[Tuple, List] = defaultdict(lambda: [None, list()])
|
||||
for i, shape in enumerate(self.shapes):
|
||||
if not any((isinstance(shape, t) for t in exclude_types)):
|
||||
label, values, func = shape.normalized_form(norm_value)
|
||||
@ -241,9 +396,9 @@ class Pattern:
|
||||
shape_table[label][1].append((i, values))
|
||||
|
||||
# Iterate over the normalized shapes in the table. If any normalized shape occurs more than
|
||||
# once, create a Pattern holding a normalized shape object, and add self.subpatterns
|
||||
# once, create a `Pattern` holding a normalized shape object, and add `self.subpatterns`
|
||||
# entries for each occurrence in self. Also, note down that we should delete the
|
||||
# self.shapes entries for which we made SubPatterns.
|
||||
# `self.shapes` entries for which we made SubPatterns.
|
||||
shapes_to_remove = []
|
||||
for label in shape_table:
|
||||
if len(shape_table[label][1]) > 1:
|
||||
@ -251,9 +406,9 @@ class Pattern:
|
||||
pat = Pattern(shapes=[shape])
|
||||
|
||||
for i, values in shape_table[label][1]:
|
||||
(offset, scale, rotation, dose) = values
|
||||
(offset, scale, rotation, mirror_x, dose) = values
|
||||
subpat = SubPattern(pattern=pat, offset=offset, scale=scale,
|
||||
rotation=rotation, dose=dose)
|
||||
rotation=rotation, dose=dose, mirrored=(mirror_x, False))
|
||||
self.subpatterns.append(subpat)
|
||||
shapes_to_remove.append(i)
|
||||
|
||||
@ -267,85 +422,155 @@ class Pattern:
|
||||
"""
|
||||
Represents the pattern as a list of polygons.
|
||||
|
||||
Deep-copies the pattern, then calls .polygonize() and .flatten() on the copy in order to
|
||||
Deep-copies the pattern, then calls `.polygonize()` and `.flatten()` on the copy in order to
|
||||
generate the list of polygons.
|
||||
|
||||
:return: A list of (Ni, 2) numpy.ndarrays specifying vertices of the polygons. Each ndarray
|
||||
is of the form [[x0, y0], [x1, y1],...].
|
||||
Returns:
|
||||
A list of `(Ni, 2)` `numpy.ndarray`s specifying vertices of the polygons. Each ndarray
|
||||
is of the form `[[x0, y0], [x1, y1],...]`.
|
||||
"""
|
||||
pat = copy.deepcopy(self).polygonize().flatten()
|
||||
return [shape.vertices + shape.offset for shape in pat.shapes]
|
||||
pat = self.deepcopy().deepunlock().polygonize().flatten()
|
||||
return [shape.vertices + shape.offset for shape in pat.shapes] # type: ignore # mypy can't figure out that shapes are all Polygons now
|
||||
|
||||
@overload
|
||||
def referenced_patterns_by_id(self) -> Dict[int, 'Pattern']:
|
||||
pass
|
||||
|
||||
@overload
|
||||
def referenced_patterns_by_id(self, include_none: bool) -> Dict[int, Optional['Pattern']]:
|
||||
pass
|
||||
|
||||
def referenced_patterns_by_id(self,
|
||||
include_none: bool = False
|
||||
) -> Union[Dict[int, Optional['Pattern']],
|
||||
Dict[int, 'Pattern']]:
|
||||
|
||||
"""
|
||||
Create a dictionary of {id(pat): pat} for all Pattern objects referenced by this
|
||||
Create a dictionary with `{id(pat): pat}` for all Pattern objects referenced by this
|
||||
Pattern (operates recursively on all referenced Patterns as well)
|
||||
|
||||
:return: Dictionary of {id(pat): pat} for all referenced Pattern objects
|
||||
Args:
|
||||
include_none: If `True`, references to `None` will be included. Default `False`.
|
||||
|
||||
Returns:
|
||||
Dictionary with `{id(pat): pat}` for all referenced Pattern objects
|
||||
"""
|
||||
ids = {}
|
||||
ids: Dict[int, Optional['Pattern']] = {}
|
||||
for subpat in self.subpatterns:
|
||||
if id(subpat.pattern) not in ids:
|
||||
ids[id(subpat.pattern)] = subpat.pattern
|
||||
ids.update(subpat.pattern.referenced_patterns_by_id())
|
||||
if subpat.pattern is not None:
|
||||
ids[id(subpat.pattern)] = subpat.pattern
|
||||
ids.update(subpat.pattern.referenced_patterns_by_id())
|
||||
elif include_none:
|
||||
ids[id(subpat.pattern)] = subpat.pattern
|
||||
return ids
|
||||
|
||||
def referenced_patterns_by_name(self, **kwargs) -> List[Tuple[Optional[str], Optional['Pattern']]]:
|
||||
"""
|
||||
Create a list of `(pat.name, pat)` tuples for all Pattern objects referenced by this
|
||||
Pattern (operates recursively on all referenced Patterns as well).
|
||||
|
||||
Note that names are not necessarily unique, so a list of tuples is returned
|
||||
rather than a dict.
|
||||
|
||||
Args:
|
||||
**kwargs: passed to `referenced_patterns_by_id()`.
|
||||
|
||||
Returns:
|
||||
List of `(pat.name, pat)` tuples for all referenced Pattern objects
|
||||
"""
|
||||
pats_by_id = self.referenced_patterns_by_id(**kwargs)
|
||||
pat_list = [(p.name if p is not None else None, p) for p in pats_by_id.values()]
|
||||
return pat_list
|
||||
|
||||
def get_bounds(self) -> Union[numpy.ndarray, None]:
|
||||
"""
|
||||
Return a numpy.ndarray containing [[x_min, y_min], [x_max, y_max]], corresponding to the
|
||||
Return a `numpy.ndarray` containing `[[x_min, y_min], [x_max, y_max]]`, corresponding to the
|
||||
extent of the Pattern's contents in each dimension.
|
||||
Returns None if the Pattern is empty.
|
||||
Returns `None` if the Pattern is empty.
|
||||
|
||||
:return: [[x_min, y_min], [x_max, y_max]] or None
|
||||
Returns:
|
||||
`[[x_min, y_min], [x_max, y_max]]` or `None`
|
||||
"""
|
||||
entries = self.shapes + self.subpatterns + self.labels
|
||||
if not entries:
|
||||
return None
|
||||
|
||||
init_bounds = entries[0].get_bounds()
|
||||
min_bounds = init_bounds[0, :]
|
||||
max_bounds = init_bounds[1, :]
|
||||
for entry in entries[1:]:
|
||||
min_bounds = numpy.array((+inf, +inf))
|
||||
max_bounds = numpy.array((-inf, -inf))
|
||||
for entry in entries:
|
||||
bounds = entry.get_bounds()
|
||||
if bounds is None:
|
||||
continue
|
||||
min_bounds = numpy.minimum(min_bounds, bounds[0, :])
|
||||
max_bounds = numpy.maximum(max_bounds, bounds[1, :])
|
||||
return numpy.vstack((min_bounds, max_bounds))
|
||||
if (max_bounds < min_bounds).any():
|
||||
return None
|
||||
else:
|
||||
return numpy.vstack((min_bounds, max_bounds))
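A minimal numpy sketch of the accumulation above, using made-up per-entry bounds; seeding with `+inf`/`-inf` lets entries that return `None` be skipped, and the final comparison detects the all-empty case:

```python
import numpy
from numpy import inf

# Stand-ins for entry.get_bounds(): each is [[x_min, y_min], [x_max, y_max]] or None.
entry_bounds = [
    numpy.array([[0.0, 0.0], [2.0, 1.0]]),
    None,                                   # e.g. an empty referenced pattern
    numpy.array([[-3.0, 0.5], [1.0, 4.0]]),
]

min_bounds = numpy.array((+inf, +inf))
max_bounds = numpy.array((-inf, -inf))
for bounds in entry_bounds:
    if bounds is None:
        continue
    min_bounds = numpy.minimum(min_bounds, bounds[0, :])
    max_bounds = numpy.maximum(max_bounds, bounds[1, :])

if (max_bounds < min_bounds).any():
    total = None                            # every entry was empty
else:
    total = numpy.vstack((min_bounds, max_bounds))

print(total)
# [[-3.  0.]
#  [ 2.  4.]]
```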
|
||||
|
||||
def flatten(self) -> 'Pattern':
|
||||
"""
|
||||
Removes all subpatterns and adds equivalent shapes.
|
||||
|
||||
:return: self
|
||||
Shape identifiers are changed to represent their original position in the
|
||||
pattern hierarchy:
|
||||
`(L1_name (str), L1_index (int), L2_name, L2_index, ..., *original_shape_identifier)`
|
||||
where
|
||||
`L1_name` is the first-level subpattern's name (e.g. `self.subpatterns[0].pattern.name`),
|
||||
`L2_name` is the next-level subpattern's name (e.g.
|
||||
`self.subpatterns[0].pattern.subpatterns[0].pattern.name`) and
|
||||
`L1_index` is an integer used to differentiate between multiple instances of the same
|
||||
(or same-named) subpatterns.
|
||||
|
||||
Returns:
|
||||
self
|
||||
"""
|
||||
subpatterns = copy.deepcopy(self.subpatterns)
|
||||
self.subpatterns = []
|
||||
shape_counts: Dict[Tuple, int] = {}
|
||||
for subpat in subpatterns:
|
||||
if subpat.pattern is None:
|
||||
continue
|
||||
subpat.pattern.flatten()
|
||||
p = subpat.as_pattern()
|
||||
self.shapes += p.shapes
|
||||
self.labels += p.labels
|
||||
|
||||
# Update identifiers so each shape has a unique one
|
||||
for shape in p.shapes:
|
||||
combined_identifier = (subpat.pattern.name,) + shape.identifier
|
||||
shape_count = shape_counts.get(combined_identifier, 0)
|
||||
shape.identifier = (subpat.pattern.name, shape_count) + shape.identifier
|
||||
shape_counts[combined_identifier] = shape_count + 1
|
||||
|
||||
self.append(p)
|
||||
return self
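A standalone sketch of the identifier bookkeeping described in the docstring, with made-up subpattern names and shape identifiers; the per-`(name, identifier)` counter is what keeps repeated instances distinguishable after flattening:

```python
# Made-up (subpattern name, original shape identifier) pairs.
shape_counts = {}
flattened = []
for subpat_name, orig_identifier in [('grating', ('arc', 0)),
                                     ('grating', ('arc', 0)),   # same shape, second instance
                                     ('pad',     ('rect',))]:
    combined = (subpat_name,) + orig_identifier
    count = shape_counts.get(combined, 0)
    flattened.append((subpat_name, count) + orig_identifier)
    shape_counts[combined] = count + 1

print(flattened)
# [('grating', 0, 'arc', 0), ('grating', 1, 'arc', 0), ('pad', 0, 'rect')]
```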
|
||||
|
||||
def translate_elements(self, offset: vector2) -> 'Pattern':
|
||||
"""
|
||||
Translates all shapes, labels, and subpatterns by the given offset.
|
||||
|
||||
:param offset: Offset to translate by
|
||||
:return: self
|
||||
Args:
|
||||
offset: (x, y) to translate by
|
||||
|
||||
Returns:
|
||||
self
|
||||
"""
|
||||
for entry in self.shapes + self.subpatterns + self.labels:
|
||||
entry.translate(offset)
|
||||
return self
|
||||
|
||||
def scale_elements(self, scale: float) -> 'Pattern':
|
||||
def scale_elements(self, c: float) -> 'Pattern':
|
||||
""""
|
||||
Scales all shapes and subpatterns by the given value.
|
||||
|
||||
:param scale: value to scale by
|
||||
:return: self
|
||||
Args:
|
||||
c: factor to scale by
|
||||
|
||||
Returns:
|
||||
self
|
||||
"""
|
||||
for entry in self.shapes + self.subpatterns:
|
||||
entry.scale(scale)
|
||||
entry.scale_by(c)
|
||||
return self
|
||||
|
||||
def scale_by(self, c: float) -> 'Pattern':
|
||||
@ -353,21 +578,29 @@ class Pattern:
|
||||
Scale this Pattern by the given value
|
||||
(all shapes and subpatterns and their offsets are scaled)
|
||||
|
||||
:param c: value to scale by
|
||||
:return: self
|
||||
Args:
|
||||
c: factor to scale by
|
||||
|
||||
Returns:
|
||||
self
|
||||
"""
|
||||
for entry in self.shapes + self.subpatterns:
|
||||
entry.offset *= c
|
||||
entry.scale_by(c)
|
||||
for label in self.labels:
|
||||
label.offset *= c
|
||||
return self
|
||||
|
||||
def rotate_around(self, pivot: vector2, rotation: float) -> 'Pattern':
|
||||
"""
|
||||
Rotate the Pattern around a given location.
|
||||
|
||||
:param pivot: Location to rotate around
|
||||
:param rotation: Angle to rotate by (counter-clockwise, radians)
|
||||
:return: self
|
||||
Args:
|
||||
pivot: (x, y) location to rotate around
|
||||
rotation: Angle to rotate by (counter-clockwise, radians)
|
||||
|
||||
Returns:
|
||||
self
|
||||
"""
|
||||
pivot = numpy.array(pivot)
|
||||
self.translate_elements(-pivot)
|
||||
@ -380,8 +613,11 @@ class Pattern:
|
||||
"""
|
||||
Rotate the offsets of all shapes, labels, and subpatterns around (0, 0)
|
||||
|
||||
:param rotation: Angle to rotate by (counter-clockwise, radians)
|
||||
:return: self
|
||||
Args:
|
||||
rotation: Angle to rotate by (counter-clockwise, radians)
|
||||
|
||||
Returns:
|
||||
self
|
||||
"""
|
||||
for entry in self.shapes + self.subpatterns + self.labels:
|
||||
entry.offset = numpy.dot(rotation_matrix_2d(rotation), entry.offset)
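For reference, a small numpy sketch of the rotation applied to each offset; the `rotation_matrix_2d` below is a local stand-in with the usual counterclockwise convention, not necessarily identical to the one in `masque.utils`:

```python
import numpy

def rotation_matrix_2d(theta: float) -> numpy.ndarray:
    # Counterclockwise rotation matrix, matching the sign convention in the docstrings.
    return numpy.array([[numpy.cos(theta), -numpy.sin(theta)],
                        [numpy.sin(theta),  numpy.cos(theta)]])

offset = numpy.array([1.0, 0.0])
rotated = numpy.dot(rotation_matrix_2d(numpy.pi / 2), offset)
print(numpy.round(rotated, 12))   # [0. 1.] -- +x rotates onto +y for a CCW quarter turn
```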
|
||||
@ -391,8 +627,11 @@ class Pattern:
|
||||
"""
|
||||
Rotate each shape and subpattern around its center (offset)
|
||||
|
||||
:param rotation: Angle to rotate by (counter-clockwise, radians)
|
||||
:return: self
|
||||
Args:
|
||||
rotation: Angle to rotate by (counter-clockwise, radians)
|
||||
|
||||
Returns:
|
||||
self
|
||||
"""
|
||||
for entry in self.shapes + self.subpatterns:
|
||||
entry.rotate(rotation)
|
||||
@ -402,8 +641,12 @@ class Pattern:
|
||||
"""
|
||||
Mirror the offsets of all shapes, labels, and subpatterns across an axis
|
||||
|
||||
:param axis: Axis to mirror across
|
||||
:return: self
|
||||
Args:
|
||||
axis: Axis to mirror across
|
||||
(0: mirror across x axis, 1: mirror across y axis)
|
||||
|
||||
Returns:
|
||||
self
|
||||
"""
|
||||
for entry in self.shapes + self.subpatterns + self.labels:
|
||||
entry.offset[axis - 1] *= -1
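The `axis - 1` indexing is easy to misread: mirroring across the x axis (`axis=0`) must negate the y coordinate, and `offset[-1]` is exactly that element. A two-line check:

```python
import numpy

offset = numpy.array([3.0, 5.0])
axis = 0                      # mirror across the x axis
offset[axis - 1] *= -1        # offset[-1] is the y component
print(offset)                 # [ 3. -5.]
```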
|
||||
@ -412,10 +655,14 @@ class Pattern:
|
||||
def mirror_elements(self, axis: int) -> 'Pattern':
|
||||
"""
|
||||
Mirror each shape and subpattern across an axis, relative to its
|
||||
center (offset)
|
||||
offset
|
||||
|
||||
:param axis: Axis to mirror across
|
||||
:return: self
|
||||
Args:
|
||||
axis: Axis to mirror across
|
||||
(0: mirror across x axis, 1: mirror across y axis)
|
||||
|
||||
Returns:
|
||||
self
|
||||
"""
|
||||
for entry in self.shapes + self.subpatterns:
|
||||
entry.mirror(axis)
|
||||
@ -425,87 +672,167 @@ class Pattern:
|
||||
"""
|
||||
Mirror the Pattern across an axis
|
||||
|
||||
:param axis: Axis to mirror across
|
||||
:return: self
|
||||
Args:
|
||||
axis: Axis to mirror across
|
||||
(0: mirror across x axis, 1: mirror across y axis)
|
||||
|
||||
Returns:
|
||||
self
|
||||
"""
|
||||
self.mirror_elements(axis)
|
||||
self.mirror_element_centers(axis)
|
||||
return self
|
||||
|
||||
def scale_element_doses(self, factor: float) -> 'Pattern':
|
||||
def scale_element_doses(self, c: float) -> 'Pattern':
|
||||
"""
|
||||
Multiply all shape and subpattern doses by a factor
|
||||
|
||||
:param factor: Factor to multiply doses by
|
||||
:return: self
|
||||
Args:
|
||||
c: Factor to multiply doses by
|
||||
|
||||
Returns:
|
||||
self
|
||||
"""
|
||||
for entry in self.shapes + self.subpatterns:
|
||||
entry.dose *= factor
|
||||
entry.dose *= c
|
||||
return self
|
||||
|
||||
def copy(self) -> 'Pattern':
|
||||
"""
|
||||
Return a copy of the Pattern, deep-copying shapes and copying subpattern entries, but not
|
||||
deep-copying any referenced patterns.
|
||||
Return a copy of the Pattern, deep-copying shapes and copying subpattern
|
||||
entries, but not deep-copying any referenced patterns.
|
||||
|
||||
See also: Pattern.deepcopy()
|
||||
See also: `Pattern.deepcopy()`
|
||||
|
||||
:return: A copy of the current Pattern.
|
||||
Returns:
|
||||
A copy of the current Pattern.
|
||||
"""
|
||||
cp = copy.copy(self)
|
||||
cp.shapes = copy.deepcopy(cp.shapes)
|
||||
cp.labels = copy.deepcopy(cp.labels)
|
||||
cp.subpatterns = [copy.copy(subpat) for subpat in cp.subpatterns]
|
||||
return cp
|
||||
return copy.copy(self)
|
||||
|
||||
def deepcopy(self) -> 'Pattern':
|
||||
"""
|
||||
Convenience method for copy.deepcopy(pattern)
|
||||
Convenience method for `copy.deepcopy(pattern)`
|
||||
|
||||
:return: A deep copy of the current Pattern.
|
||||
Returns:
|
||||
A deep copy of the current Pattern.
|
||||
"""
|
||||
return copy.deepcopy(self)
|
||||
|
||||
def is_empty(self) -> bool:
|
||||
"""
|
||||
Returns:
|
||||
True if the pattern contains no shapes, labels, or subpatterns.
|
||||
"""
|
||||
return (len(self.subpatterns) == 0 and
|
||||
len(self.shapes) == 0 and
|
||||
len(self.labels) == 0)
|
||||
|
||||
def lock(self) -> 'Pattern':
|
||||
"""
|
||||
Lock the pattern, raising an exception if it is modified.
|
||||
Also see `deeplock()`.
|
||||
|
||||
Returns:
|
||||
self
|
||||
"""
|
||||
self.shapes = tuple(self.shapes)
|
||||
self.labels = tuple(self.labels)
|
||||
self.subpatterns = tuple(self.subpatterns)
|
||||
object.__setattr__(self, 'locked', True)
|
||||
return self
|
||||
|
||||
def unlock(self) -> 'Pattern':
|
||||
"""
|
||||
Unlock the pattern
|
||||
|
||||
Returns:
|
||||
self
|
||||
"""
|
||||
object.__setattr__(self, 'locked', False)
|
||||
self.shapes = list(self.shapes)
|
||||
self.labels = list(self.labels)
|
||||
self.subpatterns = list(self.subpatterns)
|
||||
return self
|
||||
|
||||
def deeplock(self) -> 'Pattern':
|
||||
"""
|
||||
Recursively lock the pattern, all referenced shapes, subpatterns, and labels.
|
||||
|
||||
Returns:
|
||||
self
|
||||
"""
|
||||
self.lock()
|
||||
for ss in self.shapes + self.labels:
|
||||
ss.lock()
|
||||
for sp in self.subpatterns:
|
||||
sp.deeplock()
|
||||
return self
|
||||
|
||||
def deepunlock(self) -> 'Pattern':
|
||||
"""
|
||||
Recursively unlock the pattern, all referenced shapes, subpatterns, and labels.
|
||||
|
||||
This is dangerous unless you have just performed a deepcopy, since anything
|
||||
you change will be changed everywhere it is referenced!
|
||||
|
||||
Returns:
|
||||
self
|
||||
"""
|
||||
self.unlock()
|
||||
for ss in self.shapes + self.labels:
|
||||
ss.unlock()
|
||||
for sp in self.subpatterns:
|
||||
sp.deepunlock()
|
||||
return self
|
||||
|
||||
@staticmethod
|
||||
def load(filename: str) -> 'Pattern':
|
||||
"""
|
||||
Load a Pattern from a file
|
||||
Load a Pattern from a file using pickle
|
||||
|
||||
:param filename: Filename to load from
|
||||
:return: Loaded Pattern
|
||||
Args:
|
||||
filename: Filename to load from
|
||||
|
||||
Returns:
|
||||
Loaded Pattern
|
||||
"""
|
||||
with open(filename, 'rb') as f:
|
||||
tmp_dict = pickle.load(f)
|
||||
pattern = pickle.load(f)
|
||||
|
||||
pattern = Pattern()
|
||||
pattern.__dict__.update(tmp_dict)
|
||||
return pattern
|
||||
|
||||
def save(self, filename: str) -> 'Pattern':
|
||||
"""
|
||||
Save the Pattern to a file
|
||||
Save the Pattern to a file using pickle
|
||||
|
||||
:param filename: Filename to save to
|
||||
:return: self
|
||||
Args:
|
||||
filename: Filename to save to
|
||||
|
||||
Returns:
|
||||
self
|
||||
"""
|
||||
with open(filename, 'wb') as f:
|
||||
pickle.dump(self.__dict__, f, protocol=2)
|
||||
pickle.dump(self, f, protocol=pickle.HIGHEST_PROTOCOL)
|
||||
return self
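A minimal round-trip using the two methods above; the `Pattern(name=...)` constructor call and the filename are assumptions made for illustration:

```python
import masque

pat = masque.Pattern(name='example')              # assumed constructor signature
pat.save('example.pickle')                        # pickles the whole Pattern object
restored = masque.Pattern.load('example.pickle')
print(restored.name)                              # 'example'
```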
|
||||
|
||||
def visualize(self,
|
||||
offset: vector2=(0., 0.),
|
||||
line_color: str='k',
|
||||
fill_color: str='none',
|
||||
overdraw: bool=False):
|
||||
offset: vector2 = (0., 0.),
|
||||
line_color: str = 'k',
|
||||
fill_color: str = 'none',
|
||||
overdraw: bool = False):
|
||||
"""
|
||||
Draw a picture of the Pattern and wait for the user to inspect it
|
||||
|
||||
Imports matplotlib.
|
||||
Imports `matplotlib`.
|
||||
|
||||
:param offset: Coordinates to offset by before drawing
|
||||
:param line_color: Outlines are drawn with this color (passed to matplotlib PolyCollection)
|
||||
:param fill_color: Interiors are drawn with this color (passed to matplotlib PolyCollection)
|
||||
:param overdraw: Whether to create a new figure or draw on a pre-existing one
|
||||
Note that this can be slow; it is often faster to export to GDSII and use
|
||||
klayout or a different GDS viewer!
|
||||
|
||||
Args:
|
||||
offset: Coordinates to offset by before drawing
|
||||
line_color: Outlines are drawn with this color (passed to `matplotlib.collections.PolyCollection`)
|
||||
fill_color: Interiors are drawn with this color (passed to `matplotlib.collections.PolyCollection`)
|
||||
overdraw: Whether to create a new figure or draw on a pre-existing one
|
||||
"""
|
||||
# TODO: add text labels to visualize()
|
||||
from matplotlib import pyplot
|
||||
@ -537,3 +864,39 @@ class Pattern:
|
||||
|
||||
if not overdraw:
|
||||
pyplot.show()
|
||||
|
||||
@staticmethod
|
||||
def find_toplevel(patterns: Iterable['Pattern']) -> List['Pattern']:
|
||||
"""
|
||||
Given a list of Pattern objects, return those that are not referenced by
|
||||
any other pattern.
|
||||
|
||||
Args:
|
||||
patterns: A list of patterns to filter.
|
||||
|
||||
Returns:
|
||||
A filtered list in which no pattern is referenced by any other pattern.
|
||||
"""
|
||||
def get_children(pat: Pattern, memo: Set) -> Set:
|
||||
if pat in memo:
|
||||
return memo
|
||||
|
||||
children = set(sp.pattern for sp in pat.subpatterns if sp.pattern is not None)
|
||||
new_children = children - memo
|
||||
memo |= children
|
||||
|
||||
for child_pat in new_children:
|
||||
memo |= get_children(child_pat, memo)
|
||||
return memo
|
||||
|
||||
patterns = set(patterns)
|
||||
not_toplevel: Set['Pattern'] = set()
|
||||
for pattern in patterns:
|
||||
not_toplevel |= get_children(pattern, not_toplevel)
|
||||
|
||||
toplevel = list(patterns - not_toplevel)
|
||||
return toplevel
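A sketch of how `find_toplevel` might be used on a small hierarchy; the `Pattern(name=...)` and `SubPattern(pattern=...)` calls are assumed from their use elsewhere in this changeset:

```python
from masque import Pattern, SubPattern

leaf = Pattern(name='leaf')                   # assumed constructor signature
mid = Pattern(name='mid')
mid.subpatterns.append(SubPattern(pattern=leaf))
top = Pattern(name='top')
top.subpatterns.append(SubPattern(pattern=mid))

# Only 'top' is not referenced by any other pattern in the list.
print([p.name for p in Pattern.find_toplevel([top, mid, leaf])])   # ['top']
```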
|
||||
|
||||
def __repr__(self) -> str:
|
||||
locked = ' L' if self.locked else ''
|
||||
return (f'<Pattern "{self.name}": sh{len(self.shapes)} sp{len(self.subpatterns)} la{len(self.labels)}{locked}>')
|
||||
|
81
masque/positionable.py
Normal file
@ -0,0 +1,81 @@
|
||||
from typing import List, Tuple, Callable, TypeVar, Optional
|
||||
from abc import ABCMeta, abstractmethod
|
||||
import copy
|
||||
import numpy
|
||||
|
||||
from ..error import PatternError, PatternLockedError
|
||||
from ..utils import is_scalar, rotation_matrix_2d, vector2, layer_t
|
||||
|
||||
|
||||
T = TypeVar('T', bound='Positionable')
|
||||
|
||||
|
||||
class Positionable(metaclass=ABCMeta):
|
||||
"""
|
||||
Abstract class for all positionable entities
|
||||
"""
|
||||
__slots__ = ('_offset',)
|
||||
|
||||
_offset: numpy.ndarray
|
||||
""" `[x_offset, y_offset]` """
|
||||
|
||||
# --- Abstract methods
|
||||
@abstractmethod
|
||||
def get_bounds(self) -> numpy.ndarray:
|
||||
"""
|
||||
Returns `[[x_min, y_min], [x_max, y_max]]` which specify a minimal bounding box for the entity.
|
||||
"""
|
||||
pass
|
||||
|
||||
# ---- Non-abstract properties
|
||||
# offset property
|
||||
@property
|
||||
def offset(self) -> numpy.ndarray:
|
||||
"""
|
||||
[x, y] offset
|
||||
"""
|
||||
return self._offset
|
||||
|
||||
@offset.setter
|
||||
def offset(self, val: vector2):
|
||||
if not isinstance(val, numpy.ndarray):
|
||||
val = numpy.array(val, dtype=float)
|
||||
|
||||
if val.size != 2:
|
||||
raise PatternError('Offset must be convertible to size-2 ndarray')
|
||||
self._offset = val.flatten()
|
||||
|
||||
|
||||
# ---- Non-abstract methods
|
||||
def translate(self: T, offset: vector2) -> T:
|
||||
"""
|
||||
Translate the entity by the given offset
|
||||
|
||||
Args:
|
||||
offset: [x_offset, y_offset]
|
||||
|
||||
Returns:
|
||||
self
|
||||
"""
|
||||
self.offset += offset
|
||||
return self
|
||||
|
||||
def lock(self: T) -> T:
|
||||
"""
|
||||
Lock the entity, disallowing further changes
|
||||
|
||||
Returns:
|
||||
self
|
||||
"""
|
||||
self.offset.flags.writeable = False
|
||||
return self
|
||||
|
||||
def unlock(self: T) -> T:
|
||||
"""
|
||||
Unlock the entity
|
||||
|
||||
Returns:
|
||||
self
|
||||
"""
|
||||
self.offset.flags.writeable = True
|
||||
return self
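The locking here relies on numpy's per-array write flag rather than on `__setattr__`; a quick standalone demonstration of the mechanism:

```python
import numpy

offset = numpy.array([1.0, 2.0])
offset.flags.writeable = False      # what lock() does to the offset array
try:
    offset += (5.0, 0.0)
except ValueError as err:           # numpy refuses to modify a read-only array
    print(err)
offset.flags.writeable = True       # unlock() restores writability
offset += (5.0, 0.0)
print(offset)                       # [6. 2.]
```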
|
@ -3,63 +3,140 @@
|
||||
instances of a Pattern in the same parent Pattern.
|
||||
"""
|
||||
|
||||
from typing import Union, List
|
||||
from typing import Union, List, Dict, Tuple, Optional, Sequence, TYPE_CHECKING, Any
|
||||
import copy
|
||||
|
||||
import numpy
|
||||
from numpy import pi
|
||||
|
||||
from .error import PatternError
|
||||
from .error import PatternError, PatternLockedError
|
||||
from .utils import is_scalar, rotation_matrix_2d, vector2
|
||||
|
||||
|
||||
__author__ = 'Jan Petykiewicz'
|
||||
if TYPE_CHECKING:
|
||||
from . import Pattern
|
||||
|
||||
|
||||
# TODO need top-level comment about what order rotation/scale/offset/mirror/array are applied
|
||||
|
||||
class GridRepetition:
|
||||
"""
|
||||
GridRepetition provides support for efficiently embedding multiple copies of a Pattern
|
||||
into another Pattern at regularly-spaced offsets.
|
||||
GridRepetition provides support for efficiently embedding multiple copies of a `Pattern`
|
||||
into another `Pattern` at regularly-spaced offsets.
|
||||
|
||||
Note that rotation, scaling, and mirroring are applied to individual instances of the
|
||||
pattern, not to the grid vectors.
|
||||
|
||||
The order of operations is
|
||||
1. A single reference instance of the target pattern is mirrored.
|
||||
2. The single instance is rotated.
|
||||
3. The instance is scaled by the scaling factor.
|
||||
4. The instance is shifted by the provided offset
|
||||
(no mirroring/scaling/rotation is applied to the offset).
|
||||
5. Additional copies of the instance will appear at coordinates specified by
|
||||
`(offset + aa * a_vector + bb * b_vector)`, with `aa in range(0, a_count)`
|
||||
and `bb in range(0, b_count)`. All instance locations remain unaffected by
|
||||
mirroring/scaling/rotation, though each instance's data will be transformed
|
||||
relative to the instance's location (i.e. relative to the contained pattern's
|
||||
(0, 0) point).
|
||||
"""
|
||||
__slots__ = ('_pattern',
|
||||
'_offset',
|
||||
'_rotation',
|
||||
'_dose',
|
||||
'_scale',
|
||||
'_mirrored',
|
||||
'_a_vector',
|
||||
'_b_vector',
|
||||
'_a_count',
|
||||
'_b_count',
|
||||
'identifier',
|
||||
'locked')
|
||||
|
||||
_pattern: Optional['Pattern']
|
||||
""" The `Pattern` being instanced """
|
||||
|
||||
_offset: numpy.ndarray
|
||||
""" (x, y) offset for the base instance """
|
||||
|
||||
_dose: float
|
||||
""" Scaling factor applied to the dose """
|
||||
|
||||
_rotation: float
|
||||
""" Rotation of the individual instances in the grid (not the grid vectors).
|
||||
Radians, counterclockwise.
|
||||
"""
|
||||
|
||||
pattern = None # type: Pattern
|
||||
_scale: float
|
||||
""" Scaling factor applied to individual instances in the grid (not the grid vectors) """
|
||||
|
||||
_offset = (0.0, 0.0) # type: numpy.ndarray
|
||||
_rotation = 0.0 # type: float
|
||||
_dose = 1.0 # type: float
|
||||
_scale = 1.0 # type: float
|
||||
_mirrored = None # type: List[bool]
|
||||
_mirrored: numpy.ndarray # ndarray[bool]
|
||||
""" Whether to mirror individual instances across the x and y axes
|
||||
(Applies to individual instances in the grid, not the grid vectors)
|
||||
"""
|
||||
|
||||
_a_vector = None # type: numpy.ndarray
|
||||
_b_vector = None # type: numpy.ndarray
|
||||
a_count = None # type: int
|
||||
b_count = 1 # type: int
|
||||
_a_vector: numpy.ndarray
|
||||
""" Vector `[x, y]` specifying the first lattice vector of the grid.
|
||||
Specifies center-to-center spacing between adjacent elements.
|
||||
"""
|
||||
|
||||
_a_count: int
|
||||
""" Number of instances along the direction specified by the `a_vector` """
|
||||
|
||||
_b_vector: Optional[numpy.ndarray]
|
||||
""" Vector `[x, y]` specifying a second lattice vector for the grid.
|
||||
Specifies center-to-center spacing between adjacent elements.
|
||||
Can be `None` for a 1D array.
|
||||
"""
|
||||
|
||||
_b_count: int
|
||||
""" Number of instances along the direction specified by the `b_vector` """
|
||||
|
||||
identifier: Tuple[Any, ...]
|
||||
""" Arbitrary identifier, used internally by some `masque` functions. """
|
||||
|
||||
locked: bool
|
||||
""" If `True`, disallows changes to the GridRepetition """
|
||||
|
||||
def __init__(self,
|
||||
pattern: 'Pattern',
|
||||
pattern: Optional['Pattern'],
|
||||
a_vector: numpy.ndarray,
|
||||
a_count: int,
|
||||
b_vector: numpy.ndarray = None,
|
||||
b_count: int = 1,
|
||||
b_vector: Optional[numpy.ndarray] = None,
|
||||
b_count: Optional[int] = 1,
|
||||
offset: vector2 = (0.0, 0.0),
|
||||
rotation: float = 0.0,
|
||||
mirrored: List[bool] = None,
|
||||
mirrored: Optional[Sequence[bool]] = None,
|
||||
dose: float = 1.0,
|
||||
scale: float = 1.0):
|
||||
scale: float = 1.0,
|
||||
locked: bool = False,
|
||||
identifier: Tuple[Any, ...] = ()):
|
||||
"""
|
||||
:param a_vector: First lattice vector, of the form [x, y].
|
||||
Specifies center-to-center spacing between adjacent elements.
|
||||
:param a_count: Number of elements in the a_vector direction.
|
||||
:param b_vector: Second lattice vector, of the form [x, y].
|
||||
Specifies center-to-center spacing between adjacent elements.
|
||||
Can be omitted when specifying a 1D array.
|
||||
:param b_count: Number of elements in the b_vector direction.
|
||||
Should be omitted if b_vector was omitted.
|
||||
:raises: InvalidDataError if b_* inputs conflict with each other
|
||||
or a_count < 1.
|
||||
Args:
|
||||
pattern: Pattern to reference.
|
||||
a_vector: First lattice vector, of the form `[x, y]`.
|
||||
Specifies center-to-center spacing between adjacent instances.
|
||||
a_count: Number of elements in the a_vector direction.
|
||||
b_vector: Second lattice vector, of the form `[x, y]`.
|
||||
Specifies center-to-center spacing between adjacent instances.
|
||||
Can be omitted when specifying a 1D array.
|
||||
b_count: Number of elements in the `b_vector` direction.
|
||||
Should be omitted if `b_vector` was omitted.
|
||||
offset: (x, y) offset applied to all instances.
|
||||
rotation: Rotation (radians, counterclockwise) applied to each instance.
|
||||
Relative to each instance's (0, 0).
|
||||
mirrored: Whether to mirror individual instances across the x and y axes.
|
||||
dose: Scaling factor applied to the dose.
|
||||
scale: Scaling factor applied to the instances' geometry.
|
||||
locked: Whether the `GridRepetition` is locked after initialization.
|
||||
identifier: Arbitrary tuple, used internally by some `masque` functions.
|
||||
|
||||
Raises:
|
||||
PatternError if `b_*` inputs conflict with each other
|
||||
or `a_count < 1`.
|
||||
"""
|
||||
if b_count is None:
|
||||
b_count = 1
|
||||
|
||||
if b_vector is None:
|
||||
if b_count > 1:
|
||||
raise PatternError('Repetition has b_count > 1 but no b_vector')
|
||||
@ -67,16 +144,19 @@ class GridRepetition:
|
||||
b_vector = numpy.array([0.0, 0.0])
|
||||
|
||||
if a_count < 1:
|
||||
raise InvalidDataError('Repetition has too-small a_count: '
|
||||
'{}'.format(a_count))
|
||||
raise PatternError('Repetition has too-small a_count: '
|
||||
'{}'.format(a_count))
|
||||
if b_count < 1:
|
||||
raise InvalidDataError('Repetition has too-small b_count: '
|
||||
'{}'.format(b_count))
|
||||
raise PatternError('Repetition has too-small b_count: '
|
||||
'{}'.format(b_count))
|
||||
|
||||
object.__setattr__(self, 'locked', False)
|
||||
self.a_vector = a_vector
|
||||
self.b_vector = b_vector
|
||||
self.a_count = a_count
|
||||
self.b_count = b_count
|
||||
|
||||
self.identifier = identifier
|
||||
self.pattern = pattern
|
||||
self.offset = offset
|
||||
self.rotation = rotation
|
||||
@ -85,6 +165,45 @@ class GridRepetition:
|
||||
if mirrored is None:
|
||||
mirrored = [False, False]
|
||||
self.mirrored = mirrored
|
||||
self.locked = locked
|
||||
|
||||
def __setattr__(self, name, value):
|
||||
if self.locked and name != 'locked':
|
||||
raise PatternLockedError()
|
||||
object.__setattr__(self, name, value)
|
||||
|
||||
def __copy__(self) -> 'GridRepetition':
|
||||
new = GridRepetition(pattern=self.pattern,
|
||||
a_vector=self.a_vector.copy(),
|
||||
b_vector=copy.copy(self.b_vector),
|
||||
a_count=self.a_count,
|
||||
b_count=self.b_count,
|
||||
offset=self.offset.copy(),
|
||||
rotation=self.rotation,
|
||||
dose=self.dose,
|
||||
scale=self.scale,
|
||||
mirrored=self.mirrored.copy(),
|
||||
locked=self.locked)
|
||||
return new
|
||||
|
||||
def __deepcopy__(self, memo: Dict = None) -> 'GridRepetition':
|
||||
memo = {} if memo is None else memo
|
||||
new = copy.copy(self).unlock()
|
||||
new.pattern = copy.deepcopy(self.pattern, memo)
|
||||
new.locked = self.locked
|
||||
return new
|
||||
|
||||
# pattern property
|
||||
@property
|
||||
def pattern(self) -> Optional['Pattern']:
|
||||
return self._pattern
|
||||
|
||||
@pattern.setter
|
||||
def pattern(self, val: Optional['Pattern']):
|
||||
from .pattern import Pattern
|
||||
if val is not None and not isinstance(val, Pattern):
|
||||
raise PatternError('Provided pattern {} is not a Pattern object or None!'.format(val))
|
||||
self._pattern = val
|
||||
|
||||
# offset property
|
||||
@property
|
||||
@ -93,6 +212,9 @@ class GridRepetition:
|
||||
|
||||
@offset.setter
|
||||
def offset(self, val: vector2):
|
||||
if self.locked:
|
||||
raise PatternLockedError()
|
||||
|
||||
if not isinstance(val, numpy.ndarray):
|
||||
val = numpy.array(val, dtype=float)
|
||||
|
||||
@ -139,14 +261,14 @@ class GridRepetition:
|
||||
|
||||
# Mirrored property
|
||||
@property
|
||||
def mirrored(self) -> List[bool]:
|
||||
def mirrored(self) -> numpy.ndarray: # ndarray[bool]
|
||||
return self._mirrored
|
||||
|
||||
@mirrored.setter
|
||||
def mirrored(self, val: List[bool]):
|
||||
def mirrored(self, val: Sequence[bool]):
|
||||
if is_scalar(val):
|
||||
raise PatternError('Mirrored must be a 2-element list of booleans')
|
||||
self._mirrored = val
|
||||
self._mirrored = numpy.array(val, dtype=bool, copy=True)
|
||||
|
||||
# a_vector property
|
||||
@property
|
||||
@ -160,7 +282,7 @@ class GridRepetition:
|
||||
|
||||
if val.size != 2:
|
||||
raise PatternError('a_vector must be convertible to size-2 ndarray')
|
||||
self._a_vector = val.flatten()
|
||||
self._a_vector = val.flatten().astype(float)
|
||||
|
||||
# b_vector property
|
||||
@property
|
||||
@ -170,30 +292,50 @@ class GridRepetition:
|
||||
@b_vector.setter
|
||||
def b_vector(self, val: vector2):
|
||||
if not isinstance(val, numpy.ndarray):
|
||||
val = numpy.array(val, dtype=float)
|
||||
val = numpy.array(val, dtype=float, copy=True)
|
||||
|
||||
if val.size != 2:
|
||||
raise PatternError('b_vector must be convertible to size-2 ndarray')
|
||||
self._b_vector = val.flatten()
|
||||
|
||||
# a_count property
|
||||
@property
|
||||
def a_count(self) -> int:
|
||||
return self._a_count
|
||||
|
||||
@a_count.setter
|
||||
def a_count(self, val: int):
|
||||
if val != int(val):
|
||||
raise PatternError('a_count must be convertible to an int!')
|
||||
self._a_count = int(val)
|
||||
|
||||
# b_count property
|
||||
@property
|
||||
def b_count(self) -> int:
|
||||
return self._b_count
|
||||
|
||||
@b_count.setter
|
||||
def b_count(self, val: int):
|
||||
if val != int(val):
|
||||
raise PatternError('b_count must be convertible to an int!')
|
||||
self._b_count = int(val)
|
||||
|
||||
def as_pattern(self) -> 'Pattern':
|
||||
"""
|
||||
Returns a copy of self.pattern which has been scaled, rotated, etc. according to this
|
||||
SubPattern's properties.
|
||||
:return: Copy of self.pattern that has been altered to reflect the SubPattern's properties.
|
||||
"""
|
||||
#xy = numpy.array(element.xy)
|
||||
#origin = xy[0]
|
||||
#col_spacing = (xy[1] - origin) / element.cols
|
||||
#row_spacing = (xy[2] - origin) / element.rows
|
||||
Returns a copy of self.pattern which has been scaled, rotated, repeated, etc.
|
||||
according to this `GridRepetition`'s properties.
|
||||
|
||||
Returns:
|
||||
A copy of self.pattern which has been scaled, rotated, repeated, etc.
|
||||
according to this `GridRepetition`'s properties.
|
||||
"""
|
||||
assert(self.pattern is not None)
|
||||
patterns = []
|
||||
|
||||
for a in range(self.a_count):
|
||||
for b in range(self.b_count):
|
||||
offset = a * self.a_vector + b * self.b_vector
|
||||
newPat = self.pattern.deepcopy()
|
||||
newPat = self.pattern.deepcopy().deepunlock()
|
||||
newPat.translate_elements(offset)
|
||||
patterns.append(newPat)
|
||||
|
||||
@ -213,19 +355,25 @@ class GridRepetition:
|
||||
"""
|
||||
Translate by the given offset
|
||||
|
||||
:param offset: Translate by this offset
|
||||
:return: self
|
||||
Args:
|
||||
offset: `[x, y]` to translate by
|
||||
|
||||
Returns:
|
||||
self
|
||||
"""
|
||||
self.offset += offset
|
||||
return self
|
||||
|
||||
def rotate_around(self, pivot: vector2, rotation: float) -> 'GridRepetition':
|
||||
"""
|
||||
Rotate around a point
|
||||
Rotate the array around a point
|
||||
|
||||
:param pivot: Point to rotate around
|
||||
:param rotation: Angle to rotate by (counterclockwise, radians)
|
||||
:return: self
|
||||
Args:
|
||||
pivot: Point `[x, y]` to rotate around
|
||||
rotation: Angle to rotate by (counterclockwise, radians)
|
||||
|
||||
Returns:
|
||||
self
|
||||
"""
|
||||
pivot = numpy.array(pivot, dtype=float)
|
||||
self.translate(-pivot)
|
||||
@ -238,37 +386,101 @@ class GridRepetition:
|
||||
"""
|
||||
Rotate around (0, 0)
|
||||
|
||||
:param rotation: Angle to rotate by (counterclockwise, radians)
|
||||
:return: self
|
||||
Args:
|
||||
rotation: Angle to rotate by (counterclockwise, radians)
|
||||
|
||||
Returns:
|
||||
self
|
||||
"""
|
||||
self.rotate_elements(rotation)
|
||||
self.a_vector = numpy.dot(rotation_matrix_2d(rotation), self.a_vector)
|
||||
if self.b_vector is not None:
|
||||
self.b_vector = numpy.dot(rotation_matrix_2d(rotation), self.b_vector)
|
||||
return self
|
||||
|
||||
def rotate_elements(self, rotation: float) -> 'GridRepetition':
|
||||
"""
|
||||
Rotate each element around its origin
|
||||
|
||||
Args:
|
||||
rotation: Angle to rotate by (counterclockwise, radians)
|
||||
|
||||
Returns:
|
||||
self
|
||||
"""
|
||||
self.rotation += rotation
|
||||
return self
|
||||
|
||||
def mirror(self, axis: int) -> 'GridRepetition':
|
||||
"""
|
||||
Mirror the subpattern across an axis.
|
||||
Mirror the GridRepetition across an axis.
|
||||
|
||||
:param axis: Axis to mirror across.
|
||||
:return: self
|
||||
Args:
|
||||
axis: Axis to mirror across.
|
||||
(0: mirror across x-axis, 1: mirror across y-axis)
|
||||
|
||||
Returns:
|
||||
self
|
||||
"""
|
||||
self.mirrored[axis] = not self.mirrored[axis]
|
||||
self.mirror_elements(axis)
|
||||
self.a_vector[1-axis] *= -1
|
||||
if self.b_vector is not None:
|
||||
self.b_vector[1-axis] *= -1
|
||||
return self
|
||||
|
||||
def get_bounds(self) -> numpy.ndarray or None:
|
||||
def mirror_elements(self, axis: int) -> 'GridRepetition':
|
||||
"""
|
||||
Return a numpy.ndarray containing [[x_min, y_min], [x_max, y_max]], corresponding to the
|
||||
extent of the SubPattern in each dimension.
|
||||
Returns None if the contained Pattern is empty.
|
||||
Mirror each element across an axis relative to its origin.
|
||||
|
||||
:return: [[x_min, y_min], [x_max, y_max]] or None
|
||||
Args:
|
||||
axis: Axis to mirror across.
|
||||
(0: mirror across x-axis, 1: mirror across y-axis)
|
||||
|
||||
Returns:
|
||||
self
|
||||
"""
|
||||
self.mirrored[axis] = not self.mirrored[axis]
|
||||
self.rotation *= -1
|
||||
return self
|
||||
|
||||
def get_bounds(self) -> Optional[numpy.ndarray]:
|
||||
"""
|
||||
Return a `numpy.ndarray` containing `[[x_min, y_min], [x_max, y_max]]`, corresponding to the
|
||||
extent of the `GridRepetition` in each dimension.
|
||||
Returns `None` if the contained `Pattern` is empty.
|
||||
|
||||
Returns:
|
||||
`[[x_min, y_min], [x_max, y_max]]` or `None`
|
||||
"""
|
||||
if self.pattern is None:
|
||||
return None
|
||||
return self.as_pattern().get_bounds()
|
||||
|
||||
def scale_by(self, c: float) -> 'GridRepetition':
|
||||
"""
|
||||
Scale the subpattern by a factor
|
||||
Scale the GridRepetition by a factor
|
||||
|
||||
:param c: scaling factor
|
||||
Args:
|
||||
c: scaling factor
|
||||
|
||||
Returns:
|
||||
self
|
||||
"""
|
||||
self.scale_elements_by(c)
|
||||
self.a_vector *= c
|
||||
if self.b_vector is not None:
|
||||
self.b_vector *= c
|
||||
return self
|
||||
|
||||
def scale_elements_by(self, c: float) -> 'GridRepetition':
|
||||
"""
|
||||
Scale each element by a factor
|
||||
|
||||
Args:
|
||||
c: scaling factor
|
||||
|
||||
Returns:
|
||||
self
|
||||
"""
|
||||
self.scale *= c
|
||||
return self
|
||||
@ -277,15 +489,84 @@ class GridRepetition:
|
||||
"""
|
||||
Return a shallow copy of the repetition.
|
||||
|
||||
:return: copy.copy(self)
|
||||
Returns:
|
||||
`copy.copy(self)`
|
||||
"""
|
||||
return copy.copy(self)
|
||||
|
||||
def deepcopy(self) -> 'SubPattern':
|
||||
def deepcopy(self) -> 'GridRepetition':
|
||||
"""
|
||||
Return a deep copy of the repetition.
|
||||
|
||||
:return: copy.copy(self)
|
||||
Returns:
|
||||
`copy.deepcopy(self)`
|
||||
"""
|
||||
return copy.deepcopy(self)
|
||||
|
||||
def lock(self) -> 'GridRepetition':
|
||||
"""
|
||||
Lock the `GridRepetition`, disallowing changes.
|
||||
|
||||
Returns:
|
||||
self
|
||||
"""
|
||||
self.offset.flags.writeable = False
|
||||
self.a_vector.flags.writeable = False
|
||||
self.mirrored.flags.writeable = False
|
||||
if self.b_vector is not None:
|
||||
self.b_vector.flags.writeable = False
|
||||
object.__setattr__(self, 'locked', True)
|
||||
return self
|
||||
|
||||
def unlock(self) -> 'GridRepetition':
|
||||
"""
|
||||
Unlock the `GridRepetition`
|
||||
|
||||
Returns:
|
||||
self
|
||||
"""
|
||||
self.offset.flags.writeable = True
|
||||
self.a_vector.flags.writeable = True
|
||||
self.mirrored.flags.writeable = True
|
||||
if self.b_vector is not None:
|
||||
self.b_vector.flags.writeable = True
|
||||
object.__setattr__(self, 'locked', False)
|
||||
return self
|
||||
|
||||
def deeplock(self) -> 'GridRepetition':
|
||||
"""
|
||||
Recursively lock the `GridRepetition` and its contained pattern
|
||||
|
||||
Returns:
|
||||
self
|
||||
"""
|
||||
assert(self.pattern is not None)
|
||||
self.lock()
|
||||
self.pattern.deeplock()
|
||||
return self
|
||||
|
||||
def deepunlock(self) -> 'GridRepetition':
|
||||
"""
|
||||
Recursively unlock the `GridRepetition` and its contained pattern
|
||||
|
||||
This is dangerous unless you have just performed a deepcopy, since
|
||||
the component parts may be reused elsewhere.
|
||||
|
||||
Returns:
|
||||
self
|
||||
"""
|
||||
assert(self.pattern is not None)
|
||||
self.unlock()
|
||||
self.pattern.deepunlock()
|
||||
return self
|
||||
|
||||
def __repr__(self) -> str:
|
||||
name = self.pattern.name if self.pattern is not None else None
|
||||
rotation = f' r{self.rotation*180/pi:g}' if self.rotation != 0 else ''
|
||||
scale = f' d{self.scale:g}' if self.scale != 1 else ''
|
||||
mirrored = ' m{:d}{:d}'.format(*self.mirrored) if self.mirrored.any() else ''
|
||||
dose = f' d{self.dose:g}' if self.dose != 1 else ''
|
||||
locked = ' L' if self.locked else ''
|
||||
bv = f', {self.b_vector}' if self.b_vector is not None else ''
|
||||
return (f'<GridRepetition "{name}" at {self.offset} {rotation}{scale}{mirrored}{dose}'
|
||||
f' {self.a_count}x{self.b_count} ({self.a_vector}{bv}){locked}>')
|
||||
|
@ -10,3 +10,4 @@ from .circle import Circle
|
||||
from .ellipse import Ellipse
|
||||
from .arc import Arc
|
||||
from .text import Text
|
||||
from .path import Path
|
||||
|
@ -1,14 +1,12 @@
|
||||
from typing import List
|
||||
from typing import List, Tuple, Dict, Optional, Sequence
|
||||
import copy
|
||||
import math
|
||||
import numpy
|
||||
from numpy import pi
|
||||
|
||||
from . import Shape, Polygon, normalized_shape_tuple, DEFAULT_POLY_NUM_POINTS
|
||||
from .. import PatternError
|
||||
from ..utils import is_scalar, vector2
|
||||
|
||||
|
||||
__author__ = 'Jan Petykiewicz'
|
||||
from ..utils import is_scalar, vector2, layer_t
|
||||
|
||||
|
||||
class Arc(Shape):
|
||||
@ -20,23 +18,31 @@ class Arc(Shape):
|
||||
The rotation gives the angle from x-axis, counterclockwise, to the first (x) radius.
|
||||
The start and stop angle are measured counterclockwise from the first (x) radius.
|
||||
"""
|
||||
__slots__ = ('_radii', '_angles', '_width', '_rotation',
|
||||
'poly_num_points', 'poly_max_arclen')
|
||||
_radii: numpy.ndarray
|
||||
""" Two radii for defining an ellipse """
|
||||
|
||||
_radii = None # type: numpy.ndarray
|
||||
_angles = None # type: numpy.ndarray
|
||||
_width = 1.0 # type: float
|
||||
_rotation = 0.0 # type: float
|
||||
_rotation: float
|
||||
""" Rotation (ccw, radians) from the x axis to the first radius """
|
||||
|
||||
# Defaults for to_polygons
|
||||
poly_num_points = DEFAULT_POLY_NUM_POINTS # type: int
|
||||
poly_max_arclen = None # type: float
|
||||
_angles: numpy.ndarray
|
||||
""" Start and stop angles (ccw, radians) for choosing an arc from the ellipse, measured from the first radius """
|
||||
|
||||
_width: float
|
||||
""" Width of the arc """
|
||||
|
||||
poly_num_points: Optional[int]
|
||||
""" Sets the default number of points for `.polygonize()` """
|
||||
|
||||
poly_max_arclen: Optional[float]
|
||||
""" Sets the default max segement length for `.polygonize()` """
|
||||
|
||||
# radius properties
|
||||
@property
|
||||
def radii(self) -> numpy.ndarray:
|
||||
"""
|
||||
Return the radii [rx, ry]
|
||||
|
||||
:return: [rx, ry]
|
||||
Return the radii `[rx, ry]`
|
||||
"""
|
||||
return self._radii
|
||||
|
||||
@ -71,12 +77,13 @@ class Arc(Shape):
|
||||
|
||||
# arc start/stop angle properties
|
||||
@property
|
||||
def angles(self) -> vector2:
|
||||
def angles(self) -> numpy.ndarray: #ndarray[float]
|
||||
"""
|
||||
Return the start and stop angles [a_start, a_stop].
|
||||
Return the start and stop angles `[a_start, a_stop]`.
|
||||
Angles are measured from x-axis after rotation
|
||||
|
||||
:return: [a_start, a_stop]
|
||||
Returns:
|
||||
`[a_start, a_stop]`
|
||||
"""
|
||||
return self._angles
|
||||
|
||||
@ -109,7 +116,8 @@ class Arc(Shape):
|
||||
"""
|
||||
Rotation of radius_x from x_axis, counterclockwise, in radians. Stored mod 2*pi
|
||||
|
||||
:return: rotation counterclockwise in radians
|
||||
Returns:
|
||||
rotation counterclockwise in radians
|
||||
"""
|
||||
return self._rotation
|
||||
|
||||
@ -125,7 +133,8 @@ class Arc(Shape):
|
||||
"""
|
||||
Width of the arc (difference between inner and outer radii)
|
||||
|
||||
:return: width
|
||||
Returns:
|
||||
width
|
||||
"""
|
||||
return self._width
|
||||
|
||||
@ -141,13 +150,16 @@ class Arc(Shape):
|
||||
radii: vector2,
|
||||
angles: vector2,
|
||||
width: float,
|
||||
poly_num_points: int=DEFAULT_POLY_NUM_POINTS,
|
||||
poly_max_arclen: float=None,
|
||||
offset: vector2=(0.0, 0.0),
|
||||
rotation: float=0,
|
||||
mirrored: Tuple[bool] = (False, False),
|
||||
layer: int=0,
|
||||
dose: float=1.0):
|
||||
poly_num_points: Optional[int] = DEFAULT_POLY_NUM_POINTS,
|
||||
poly_max_arclen: Optional[float] = None,
|
||||
offset: vector2 = (0.0, 0.0),
|
||||
rotation: float = 0,
|
||||
mirrored: Sequence[bool] = (False, False),
|
||||
layer: layer_t = 0,
|
||||
dose: float = 1.0,
|
||||
locked: bool = False):
|
||||
object.__setattr__(self, 'locked', False)
|
||||
self.identifier = ()
|
||||
self.radii = radii
|
||||
self.angles = angles
|
||||
self.width = width
|
||||
@ -158,8 +170,21 @@ class Arc(Shape):
|
||||
self.dose = dose
|
||||
self.poly_num_points = poly_num_points
|
||||
self.poly_max_arclen = poly_max_arclen
|
||||
self.locked = locked
|
||||
|
||||
def to_polygons(self, poly_num_points: int=None, poly_max_arclen: float=None) -> List[Polygon]:
|
||||
def __deepcopy__(self, memo: Dict = None) -> 'Arc':
|
||||
memo = {} if memo is None else memo
|
||||
new = copy.copy(self).unlock()
|
||||
new._offset = self._offset.copy()
|
||||
new._radii = self._radii.copy()
|
||||
new._angles = self._angles.copy()
|
||||
new.locked = self.locked
|
||||
return new
|
||||
|
||||
def to_polygons(self,
|
||||
poly_num_points: Optional[int] = None,
|
||||
poly_max_arclen: Optional[float] = None,
|
||||
) -> List[Polygon]:
|
||||
if poly_num_points is None:
|
||||
poly_num_points = self.poly_num_points
|
||||
if poly_max_arclen is None:
|
||||
@ -209,12 +234,12 @@ class Arc(Shape):
|
||||
def get_bounds(self) -> numpy.ndarray:
|
||||
'''
|
||||
Equation for rotated ellipse is
|
||||
x = x0 + a * cos(t) * cos(rot) - b * sin(t) * sin(phi)
|
||||
y = y0 + a * cos(t) * sin(rot) + b * sin(t) * cos(rot)
|
||||
where t is our parameter.
|
||||
`x = x0 + a * cos(t) * cos(rot) - b * sin(t) * sin(phi)`
|
||||
`y = y0 + a * cos(t) * sin(rot) + b * sin(t) * cos(rot)`
|
||||
where `t` is our parameter.
|
||||
|
||||
Differentiating and solving for 0 slope wrt. t, we find
|
||||
tan(t) = -+ b/a cot(phi)
|
||||
Differentiating and solving for 0 slope wrt. `t`, we find
|
||||
`tan(t) = -+ b/a cot(phi)`
|
||||
where -+ is for x, y cases, so that's where the extrema are.
|
||||
|
||||
If the extrema are innaccessible due to arc constraints, check the arc endpoints instead.
|
||||
@ -308,13 +333,16 @@ class Arc(Shape):
|
||||
width = self.width
|
||||
|
||||
return (type(self), radii, angles, width/norm_value, self.layer), \
|
||||
(self.offset, scale/norm_value, rotation, self.dose), \
|
||||
(self.offset, scale/norm_value, rotation, False, self.dose), \
|
||||
lambda: Arc(radii=radii*norm_value, angles=angles, width=width*norm_value, layer=self.layer)
|
||||
|
||||
def get_cap_edges(self) -> numpy.ndarray:
|
||||
'''
|
||||
:returns: [[[x0, y0], [x1, y1]], array of 4 points, specifying the two cuts which
|
||||
[[x2, y2], [x3, y3]]], would create this arc from its corresponding ellipse.
|
||||
Returns:
|
||||
```
|
||||
[[[x0, y0], [x1, y1]], array of 4 points, specifying the two cuts which
|
||||
[[x2, y2], [x3, y3]]], would create this arc from its corresponding ellipse.
|
||||
```
|
||||
'''
|
||||
a_ranges = self._angles_to_parameters()
|
||||
|
||||
@ -340,8 +368,9 @@ class Arc(Shape):
|
||||
|
||||
def _angles_to_parameters(self) -> numpy.ndarray:
|
||||
'''
|
||||
:return: "Eccentric anomaly" parameter ranges for the inner and outer edges, in the form
|
||||
[[a_min_inner, a_max_inner], [a_min_outer, a_max_outer]]
|
||||
Returns:
|
||||
"Eccentric anomaly" parameter ranges for the inner and outer edges, in the form
|
||||
`[[a_min_inner, a_max_inner], [a_min_outer, a_max_outer]]`
|
||||
'''
|
||||
a = []
|
||||
for sgn in (-1, +1):
|
||||
@ -357,3 +386,22 @@ class Arc(Shape):
|
||||
|
||||
a.append((a0, a1))
|
||||
return numpy.array(a)
|
||||
|
||||
def lock(self) -> 'Arc':
|
||||
self.radii.flags.writeable = False
|
||||
self.angles.flags.writeable = False
|
||||
Shape.lock(self)
|
||||
return self
|
||||
|
||||
def unlock(self) -> 'Arc':
|
||||
Shape.unlock(self)
|
||||
self.radii.flags.writeable = True
|
||||
self.angles.flags.writeable = True
|
||||
return self
|
||||
|
||||
def __repr__(self) -> str:
|
||||
angles = f' a°{self.angles*180/pi}'
|
||||
rotation = f' r°{self.rotation*180/pi:g}' if self.rotation != 0 else ''
|
||||
dose = f' d{self.dose:g}' if self.dose != 1 else ''
|
||||
locked = ' L' if self.locked else ''
|
||||
return f'<Arc l{self.layer} o{self.offset} r{self.radii}{angles} w{self.width:g}{rotation}{dose}{locked}>'
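A hedged usage sketch for the updated `Arc`; the radii, angles, and tuple layer below are arbitrary example values, and `to_polygons()` falls back to the `poly_num_points`/`poly_max_arclen` defaults set in `__init__`:

```python
import numpy
from masque.shapes import Arc

arc = Arc(radii=(10.0, 10.0),                 # example values
          angles=(0.0, numpy.pi / 2),
          width=0.5,
          layer=(1, 0))                       # layer_t allows tuple layers
polys = arc.to_polygons()                     # uses the instance's polygonization defaults
print(len(polys), polys[0].vertices.shape)
```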
|
||||
|
@ -1,33 +1,32 @@
|
||||
from typing import List
|
||||
from typing import List, Dict, Optional
|
||||
import copy
|
||||
import numpy
|
||||
from numpy import pi
|
||||
|
||||
from . import Shape, Polygon, normalized_shape_tuple, DEFAULT_POLY_NUM_POINTS
|
||||
from .. import PatternError
|
||||
from ..utils import is_scalar, vector2
|
||||
|
||||
|
||||
__author__ = 'Jan Petykiewicz'
|
||||
from ..utils import is_scalar, vector2, layer_t
|
||||
|
||||
|
||||
class Circle(Shape):
|
||||
"""
|
||||
A circle, which has a position and radius.
|
||||
"""
|
||||
__slots__ = ('_radius', 'poly_num_points', 'poly_max_arclen')
|
||||
_radius: float
|
||||
""" Circle radius """
|
||||
|
||||
_radius = None # type: float
|
||||
poly_num_points: Optional[int]
|
||||
""" Sets the default number of points for `.polygonize()` """
|
||||
|
||||
# Defaults for to_polygons
|
||||
poly_num_points = DEFAULT_POLY_NUM_POINTS # type: int
|
||||
poly_max_arclen = None # type: float
|
||||
poly_max_arclen: Optional[float]
|
||||
""" Sets the default max segement length for `.polygonize()` """
|
||||
|
||||
# radius property
|
||||
@property
|
||||
def radius(self) -> float:
|
||||
"""
|
||||
Circle's radius (float, >= 0)
|
||||
|
||||
:return: radius
|
||||
"""
|
||||
return self._radius
|
||||
|
||||
@ -41,19 +40,33 @@ class Circle(Shape):
|
||||
|
||||
def __init__(self,
|
||||
radius: float,
|
||||
poly_num_points: int=DEFAULT_POLY_NUM_POINTS,
|
||||
poly_max_arclen: float=None,
|
||||
offset: vector2=(0.0, 0.0),
|
||||
layer: int=0,
|
||||
dose: float=1.0):
|
||||
poly_num_points: Optional[int] = DEFAULT_POLY_NUM_POINTS,
|
||||
poly_max_arclen: Optional[float] = None,
|
||||
offset: vector2 = (0.0, 0.0),
|
||||
layer: layer_t = 0,
|
||||
dose: float = 1.0,
|
||||
locked: bool = False):
|
||||
object.__setattr__(self, 'locked', False)
|
||||
self.identifier = ()
|
||||
self.offset = numpy.array(offset, dtype=float)
|
||||
self.layer = layer
|
||||
self.dose = dose
|
||||
self.radius = radius
|
||||
self.poly_num_points = poly_num_points
|
||||
self.poly_max_arclen = poly_max_arclen
|
||||
self.locked = locked
|
||||
|
||||
def to_polygons(self, poly_num_points: int=None, poly_max_arclen: float=None) -> List[Polygon]:
|
||||
def __deepcopy__(self, memo: Dict = None) -> 'Circle':
|
||||
memo = {} if memo is None else memo
|
||||
new = copy.copy(self).unlock()
|
||||
new._offset = self._offset.copy()
|
||||
new.locked = self.locked
|
||||
return new
|
||||
|
||||
def to_polygons(self,
|
||||
poly_num_points: Optional[int] = None,
|
||||
poly_max_arclen: Optional[float] = None,
|
||||
) -> List[Polygon]:
|
||||
if poly_num_points is None:
|
||||
poly_num_points = self.poly_num_points
|
||||
if poly_max_arclen is None:
|
||||
@ -94,6 +107,10 @@ class Circle(Shape):
|
||||
rotation = 0.0
|
||||
magnitude = self.radius / norm_value
|
||||
return (type(self), self.layer), \
|
||||
(self.offset, magnitude, rotation, self.dose), \
|
||||
(self.offset, magnitude, rotation, False, self.dose), \
|
||||
lambda: Circle(radius=norm_value, layer=self.layer)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
dose = f' d{self.dose:g}' if self.dose != 1 else ''
|
||||
locked = ' L' if self.locked else ''
|
||||
return f'<Circle l{self.layer} o{self.offset} r{self.radius:g}{dose}{locked}>'
|
||||
|
@ -1,14 +1,12 @@
|
||||
from typing import List, Tuple
|
||||
from typing import List, Tuple, Dict, Sequence, Optional
|
||||
import copy
|
||||
import math
|
||||
import numpy
|
||||
from numpy import pi
|
||||
|
||||
from . import Shape, Polygon, normalized_shape_tuple, DEFAULT_POLY_NUM_POINTS
|
||||
from .. import PatternError
|
||||
from ..utils import is_scalar, rotation_matrix_2d, vector2
|
||||
|
||||
|
||||
__author__ = 'Jan Petykiewicz'
|
||||
from ..utils import is_scalar, rotation_matrix_2d, vector2, layer_t
|
||||
|
||||
|
||||
class Ellipse(Shape):
|
||||
@ -16,21 +14,25 @@ class Ellipse(Shape):
|
||||
An ellipse, which has a position, two radii, and a rotation.
|
||||
The rotation gives the angle from x-axis, counterclockwise, to the first (x) radius.
|
||||
"""
|
||||
__slots__ = ('_radii', '_rotation',
|
||||
'poly_num_points', 'poly_max_arclen')
|
||||
_radii: numpy.ndarray
|
||||
""" Ellipse radii """
|
||||
|
||||
_radii = None # type: numpy.ndarray
|
||||
_rotation = 0.0 # type: float
|
||||
_rotation: float
|
||||
""" Angle from x-axis to first radius (ccw, radians) """
|
||||
|
||||
# Defaults for to_polygons
|
||||
poly_num_points = DEFAULT_POLY_NUM_POINTS # type: int
|
||||
poly_max_arclen = None # type: float
|
||||
poly_num_points: Optional[int]
|
||||
""" Sets the default number of points for `.polygonize()` """
|
||||
|
||||
poly_max_arclen: Optional[float]
|
||||
""" Sets the default max segement length for `.polygonize()` """
|
||||
|
||||
# radius properties
|
||||
@property
|
||||
def radii(self) -> numpy.ndarray:
|
||||
"""
|
||||
Return the radii [rx, ry]
|
||||
|
||||
:return: [rx, ry]
|
||||
Return the radii `[rx, ry]`
|
||||
"""
|
||||
return self._radii
|
||||
|
||||
@ -70,7 +72,8 @@ class Ellipse(Shape):
|
||||
Rotation of rx from the x axis. Uses the interval [0, pi) in radians (counterclockwise
|
||||
is positive)
|
||||
|
||||
:return: counterclockwise rotation in radians
|
||||
Returns:
|
||||
counterclockwise rotation in radians
|
||||
"""
|
||||
return self._rotation
|
||||
|
||||
@ -82,13 +85,16 @@ class Ellipse(Shape):
|
||||
|
||||
def __init__(self,
|
||||
radii: vector2,
|
||||
poly_num_points: int=DEFAULT_POLY_NUM_POINTS,
|
||||
poly_max_arclen: float=None,
|
||||
offset: vector2=(0.0, 0.0),
|
||||
rotation: float=0,
|
||||
mirrored: Tuple[bool] = (False, False),
|
||||
layer: int=0,
|
||||
dose: float=1.0):
|
||||
poly_num_points: Optional[int] = DEFAULT_POLY_NUM_POINTS,
|
||||
poly_max_arclen: Optional[float] = None,
|
||||
offset: vector2 = (0.0, 0.0),
|
||||
rotation: float = 0,
|
||||
mirrored: Sequence[bool] = (False, False),
|
||||
layer: layer_t = 0,
|
||||
dose: float = 1.0,
|
||||
locked: bool = False):
|
||||
object.__setattr__(self, 'locked', False)
|
||||
self.identifier = ()
|
||||
self.radii = radii
|
||||
self.offset = offset
|
||||
self.rotation = rotation
|
||||
@ -97,10 +103,19 @@ class Ellipse(Shape):
|
||||
self.dose = dose
|
||||
self.poly_num_points = poly_num_points
|
||||
self.poly_max_arclen = poly_max_arclen
|
||||
self.locked = locked
|
||||
|
||||
def __deepcopy__(self, memo: Dict = None) -> 'Ellipse':
|
||||
memo = {} if memo is None else memo
|
||||
new = copy.copy(self).unlock()
|
||||
new._offset = self._offset.copy()
|
||||
new._radii = self._radii.copy()
|
||||
new.locked = self.locked
|
||||
return new
|
||||
|
||||
def to_polygons(self,
|
||||
poly_num_points: int=None,
|
||||
poly_max_arclen: float=None
|
||||
poly_num_points: Optional[int] = None,
|
||||
poly_max_arclen: Optional[float] = None,
|
||||
) -> List[Polygon]:
|
||||
if poly_num_points is None:
|
||||
poly_num_points = self.poly_num_points
|
||||
@ -162,6 +177,21 @@ class Ellipse(Shape):
|
||||
scale = self.radius_y
|
||||
angle = (self.rotation + pi / 2) % pi
|
||||
return (type(self), radii, self.layer), \
|
||||
(self.offset, scale/norm_value, angle, self.dose), \
|
||||
(self.offset, scale/norm_value, angle, False, self.dose), \
|
||||
lambda: Ellipse(radii=radii*norm_value, layer=self.layer)
|
||||
|
||||
def lock(self) -> 'Ellipse':
|
||||
self.radii.flags.writeable = False
|
||||
Shape.lock(self)
|
||||
return self
|
||||
|
||||
def unlock(self) -> 'Ellipse':
|
||||
Shape.unlock(self)
|
||||
self.radii.flags.writeable = True
|
||||
return self
|
||||
|
||||
def __repr__(self) -> str:
|
||||
rotation = f' r{self.rotation*180/pi:g}' if self.rotation != 0 else ''
|
||||
dose = f' d{self.dose:g}' if self.dose != 1 else ''
|
||||
locked = ' L' if self.locked else ''
|
||||
return f'<Ellipse l{self.layer} o{self.offset} r{self.radii}{rotation}{dose}{locked}>'
|
||||
|
@ -1,15 +1,21 @@
|
||||
from typing import List, Tuple
|
||||
from typing import List, Tuple, Dict, Optional, Sequence
|
||||
import copy
|
||||
from enum import Enum
|
||||
import numpy
|
||||
from numpy import pi
|
||||
from numpy import pi, inf
|
||||
|
||||
from . import Shape, normalized_shape_tuple
|
||||
from . import Shape, normalized_shape_tuple, Polygon, Circle
|
||||
from .. import PatternError
|
||||
from ..utils import is_scalar, rotation_matrix_2d, vector2
|
||||
from ..utils import is_scalar, rotation_matrix_2d, vector2, layer_t
|
||||
from ..utils import remove_colinear_vertices, remove_duplicate_vertices
|
||||
|
||||
__author__ = 'Jan Petykiewicz'
|
||||
|
||||
class PathCap(Enum):
|
||||
Flush = 0 # Path ends at final vertices
|
||||
Circle = 1 # Path extends past final vertices with a semicircle of radius width/2
|
||||
Square = 2 # Path extends past final vertices with a width-by-width/2 rectangle
|
||||
SquareCustom = 4 # Path extends past final vertices with a rectangle of length
|
||||
# defined by path.cap_extensions
|
||||
|
||||
|
||||
class Path(Shape):
|
||||
@ -19,23 +25,19 @@ class Path(Shape):
|
||||
|
||||
A normalized_form(...) is available, but can be quite slow with lots of vertices.
|
||||
"""
|
||||
_vertices = None # type: numpy.ndarray
|
||||
_width = None # type: float
|
||||
_cap = None # type: Path.Cap
|
||||
|
||||
class Cap(Enum):
|
||||
Flush = 0
|
||||
Circle = 1
|
||||
Square = 2
|
||||
__slots__ = ('_vertices', '_width', '_cap', '_cap_extensions')
|
||||
_vertices: numpy.ndarray
|
||||
_width: float
|
||||
_cap: PathCap
|
||||
_cap_extensions: Optional[numpy.ndarray]
|
||||
|
||||
Cap = PathCap
|
||||
|
||||
# width property
|
||||
@property
|
||||
def width(self) -> float:
|
||||
"""
|
||||
Path width (float, >= 0)
|
||||
|
||||
:return: width
|
||||
"""
|
||||
return self._width
|
||||
|
||||
@ -49,31 +51,56 @@ class Path(Shape):
|
||||
|
||||
# cap property
|
||||
@property
|
||||
def cap(self) -> 'Path.Cap':
|
||||
def cap(self) -> PathCap:
|
||||
"""
|
||||
Path end-cap
|
||||
|
||||
:return: Path.Cap enum
|
||||
"""
|
||||
return self._cap
|
||||
|
||||
@cap.setter
|
||||
def cap(self, val: 'Path.Cap'):
|
||||
self._cap = Path.Cap(val)
|
||||
def cap(self, val: PathCap):
|
||||
# TODO: Document that setting cap can change cap_extensions
|
||||
self._cap = PathCap(val)
|
||||
if self.cap != PathCap.SquareCustom:
|
||||
self.cap_extensions = None
|
||||
elif self.cap_extensions is None:
|
||||
# just got set to SquareCustom
|
||||
self.cap_extensions = numpy.zeros(2)
|
||||
|
||||
# cap_extensions property
|
||||
@property
|
||||
def cap_extensions(self) -> Optional[numpy.ndarray]:
|
||||
"""
|
||||
Path end-cap extension
|
||||
|
||||
Returns:
|
||||
2-element ndarray or `None`
|
||||
"""
|
||||
return self._cap_extensions
|
||||
|
||||
@cap_extensions.setter
|
||||
def cap_extensions(self, vals: Optional[numpy.ndarray]):
|
||||
custom_caps = (PathCap.SquareCustom,)
|
||||
if self.cap in custom_caps:
|
||||
if vals is None:
|
||||
raise Exception('Tried to set cap extensions to None on path with custom cap type')
|
||||
self._cap_extensions = numpy.array(vals, dtype=float)
|
||||
else:
|
||||
if vals is not None:
|
||||
raise Exception('Tried to set custom cap extensions on path with non-custom cap type')
|
||||
self._cap_extensions = vals
|
||||
|
||||
# vertices property
|
||||
@property
|
||||
def vertices(self) -> numpy.ndarray:
|
||||
"""
|
||||
Vertices of the path (Nx2 ndarray: [[x0, y0], [x1, y1], ...]
|
||||
|
||||
:return: vertices
|
||||
Vertices of the path (Nx2 ndarray: `[[x0, y0], [x1, y1], ...]`)
|
||||
"""
|
||||
return self._vertices
|
||||
|
||||
@vertices.setter
|
||||
def vertices(self, val: numpy.ndarray):
|
||||
val = numpy.array(val, dtype=float)
|
||||
val = numpy.array(val, dtype=float) #TODO document that these might not be copied
|
||||
if len(val.shape) < 2 or val.shape[1] != 2:
|
||||
raise PatternError('Vertices must be an Nx2 array')
|
||||
if val.shape[0] < 2:
|
||||
@ -113,42 +140,77 @@ class Path(Shape):
|
||||
def __init__(self,
|
||||
vertices: numpy.ndarray,
|
||||
width: float = 0.0,
|
||||
cap: 'Path.Cap' = Path.Cap.Flush,
|
||||
offset: vector2=(0.0, 0.0),
|
||||
rotation: float = 0
|
||||
mirrored: Tuple[bool] = (False, False),
|
||||
layer: int=0,
|
||||
dose: float=1.0,
|
||||
) -> 'Path':
|
||||
cap: PathCap = PathCap.Flush,
|
||||
cap_extensions: numpy.ndarray = None,
|
||||
offset: vector2 = (0.0, 0.0),
|
||||
rotation: float = 0,
|
||||
mirrored: Sequence[bool] = (False, False),
|
||||
layer: layer_t = 0,
|
||||
dose: float = 1.0,
|
||||
locked: bool = False,
|
||||
):
|
||||
object.__setattr__(self, 'locked', False)
|
||||
self._cap_extensions = None # Since .cap setter might access it
|
||||
|
||||
self.identifier = ()
|
||||
self.offset = offset
|
||||
self.layer = layer
|
||||
self.dose = dose
|
||||
self.vertices = vertices
|
||||
self.width = width
|
||||
self.cap = cap
|
||||
if cap_extensions is not None:
|
||||
self.cap_extensions = cap_extensions
|
||||
self.rotate(rotation)
|
||||
[self.mirror(a) for a, do in enumerate(mirrored) if do]
|
||||
self.locked = locked
|
||||
|
||||
def __deepcopy__(self, memo: Dict = None) -> 'Path':
|
||||
memo = {} if memo is None else memo
|
||||
new = copy.copy(self).unlock()
|
||||
new._offset = self._offset.copy()
|
||||
new._vertices = self._vertices.copy()
|
||||
new._cap = copy.deepcopy(self._cap, memo)
|
||||
new._cap_extensions = copy.deepcopy(self._cap_extensions, memo)
|
||||
new.locked = self.locked
|
||||
return new
|
||||
|
||||
@staticmethod
|
||||
def travel(travel_pairs: Tuple[Tuple[float, float]],
|
||||
width: float = 0.0,
|
||||
cap: 'Path.Cap' = Path.Cap.Flush,
|
||||
offset: vector2=(0.0, 0.0),
|
||||
rotation: float = 0,
|
||||
mirrored: Tuple[bool] = (False, False),
|
||||
layer: int=0,
|
||||
dose: float=1.0,
|
||||
cap: PathCap = PathCap.Flush,
|
||||
cap_extensions = None,
|
||||
offset: vector2 = (0.0, 0.0),
|
||||
rotation: float = 0,
|
||||
mirrored: Sequence[bool] = (False, False),
|
||||
layer: layer_t = 0,
|
||||
dose: float = 1.0,
|
||||
) -> 'Path':
|
||||
"""
|
||||
TODO
|
||||
Build a path by specifying the turn angles and travel distances
|
||||
rather than setting the distances directly.
|
||||
|
||||
Args:
|
||||
travel_pairs: A list of (angle, distance) pairs that define
|
||||
the path. Angles are counterclockwise, in radians, and are relative
|
||||
to the previous segment's direction (the initial angle is relative
|
||||
to the +x axis).
|
||||
width: Path width, default `0`
|
||||
cap: End-cap type, default `Path.Cap.Flush` (no end-cap)
|
||||
cap_extensions: End-cap extension distances, when using `Path.Cap.SquareCustom`.
|
||||
Default `(0, 0)` or `None`, depending on cap type
|
||||
offset: Offset, default `(0, 0)`
|
||||
rotation: Rotation counterclockwise, in radians. Default `0`
|
||||
mirrored: Whether to mirror across the x or y axes. For example,
|
||||
`mirrored=(True, False)` results in a reflection across the x-axis,
|
||||
multiplying the path's y-coordinates by -1. Default `(False, False)`
|
||||
layer: Layer, default `0`
|
||||
dose: Dose, default `1.0`
|
||||
|
||||
:param offset: Offset, default (0, 0)
|
||||
:param rotation: Rotation counterclockwise, in radians
|
||||
:param layer: Layer, default 0
|
||||
:param dose: Dose, default 1.0
|
||||
:return: The resulting Path object
|
||||
Returns:
|
||||
The resulting Path object
|
||||
"""
|
||||
#TODO: needs testing
|
||||
direction = numpy.array([1, 0])
|
||||
|
||||
verts = [[0, 0]]
|
||||
@ -156,22 +218,17 @@ class Path(Shape):
|
||||
direction = numpy.dot(rotation_matrix_2d(angle), direction.T).T
|
||||
verts.append(verts[-1] + direction * distance)
|
||||
|
||||
return Path(vertices=verts, width=width, cap=cap,
|
||||
return Path(vertices=verts, width=width, cap=cap, cap_extensions=cap_extensions,
|
||||
offset=offset, rotation=rotation, mirrored=mirrored,
|
||||
layer=layer, dose=dose)
|
||||
|
||||
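Since `travel()` is still flagged as needing tests, a usage sketch may help pin down the intent; the angles and distances below are arbitrary illustrative values:

```python
import numpy
from masque.shapes import Path, PathCap

# Start heading along +x: go 10, turn 90 degrees left, go 5, turn 90 degrees right, go 5.
path = Path.travel(
    travel_pairs=[(0, 10), (numpy.pi / 2, 5), (-numpy.pi / 2, 5)],
    width=0.5,
    cap=PathCap.Flush,
    layer=(1, 0),
)
print(path.vertices)   # approximately [[0, 0], [10, 0], [10, 5], [15, 5]]
```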
def to_polygons(self,
|
||||
poly_num_points: int=None,
|
||||
poly_max_arclen: float=None,
|
||||
poly_num_points: int = None,
|
||||
poly_max_arclen: float = None,
|
||||
) -> List['Polygon']:
|
||||
if self.cap in (Path.Cap.Flush, Path.Cap.Circle):
|
||||
extension = 0.0
|
||||
elif self.cap == Path.Cap.Square:
|
||||
extension = self.width / 2
|
||||
else:
|
||||
raise PatternError('Unrecognized path endcap: {}'.format(self.cap))
|
||||
extensions = self._calculate_cap_extensions()
|
||||
|
||||
v = remove_colinear_vertices(numpy.array(element.xy, dtype=float), closed_path=False)
|
||||
v = remove_colinear_vertices(self.vertices, closed_path=False)
|
||||
dv = numpy.diff(v, axis=0)
|
||||
dvdir = dv / numpy.sqrt((dv * dv).sum(axis=1))[:, None]
|
||||
|
||||
@ -181,13 +238,12 @@ class Path(Shape):
|
||||
|
||||
perp = dvdir[:, ::-1] * [[1, -1]] * self.width / 2
|
||||
|
||||
# add extension
|
||||
if extension != 0:
|
||||
v[0] -= dvdir[0] * extension
|
||||
v[-1] += dvdir[-1] * extension
|
||||
# add extensions
|
||||
if (extensions != 0).any():
|
||||
v[0] -= dvdir[0] * extensions[0]
|
||||
v[-1] += dvdir[-1] * extensions[1]
|
||||
dv = numpy.diff(v, axis=0) # recalculate dv; dvdir and perp should stay the same
|
||||
|
||||
|
||||
# Find intersections of expanded sides
|
||||
As = numpy.stack((dv[:-1], -dv[1:]), axis=2)
|
||||
bs = v[1:-1] - v[:-2] + perp[1:] - perp[:-1]
|
||||
@ -210,18 +266,18 @@ class Path(Shape):
|
||||
if towards_perp[i]:
|
||||
o0.append(intersection_p[i])
|
||||
if acute[i]:
|
||||
o1.append(intersection_n[i])
|
||||
else:
|
||||
# Opposite is >270
|
||||
pt0 = v[i + 1] - perp[i + 0] + dvdir[i + 0] * element.width / 2
|
||||
pt1 = v[i + 1] - perp[i + 1] - dvdir[i + 1] * element.width / 2
|
||||
pt0 = v[i + 1] - perp[i + 0] + dvdir[i + 0] * self.width / 2
|
||||
pt1 = v[i + 1] - perp[i + 1] - dvdir[i + 1] * self.width / 2
|
||||
o1 += [pt0, pt1]
|
||||
else:
|
||||
o1.append(intersection_n[i])
|
||||
else:
|
||||
o1.append(intersection_n[i])
|
||||
if acute[i]:
|
||||
# > 270
|
||||
pt0 = v[i + 1] + perp[i + 0] + dvdir[i + 0] * element.width / 2
|
||||
pt1 = v[i + 1] + perp[i + 1] - dvdir[i + 1] * element.width / 2
|
||||
pt0 = v[i + 1] + perp[i + 0] + dvdir[i + 0] * self.width / 2
|
||||
pt1 = v[i + 1] + perp[i + 1] - dvdir[i + 1] * self.width / 2
|
||||
o0 += [pt0, pt1]
|
||||
else:
|
||||
o0.append(intersection_p[i])
|
||||
@ -231,34 +287,27 @@ class Path(Shape):
|
||||
|
||||
polys = [Polygon(offset=self.offset, vertices=verts, dose=self.dose, layer=self.layer)]
|
||||
|
||||
if self.cap == Path.Cap.Circle:
|
||||
for vert in v:
|
||||
if self.cap == PathCap.Circle:
|
||||
#for vert in v: # not sure if every vertex, or just ends?
|
||||
for vert in [v[0], v[-1]]:
|
||||
circ = Circle(offset=vert, radius=self.width / 2, dose=self.dose, layer=self.layer)
|
||||
polys += circ.to_polygons(poly_num_points=poly_num_points, poly_max_arclen=poly_max_arclen)
|
||||
|
||||
return polys
|
||||
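For reference, a sketch of how the polygonization above is typically exercised; with `PathCap.Circle` the result is one body polygon plus a discretized circle per end:

```python
from masque.shapes import Path, PathCap

path = Path(vertices=[[0, 0], [20, 0]], width=2.0, cap=PathCap.Circle, layer=0)
polys = path.to_polygons(poly_num_points=32)   # each end-cap circle approximated with 32 points
print(len(polys))                              # expect 3: body + two end caps
print(path.get_bounds())                       # approximately [[-1, -1], [21, 1]] with round caps
```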
|
||||
def get_bounds(self) -> numpy.ndarray:
|
||||
if self.cap == Path.Cap.Circle:
|
||||
if self.cap == PathCap.Circle:
|
||||
bounds = self.offset + numpy.vstack((numpy.min(self.vertices, axis=0) - self.width / 2,
|
||||
numpy.max(self.vertices, axis=0) + self.width / 2))
|
||||
elif self.cap in (Path.Cap.Flush,
|
||||
Path.Cap.Square):
|
||||
if self.cap == Path.Cap.Flush:
|
||||
extension = 0
|
||||
elif self.cap == Path.Cap.Square:
|
||||
extension = element.width / 2
|
||||
|
||||
v = remove_colinear_vertices(self.vertices, closed_path=False)
|
||||
dv = numpy.diff(v, axis=0)
|
||||
dvdir = dv / numpy.sqrt((dv * dv).sum(axis=1))[:, None]
|
||||
perp = dvdir[:, ::-1] * [[1, -1]] * element.width / 2
|
||||
|
||||
v[0] -= dvdir * extension
|
||||
v[-1] += dvdir * extension
|
||||
|
||||
bounds = self.offset + numpy.vstack((numpy.min(v - numpy.abs(perp), axis=0),
|
||||
numpy.max(v + numpy.abs(perp), axis=0)))
|
||||
elif self.cap in (PathCap.Flush,
|
||||
PathCap.Square,
|
||||
PathCap.SquareCustom):
|
||||
bounds = numpy.array([[+inf, +inf], [-inf, -inf]])
|
||||
polys = self.to_polygons()
|
||||
for poly in polys:
|
||||
poly_bounds = poly.get_bounds()
|
||||
bounds[0, :] = numpy.minimum(bounds[0, :], poly_bounds[0, :])
|
||||
bounds[1, :] = numpy.maximum(bounds[1, :], poly_bounds[1, :])
|
||||
else:
|
||||
raise PatternError('get_bounds() not implemented for endcaps: {}'.format(self.cap))
|
||||
|
||||
@ -301,15 +350,16 @@ class Path(Shape):
|
||||
width0 = self.width / norm_value
|
||||
|
||||
return (type(self), reordered_vertices.data.tobytes(), width0, self.cap, self.layer), \
|
||||
(offset, scale/norm_value, rotation, self.dose), \
|
||||
lambda: Polygon(reordered_vertices*norm_value, width=self.width*norm_value,
|
||||
cap=self.cap, layer=self.layer)
|
||||
(offset, scale/norm_value, rotation, False, self.dose), \
|
||||
lambda: Path(reordered_vertices*norm_value, width=self.width*norm_value,
|
||||
cap=self.cap, layer=self.layer)
|
||||
|
||||
def clean_vertices(self) -> 'Path':
|
||||
"""
|
||||
Removes duplicate, co-linear and otherwise redundant vertices.
|
||||
|
||||
:returns: self
|
||||
Returns:
|
||||
self
|
||||
"""
|
||||
self.remove_colinear_vertices()
|
||||
return self
|
||||
@ -318,7 +368,8 @@ class Path(Shape):
|
||||
'''
|
||||
Removes all consecutive duplicate (repeated) vertices.
|
||||
|
||||
:returns: self
|
||||
Returns:
|
||||
self
|
||||
'''
|
||||
self.vertices = remove_duplicate_vertices(self.vertices, closed_path=False)
|
||||
return self
|
||||
@ -327,7 +378,38 @@ class Path(Shape):
|
||||
'''
|
||||
Removes consecutive co-linear vertices.
|
||||
|
||||
:returns: self
|
||||
Returns:
|
||||
self
|
||||
'''
|
||||
self.vertices = remove_colinear_vertices(self.vertices, closed_path=False)
|
||||
return self
|
||||
|
||||
def _calculate_cap_extensions(self) -> numpy.ndarray:
|
||||
if self.cap == PathCap.Square:
|
||||
extensions = numpy.full(2, self.width / 2)
|
||||
elif self.cap == PathCap.SquareCustom:
|
||||
extensions = self.cap_extensions
|
||||
else:
|
||||
# Flush or Circle
|
||||
extensions = numpy.zeros(2)
|
||||
return extensions
|
||||
|
||||
def lock(self) -> 'Path':
|
||||
self.vertices.flags.writeable = False
|
||||
if self.cap_extensions is not None:
|
||||
self.cap_extensions.flags.writeable = False
|
||||
Shape.lock(self)
|
||||
return self
|
||||
|
||||
def unlock(self) -> 'Path':
|
||||
Shape.unlock(self)
|
||||
self.vertices.flags.writeable = True
|
||||
if self.cap_extensions is not None:
|
||||
self.cap_extensions.flags.writeable = True
|
||||
return self
|
||||
|
||||
def __repr__(self) -> str:
|
||||
centroid = self.offset + self.vertices.mean(axis=0)
|
||||
dose = f' d{self.dose:g}' if self.dose != 1 else ''
|
||||
locked = ' L' if self.locked else ''
|
||||
return f'<Path l{self.layer} centroid {centroid} v{len(self.vertices)} w{self.width} c{self.cap}{dose}{locked}>'
|
||||
|
@ -1,38 +1,36 @@
|
||||
from typing import List, Tuple
|
||||
from typing import List, Tuple, Dict, Optional, Sequence
|
||||
import copy
|
||||
import numpy
|
||||
from numpy import pi
|
||||
|
||||
from . import Shape, normalized_shape_tuple
|
||||
from .. import PatternError
|
||||
from ..utils import is_scalar, rotation_matrix_2d, vector2
|
||||
from ..utils import is_scalar, rotation_matrix_2d, vector2, layer_t
|
||||
from ..utils import remove_colinear_vertices, remove_duplicate_vertices
|
||||
|
||||
__author__ = 'Jan Petykiewicz'
|
||||
|
||||
|
||||
class Polygon(Shape):
|
||||
"""
|
||||
A polygon, consisting of a bunch of vertices (Nx2 ndarray) which specify an
|
||||
implicitly-closed boundary, and an offset.
|
||||
|
||||
A normalized_form(...) is available, but can be quite slow with lots of vertices.
|
||||
A `normalized_form(...)` is available, but can be quite slow with lots of vertices.
|
||||
"""
|
||||
_vertices = None # type: numpy.ndarray
|
||||
__slots__ = ('_vertices',)
|
||||
_vertices: numpy.ndarray
|
||||
""" Nx2 ndarray of vertices `[[x0, y0], [x1, y1], ...]` """
|
||||
|
||||
# vertices property
|
||||
@property
|
||||
def vertices(self) -> numpy.ndarray:
|
||||
"""
|
||||
Vertices of the polygon (Nx2 ndarray: [[x0, y0], [x1, y1], ...]
|
||||
|
||||
:return: vertices
|
||||
Vertices of the polygon (Nx2 ndarray: `[[x0, y0], [x1, y1], ...]`)
|
||||
"""
|
||||
return self._vertices
|
||||
|
||||
@vertices.setter
|
||||
def vertices(self, val: numpy.ndarray):
|
||||
val = numpy.array(val, dtype=float)
|
||||
val = numpy.array(val, dtype=float) #TODO document that these might not be copied
|
||||
if len(val.shape) < 2 or val.shape[1] != 2:
|
||||
raise PatternError('Vertices must be an Nx2 array')
|
||||
if val.shape[0] < 3:
|
||||
@ -71,83 +69,101 @@ class Polygon(Shape):
|
||||
|
||||
def __init__(self,
|
||||
vertices: numpy.ndarray,
|
||||
offset: vector2=(0.0, 0.0),
|
||||
rotation: float=0.0,
|
||||
mirrored: Tuple[bool] = (False, False),
|
||||
layer: int=0,
|
||||
dose: float=1.0,
|
||||
offset: vector2 = (0.0, 0.0),
|
||||
rotation: float = 0.0,
|
||||
mirrored: Sequence[bool] = (False, False),
|
||||
layer: layer_t = 0,
|
||||
dose: float = 1.0,
|
||||
locked: bool = False,
|
||||
):
|
||||
object.__setattr__(self, 'locked', False)
|
||||
self.identifier = ()
|
||||
self.layer = layer
|
||||
self.dose = dose
|
||||
self.vertices = vertices
|
||||
self.offset = offset
|
||||
self.rotate(rotation)
|
||||
[self.mirror(a) for a, do in enumerate(mirrored) if do]
|
||||
self.locked = locked
|
||||
|
||||
def __deepcopy__(self, memo: Optional[Dict] = None) -> 'Polygon':
|
||||
memo = {} if memo is None else memo
|
||||
new = copy.copy(self).unlock()
|
||||
new._offset = self._offset.copy()
|
||||
new._vertices = self._vertices.copy()
|
||||
new.locked = self.locked
|
||||
return new
|
||||
|
||||
@staticmethod
|
||||
def square(side_length: float,
|
||||
rotation: float=0.0,
|
||||
offset: vector2=(0.0, 0.0),
|
||||
layer: int=0,
|
||||
dose: float=1.0
|
||||
rotation: float = 0.0,
|
||||
offset: vector2 = (0.0, 0.0),
|
||||
layer: layer_t = 0,
|
||||
dose: float = 1.0,
|
||||
) -> 'Polygon':
|
||||
"""
|
||||
Draw a square given side_length, centered on the origin.
|
||||
|
||||
:param side_length: Length of one side
|
||||
:param rotation: Rotation counterclockwise, in radians
|
||||
:param offset: Offset, default (0, 0)
|
||||
:param layer: Layer, default 0
|
||||
:param dose: Dose, default 1.0
|
||||
:return: A Polygon object containing the requested square
|
||||
Args:
|
||||
side_length: Length of one side
|
||||
rotation: Rotation counterclockwise, in radians
|
||||
offset: Offset, default `(0, 0)`
|
||||
layer: Layer, default `0`
|
||||
dose: Dose, default `1.0`
|
||||
|
||||
Returns:
|
||||
A Polygon object containing the requested square
|
||||
"""
|
||||
norm_square = numpy.array([[-1, -1],
|
||||
[-1, +1],
|
||||
[+1, +1],
|
||||
[+1, -1]], dtype=float)
|
||||
vertices = 0.5 * side_length * norm_square
|
||||
poly = Polygon(vertices, offset, layer, dose)
|
||||
poly = Polygon(vertices, offset=offset, layer=layer, dose=dose)
|
||||
poly.rotate(rotation)
|
||||
return poly
|
||||
|
||||
@staticmethod
|
||||
def rectangle(lx: float,
|
||||
ly: float,
|
||||
rotation: float=0,
|
||||
offset: vector2=(0.0, 0.0),
|
||||
layer: int=0,
|
||||
dose: float=1.0
|
||||
rotation: float = 0,
|
||||
offset: vector2 = (0.0, 0.0),
|
||||
layer: layer_t = 0,
|
||||
dose: float = 1.0,
|
||||
) -> 'Polygon':
|
||||
"""
|
||||
Draw a rectangle with side lengths lx and ly, centered on the origin.
|
||||
|
||||
:param lx: Length along x (before rotation)
|
||||
:param ly: Length along y (before rotation)
|
||||
:param rotation: Rotation counterclockwise, in radians
|
||||
:param offset: Offset, default (0, 0)
|
||||
:param layer: Layer, default 0
|
||||
:param dose: Dose, default 1.0
|
||||
:return: A Polygon object containing the requested rectangle
|
||||
Args:
|
||||
lx: Length along x (before rotation)
|
||||
ly: Length along y (before rotation)
|
||||
rotation: Rotation counterclockwise, in radians
|
||||
offset: Offset, default `(0, 0)`
|
||||
layer: Layer, default `0`
|
||||
dose: Dose, default `1.0`
|
||||
|
||||
Returns:
|
||||
A Polygon object containing the requested rectangle
|
||||
"""
|
||||
vertices = 0.5 * numpy.array([[-lx, -ly],
|
||||
[-lx, +ly],
|
||||
[+lx, +ly],
|
||||
[+lx, -ly]], dtype=float)
|
||||
poly = Polygon(vertices, offset, layer, dose)
|
||||
poly = Polygon(vertices, offset=offset, layer=layer, dose=dose)
|
||||
poly.rotate(rotation)
|
||||
return poly
|
||||
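A quick sketch of the two fixed-shape constructors; both center the result on the origin before applying `rotation` and `offset`:

```python
import numpy
from masque.shapes import Polygon

sq = Polygon.square(side_length=2.0, offset=(5, 5), layer=(2, 0))
print(sq.get_bounds())      # [[4, 4], [6, 6]]

rect = Polygon.rectangle(lx=4.0, ly=1.0, rotation=numpy.pi / 2)
print(rect.get_bounds())    # rotated 90 degrees, so roughly [[-0.5, -2], [0.5, 2]]
```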
|
||||
@staticmethod
|
||||
def rect(xmin: float = None,
|
||||
xctr: float = None,
|
||||
xmax: float = None,
|
||||
lx: float = None,
|
||||
ymin: float = None,
|
||||
yctr: float = None,
|
||||
ymax: float = None,
|
||||
ly: float = None,
|
||||
layer: int = 0,
|
||||
dose: float = 1.0
|
||||
def rect(xmin: Optional[float] = None,
|
||||
xctr: Optional[float] = None,
|
||||
xmax: Optional[float] = None,
|
||||
lx: Optional[float] = None,
|
||||
ymin: Optional[float] = None,
|
||||
yctr: Optional[float] = None,
|
||||
ymax: Optional[float] = None,
|
||||
ly: Optional[float] = None,
|
||||
layer: layer_t = 0,
|
||||
dose: float = 1.0,
|
||||
) -> 'Polygon':
|
||||
"""
|
||||
Draw a rectangle by specifying side/center positions.
|
||||
@ -155,25 +171,34 @@ class Polygon(Shape):
|
||||
Must provide 2 of (xmin, xctr, xmax, lx),
|
||||
and 2 of (ymin, yctr, ymax, ly).
|
||||
|
||||
:param xmin: Minimum x coordinate
|
||||
:param xctr: Center x coordinate
|
||||
:param xmax: Maximum x coordinate
|
||||
:param lx: Length along x direction
|
||||
:param ymin: Minimum y coordinate
|
||||
:param yctr: Center y coordinate
|
||||
:param ymax: Maximum y coordinate
|
||||
:param ly: Length along y direction
|
||||
:param layer: Layer, default 0
|
||||
:param dose: Dose, default 1.0
|
||||
:return: A Polygon object containing the requested rectangle
|
||||
Args:
|
||||
xmin: Minimum x coordinate
|
||||
xctr: Center x coordinate
|
||||
xmax: Maximum x coordinate
|
||||
lx: Length along x direction
|
||||
ymin: Minimum y coordinate
|
||||
yctr: Center y coordinate
|
||||
ymax: Maximum y coordinate
|
||||
ly: Length along y direction
|
||||
layer: Layer, default `0`
|
||||
dose: Dose, default `1.0`
|
||||
|
||||
Returns:
|
||||
A Polygon object containing the requested rectangle
|
||||
"""
|
||||
if lx is None:
|
||||
if xctr is None:
|
||||
assert(xmin is not None)
|
||||
assert(xmax is not None)
|
||||
xctr = 0.5 * (xmax + xmin)
|
||||
lx = xmax - xmin
|
||||
elif xmax is None:
|
||||
assert(xmin is not None)
|
||||
assert(xctr is not None)
|
||||
lx = 2 * (xctr - xmin)
|
||||
elif xmin is None:
|
||||
assert(xctr is not None)
|
||||
assert(xmax is not None)
|
||||
lx = 2 * (xmax - xctr)
|
||||
else:
|
||||
raise PatternError('Two of xmin, xctr, xmax, lx must be None!')
|
||||
@ -181,19 +206,29 @@ class Polygon(Shape):
|
||||
if xctr is not None:
|
||||
pass
|
||||
elif xmax is None:
|
||||
assert(xmin is not None)
|
||||
assert(lx is not None)
|
||||
xctr = xmin + 0.5 * lx
|
||||
elif xmin is None:
|
||||
assert(xmax is not None)
|
||||
assert(lx is not None)
|
||||
xctr = xmax - 0.5 * lx
|
||||
else:
|
||||
raise PatternError('Two of xmin, xctr, xmax, lx must be None!')
|
||||
|
||||
if ly is None:
|
||||
if yctr is None:
|
||||
assert(ymin is not None)
|
||||
assert(ymax is not None)
|
||||
yctr = 0.5 * (ymax + ymin)
|
||||
ly = ymax - ymin
|
||||
elif ymax is None:
|
||||
assert(ymin is not None)
|
||||
assert(yctr is not None)
|
||||
ly = 2 * (yctr - ymin)
|
||||
elif ymin is None:
|
||||
assert(yctr is not None)
|
||||
assert(ymax is not None)
|
||||
ly = 2 * (ymax - yctr)
|
||||
else:
|
||||
raise PatternError('Two of ymin, yctr, ymax, ly must be None!')
|
||||
@ -201,8 +236,12 @@ class Polygon(Shape):
|
||||
if yctr is not None:
|
||||
pass
|
||||
elif ymax is None:
|
||||
assert(ymin is not None)
|
||||
assert(ly is not None)
|
||||
yctr = ymin + 0.5 * ly
|
||||
elif ymin is None:
|
||||
assert(ly is not None)
|
||||
assert(ymax is not None)
|
||||
yctr = ymax - 0.5 * ly
|
||||
else:
|
||||
raise PatternError('Two of ymin, yctr, ymax, ly must be None!')
|
||||
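The constraint resolution above accepts any two of the four x-specifiers and any two of the four y-specifiers; a sketch of two equivalent calls:

```python
from masque.shapes import Polygon

a = Polygon.rect(xmin=0, xmax=10, ymin=-1, ymax=1, layer=0)
b = Polygon.rect(xctr=5, lx=10, yctr=0, ly=2, layer=0)
print(a.get_bounds())   # [[0, -1], [10, 1]]
print(b.get_bounds())   # same extents, specified via center + length instead
```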
@ -213,8 +252,8 @@ class Polygon(Shape):
|
||||
|
||||
|
||||
def to_polygons(self,
|
||||
_poly_num_points: int=None,
|
||||
_poly_max_arclen: float=None,
|
||||
poly_num_points: int = None, # unused
|
||||
poly_max_arclen: float = None, # unused
|
||||
) -> List['Polygon']:
|
||||
return [copy.deepcopy(self)]
|
||||
|
||||
@ -255,15 +294,18 @@ class Polygon(Shape):
|
||||
x_min = x_min[y_min]
|
||||
reordered_vertices = numpy.roll(rotated_vertices, -x_min, axis=0)
|
||||
|
||||
# TODO: normalize mirroring?
|
||||
|
||||
return (type(self), reordered_vertices.data.tobytes(), self.layer), \
|
||||
(offset, scale/norm_value, rotation, self.dose), \
|
||||
(offset, scale/norm_value, rotation, False, self.dose), \
|
||||
lambda: Polygon(reordered_vertices*norm_value, layer=self.layer)
|
||||
|
||||
def clean_vertices(self) -> 'Polygon':
|
||||
"""
|
||||
Removes duplicate, co-linear and otherwise redundant vertices.
|
||||
|
||||
:returns: self
|
||||
Returns:
|
||||
self
|
||||
"""
|
||||
self.remove_colinear_vertices()
|
||||
return self
|
||||
@ -272,7 +314,8 @@ class Polygon(Shape):
|
||||
'''
|
||||
Removes all consecutive duplicate (repeated) vertices.
|
||||
|
||||
:returns: self
|
||||
Returns:
|
||||
self
|
||||
'''
|
||||
self.vertices = remove_duplicate_vertices(self.vertices, closed_path=True)
|
||||
return self
|
||||
@ -281,7 +324,24 @@ class Polygon(Shape):
|
||||
'''
|
||||
Removes consecutive co-linear vertices.
|
||||
|
||||
:returns: self
|
||||
Returns:
|
||||
self
|
||||
'''
|
||||
self.vertices = remove_colinear_vertices(self.vertices, closed_path=True)
|
||||
return self
|
||||
|
||||
def lock(self) -> 'Polygon':
|
||||
self.vertices.flags.writeable = False
|
||||
Shape.lock(self)
|
||||
return self
|
||||
|
||||
def unlock(self) -> 'Polygon':
|
||||
Shape.unlock(self)
|
||||
self.vertices.flags.writeable = True
|
||||
return self
|
||||
|
||||
def __repr__(self) -> str:
|
||||
centroid = self.offset + self.vertices.mean(axis=0)
|
||||
dose = f' d{self.dose:g}' if self.dose != 1 else ''
|
||||
locked = ' L' if self.locked else ''
|
||||
return f'<Polygon l{self.layer} centroid {centroid} v{len(self.vertices)}{dose}{locked}>'
|
||||
|
@ -1,18 +1,18 @@
|
||||
from typing import List, Tuple, Callable
|
||||
from typing import List, Tuple, Callable, TypeVar, Optional, TYPE_CHECKING
|
||||
from abc import ABCMeta, abstractmethod
|
||||
import copy
|
||||
import numpy
|
||||
|
||||
from .. import PatternError
|
||||
from ..utils import is_scalar, rotation_matrix_2d, vector2
|
||||
from ..error import PatternError, PatternLockedError
|
||||
from ..utils import is_scalar, rotation_matrix_2d, vector2, layer_t
|
||||
|
||||
|
||||
__author__ = 'Jan Petykiewicz'
|
||||
if TYPE_CHECKING:
|
||||
from . import Polygon
|
||||
|
||||
|
||||
# Type definitions
|
||||
normalized_shape_tuple = Tuple[Tuple,
|
||||
Tuple[numpy.ndarray, float, float, float],
|
||||
Tuple[numpy.ndarray, float, float, bool, float],
|
||||
Callable[[], 'Shape']]
|
||||
|
||||
# ## Module-wide defaults
|
||||
@ -20,103 +20,144 @@ normalized_shape_tuple = Tuple[Tuple,
|
||||
DEFAULT_POLY_NUM_POINTS = 24
|
||||
|
||||
|
||||
T = TypeVar('T', bound='Shape')
|
||||
|
||||
|
||||
class Shape(metaclass=ABCMeta):
|
||||
"""
|
||||
Abstract class specifying functions common to all shapes.
|
||||
"""
|
||||
__slots__ = ('_offset', '_layer', '_dose', 'identifier', 'locked')
|
||||
|
||||
# [x_offset, y_offset]
|
||||
_offset = numpy.array([0.0, 0.0]) # type: numpy.ndarray
|
||||
_offset: numpy.ndarray
|
||||
""" `[x_offset, y_offset]` """
|
||||
|
||||
# Layer (integer >= 0 or tuple)
|
||||
_layer = 0 # type: int or Tuple
|
||||
_layer: layer_t
|
||||
""" Layer (integer >= 0 or tuple) """
|
||||
|
||||
# Dose
|
||||
_dose = 1.0 # type: float
|
||||
_dose: float
|
||||
""" Dose """
|
||||
|
||||
# --- Abstract methods
|
||||
identifier: Tuple
|
||||
""" An arbitrary identifier for the shape, usually empty but used by `Pattern.flatten()` """
|
||||
|
||||
locked: bool
|
||||
""" If `True`, any changes to the shape will raise a `PatternLockedError` """
|
||||
|
||||
def __setattr__(self, name, value):
|
||||
if self.locked and name != 'locked':
|
||||
raise PatternLockedError()
|
||||
object.__setattr__(self, name, value)
|
||||
|
||||
def __copy__(self) -> 'Shape':
|
||||
cls = self.__class__
|
||||
new = cls.__new__(cls)
|
||||
for name in Shape.__slots__ + self.__slots__:
|
||||
object.__setattr__(new, name, getattr(self, name))
|
||||
return new
|
||||
|
||||
'''
|
||||
--- Abstract methods
|
||||
'''
|
||||
@abstractmethod
|
||||
def to_polygons(self, num_vertices: int, max_arclen: float) -> List['Polygon']:
|
||||
def to_polygons(self,
|
||||
num_vertices: Optional[int] = None,
|
||||
max_arclen: Optional[float] = None,
|
||||
) -> List['Polygon']:
|
||||
"""
|
||||
Returns a list of polygons which approximate the shape.
|
||||
|
||||
:param num_vertices: Number of points to use for each polygon. Can be overridden by
|
||||
max_arclen if that results in more points. Optional, defaults to shapes'
|
||||
internal defaults.
|
||||
:param max_arclen: Maximum arclength which can be approximated by a single line
|
||||
segment. Optional, defaults to shapes' internal defaults.
|
||||
:return: List of polygons equivalent to the shape
|
||||
Args:
|
||||
num_vertices: Number of points to use for each polygon. Can be overridden by
|
||||
max_arclen if that results in more points. Optional, defaults to shapes'
|
||||
internal defaults.
|
||||
max_arclen: Maximum arclength which can be approximated by a single line
|
||||
segment. Optional, defaults to shapes' internal defaults.
|
||||
|
||||
Returns:
|
||||
List of polygons equivalent to the shape
|
||||
"""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def get_bounds(self) -> numpy.ndarray:
|
||||
"""
|
||||
Returns [[x_min, y_min], [x_max, y_max]] which specify a minimal bounding box for the shape.
|
||||
|
||||
:return: [[x_min, y_min], [x_max, y_max]]
|
||||
Returns `[[x_min, y_min], [x_max, y_max]]` which specify a minimal bounding box for the shape.
|
||||
"""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def rotate(self, theta: float) -> 'Shape':
|
||||
def rotate(self: T, theta: float) -> T:
|
||||
"""
|
||||
Rotate the shape around its center (0, 0), ignoring its offset.
|
||||
Rotate the shape around its origin (0, 0), ignoring its offset.
|
||||
|
||||
:param theta: Angle to rotate by (counterclockwise, radians)
|
||||
:return: self
|
||||
Args:
|
||||
theta: Angle to rotate by (counterclockwise, radians)
|
||||
|
||||
Returns:
|
||||
self
|
||||
"""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def mirror(self, axis: int) -> 'Shape':
|
||||
def mirror(self: T, axis: int) -> T:
|
||||
"""
|
||||
Mirror the shape across an axis.
|
||||
|
||||
:param axis: Axis to mirror across.
|
||||
:return: self
|
||||
Args:
|
||||
axis: Axis to mirror across.
|
||||
(0: mirror across x axis, 1: mirror across y axis)
|
||||
|
||||
Returns:
|
||||
self
|
||||
"""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def scale_by(self, c: float) -> 'Shape':
|
||||
def scale_by(self: T, c: float) -> T:
|
||||
"""
|
||||
Scale the shape's size (eg. radius, for a circle) by a constant factor.
|
||||
|
||||
:param c: Factor to scale by
|
||||
:return: self
|
||||
Args:
|
||||
c: Factor to scale by
|
||||
|
||||
Returns:
|
||||
self
|
||||
"""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
def normalized_form(self, norm_value: int) -> normalized_shape_tuple:
|
||||
def normalized_form(self: T, norm_value: int) -> normalized_shape_tuple:
|
||||
"""
|
||||
Writes the shape in a standardized notation, with offset, scale, rotation, and dose
|
||||
information separated out from the remaining values.
|
||||
|
||||
:param norm_value: This value is used to normalize lengths intrinsic to the shape;
|
||||
Args:
|
||||
norm_value: This value is used to normalize lengths intrinsic to the shape;
|
||||
eg. for a circle, the returned intrinsic radius value will be (radius / norm_value), and
|
||||
the returned callable will create a Circle(radius=norm_value, ...). This is useful
|
||||
the returned callable will create a `Circle(radius=norm_value, ...)`. This is useful
|
||||
when you find it important for quantities to remain in a certain range, eg. for
|
||||
GDSII where vertex locations are stored as integers.
|
||||
:return: The returned information takes the form of a 3-element tuple,
|
||||
(intrinsic, extrinsic, constructor). These are further broken down as:
|
||||
intrinsic: A tuple of basic types containing all information about the instance that
|
||||
is not contained in 'extrinsic'. Usually, intrinsic[0] == type(self).
|
||||
extrinsic: ([x_offset, y_offset], scale, rotation, dose)
|
||||
constructor: A callable (no arguments) which returns an instance of type(self) with
|
||||
internal state equivalent to 'intrinsic'.
|
||||
|
||||
Returns:
|
||||
The returned information takes the form of a 3-element tuple,
|
||||
`(intrinsic, extrinsic, constructor)`. These are further broken down as:
|
||||
`intrinsic`: A tuple of basic types containing all information about the instance that
|
||||
is not contained in 'extrinsic'. Usually, `intrinsic[0] == type(self)`.
|
||||
`extrinsic`: `([x_offset, y_offset], scale, rotation, mirror_across_x_axis, dose)`
|
||||
`constructor`: A callable (no arguments) which returns an instance of `type(self)` with
|
||||
internal state equivalent to `intrinsic`.
|
||||
"""
|
||||
pass
|
||||
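The intrinsic/extrinsic split documented above is what makes shape deduplication possible (identical geometry collapses to one intrinsic key plus per-instance placement data). A sketch, assuming two squares that differ only in offset:

```python
from masque.shapes import Polygon

p1 = Polygon.square(side_length=2.0, offset=(0, 0))
p2 = Polygon.square(side_length=2.0, offset=(30, 40))

i1, e1, make1 = p1.normalized_form(norm_value=1000)
i2, e2, make2 = p2.normalized_form(norm_value=1000)

print(i1 == i2)       # expect True: same geometry up to placement
print(e1, e2)         # per-instance (offset, scale, rotation, mirror_x, dose) data
canonical = make1()   # a fresh shape carrying only the shared intrinsic geometry
```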
|
||||
# ---- Non-abstract properties
|
||||
'''
|
||||
---- Non-abstract properties
|
||||
'''
|
||||
# offset property
|
||||
@property
|
||||
def offset(self) -> numpy.ndarray:
|
||||
"""
|
||||
[x, y] offset
|
||||
|
||||
:return: [x_offset, y_offset]
|
||||
"""
|
||||
return self._offset
|
||||
|
||||
@ -131,16 +172,14 @@ class Shape(metaclass=ABCMeta):
|
||||
|
||||
# layer property
|
||||
@property
|
||||
def layer(self) -> int or Tuple[int]:
|
||||
def layer(self) -> layer_t:
|
||||
"""
|
||||
Layer number (int or tuple of ints)
|
||||
|
||||
:return: Layer
|
||||
Layer number or name (int, tuple of ints, or string)
|
||||
"""
|
||||
return self._layer
|
||||
|
||||
@layer.setter
|
||||
def layer(self, val: int or List[int]):
|
||||
def layer(self, val: layer_t):
|
||||
self._layer = val
|
||||
|
||||
# dose property
|
||||
@ -148,8 +187,6 @@ class Shape(metaclass=ABCMeta):
|
||||
def dose(self) -> float:
|
||||
"""
|
||||
Dose (float >= 0)
|
||||
|
||||
:return: Dose value
|
||||
"""
|
||||
return self._dose
|
||||
|
||||
@ -161,32 +198,41 @@ class Shape(metaclass=ABCMeta):
|
||||
raise PatternError('Dose must be non-negative')
|
||||
self._dose = val
|
||||
|
||||
# ---- Non-abstract methods
|
||||
def copy(self) -> 'Shape':
|
||||
'''
|
||||
---- Non-abstract methods
|
||||
'''
|
||||
def copy(self: T) -> T:
|
||||
"""
|
||||
Returns a deep copy of the shape.
|
||||
|
||||
:return: Deep copy of self
|
||||
Returns:
|
||||
copy.deepcopy(self)
|
||||
"""
|
||||
return copy.deepcopy(self)
|
||||
|
||||
def translate(self, offset: vector2) -> 'Shape':
|
||||
def translate(self: T, offset: vector2) -> T:
|
||||
"""
|
||||
Translate the shape by the given offset
|
||||
|
||||
:param offset: [x_offset, y,offset]
|
||||
:return: self
|
||||
Args:
|
||||
offset: [x_offset, y_offset]
|
||||
|
||||
Returns:
|
||||
self
|
||||
"""
|
||||
self.offset += offset
|
||||
return self
|
||||
|
||||
def rotate_around(self, pivot: vector2, rotation: float) -> 'Shape':
|
||||
def rotate_around(self: T, pivot: vector2, rotation: float) -> T:
|
||||
"""
|
||||
Rotate the shape around a point.
|
||||
|
||||
:param pivot: Point (x, y) to rotate around
|
||||
:param rotation: Angle to rotate by (counterclockwise, radians)
|
||||
:return: self
|
||||
Args:
|
||||
pivot: Point (x, y) to rotate around
|
||||
rotation: Angle to rotate by (counterclockwise, radians)
|
||||
|
||||
Returns:
|
||||
self
|
||||
"""
|
||||
pivot = numpy.array(pivot, dtype=float)
|
||||
self.translate(-pivot)
|
||||
@ -195,19 +241,25 @@ class Shape(metaclass=ABCMeta):
|
||||
self.translate(+pivot)
|
||||
return self
|
||||
|
||||
def manhattanize_fast(self, grid_x: numpy.ndarray, grid_y: numpy.ndarray) -> List['Polygon']:
|
||||
def manhattanize_fast(self,
|
||||
grid_x: numpy.ndarray,
|
||||
grid_y: numpy.ndarray,
|
||||
) -> List['Polygon']:
|
||||
"""
|
||||
Returns a list of polygons with grid-aligned ("Manhattan") edges approximating the shape.
|
||||
|
||||
This function works by
|
||||
1) Converting the shape to polygons using .to_polygons()
|
||||
1) Converting the shape to polygons using `.to_polygons()`
|
||||
2) Approximating each edge with an equivalent Manhattan edge
|
||||
This process results in a reasonable Manhattan representation of the shape, but is
|
||||
imprecise near non-Manhattan or off-grid corners.
|
||||
|
||||
:param grid_x: List of allowed x-coordinates for the Manhattanized polygon edges.
|
||||
:param grid_y: List of allowed y-coordinates for the Manhattanized polygon edges.
|
||||
:return: List of Polygon objects with grid-aligned edges.
|
||||
Args:
|
||||
grid_x: List of allowed x-coordinates for the Manhattanized polygon edges.
|
||||
grid_y: List of allowed y-coordinates for the Manhattanized polygon edges.
|
||||
|
||||
Returns:
|
||||
List of `Polygon` objects with grid-aligned edges.
|
||||
"""
|
||||
from . import Polygon
|
||||
|
||||
@ -297,12 +349,15 @@ class Shape(metaclass=ABCMeta):
|
||||
return manhattan_polygons
|
||||
|
||||
|
||||
def manhattanize(self, grid_x: numpy.ndarray, grid_y: numpy.ndarray) -> List['Polygon']:
|
||||
def manhattanize(self,
|
||||
grid_x: numpy.ndarray,
|
||||
grid_y: numpy.ndarray
|
||||
) -> List['Polygon']:
|
||||
"""
|
||||
Returns a list of polygons with grid-aligned ("Manhattan") edges approximating the shape.
|
||||
|
||||
This function works by
|
||||
1) Converting the shape to polygons using .to_polygons()
|
||||
1) Converting the shape to polygons using `.to_polygons()`
|
||||
2) Accurately rasterizing each polygon on a grid,
|
||||
where the edges of each grid cell correspond to the allowed coordinates
|
||||
3) Thresholding the (anti-aliased) rasterized image
|
||||
@ -311,7 +366,7 @@ class Shape(metaclass=ABCMeta):
|
||||
caveats include:
|
||||
a) If high accuracy is important, perform any polygonization and clipping operations
|
||||
prior to calling this function. This allows you to specify any arguments you may
|
||||
need for .to_polygons(), and also avoids calling .manhattanize() multiple times for
|
||||
need for `.to_polygons()`, and also avoids calling `.manhattanize()` multiple times for
|
||||
the same grid location (which causes inaccuracies in the final representation).
|
||||
b) If the shape is very large or the grid very fine, memory requirements can be reduced
|
||||
by breaking the shape apart into multiple, smaller shapes.
|
||||
@ -319,19 +374,22 @@ class Shape(metaclass=ABCMeta):
|
||||
equidistant from allowed edge location.
|
||||
|
||||
Implementation notes:
|
||||
i) Rasterization is performed using float_raster, giving a high-precision anti-aliased
|
||||
i) Rasterization is performed using `float_raster`, giving a high-precision anti-aliased
|
||||
rasterized image.
|
||||
ii) To find the exact polygon edges, the thresholded rasterized image is supersampled
|
||||
prior to calling skimage.measure.find_contours(), which uses marching squares
|
||||
to find the contours. This is done because find_contours() performs interpolation,
|
||||
prior to calling `skimage.measure.find_contours()`, which uses marching squares
|
||||
to find the contours. This is done because `find_contours()` performs interpolation,
|
||||
which has to be undone in order to regain the axis-aligned contours. A targeted
|
||||
rewrite of find_contours() for this specific application, or use of a different
|
||||
rewrite of `find_contours()` for this specific application, or use of a different
|
||||
boundary tracing method could remove this requirement, but for now this seems to
|
||||
be the most performant approach.
|
||||
|
||||
:param grid_x: List of allowed x-coordinates for the Manhattanized polygon edges.
|
||||
:param grid_y: List of allowed y-coordinates for the Manhattanized polygon edges.
|
||||
:return: List of Polygon objects with grid-aligned edges.
|
||||
Args:
|
||||
grid_x: List of allowed x-coordinates for the Manhattanized polygon edges.
|
||||
grid_y: List of allowed y-coordinates for the Manhattanized polygon edges.
|
||||
|
||||
Returns:
|
||||
List of `Polygon` objects with grid-aligned edges.
|
||||
"""
|
||||
from . import Polygon
|
||||
import skimage.measure
|
||||
@ -384,3 +442,37 @@ class Shape(metaclass=ABCMeta):
|
||||
|
||||
return manhattan_polygons
|
||||
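A usage sketch for the two Manhattanization helpers; the 0.05-unit grids are hypothetical, and `manhattanize()` additionally needs `float_raster` and `skimage` installed. The `Circle` constructor call assumes the same signature used by `Path.to_polygons()` above:

```python
import numpy
from masque.shapes import Circle

circle = Circle(radius=1.0, offset=(0, 0), layer=0)
grid_x = numpy.arange(-1.5, 1.5, 0.05)
grid_y = numpy.arange(-1.5, 1.5, 0.05)

fast_polys = circle.manhattanize_fast(grid_x, grid_y)   # edge-by-edge approximation
slow_polys = circle.manhattanize(grid_x, grid_y)        # rasterize-and-trace, more accurate
```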
|
||||
def set_layer(self: T, layer: layer_t) -> T:
|
||||
"""
|
||||
Chainable method for changing the layer.
|
||||
|
||||
Args:
|
||||
layer: new value for self.layer
|
||||
|
||||
Returns:
|
||||
self
|
||||
"""
|
||||
self.layer = layer
|
||||
return self
|
||||
|
||||
def lock(self: T) -> T:
|
||||
"""
|
||||
Lock the Shape, disallowing further changes
|
||||
|
||||
Returns:
|
||||
self
|
||||
"""
|
||||
self.offset.flags.writeable = False
|
||||
object.__setattr__(self, 'locked', True)
|
||||
return self
|
||||
|
||||
def unlock(self: T) -> T:
|
||||
"""
|
||||
Unlock the Shape
|
||||
|
||||
Returns:
|
||||
self
|
||||
"""
|
||||
object.__setattr__(self, 'locked', False)
|
||||
self.offset.flags.writeable = True
|
||||
return self
|
||||
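A sketch of the locking contract introduced here: once locked, attribute writes raise `PatternLockedError` and the offset array becomes read-only:

```python
from masque.shapes import Polygon
from masque.error import PatternLockedError

poly = Polygon.square(side_length=1.0).lock()
try:
    poly.layer = (2, 0)              # any attribute write is now refused
except PatternLockedError:
    print('locked, as expected')

poly.unlock()
poly.layer = (2, 0)                  # fine again after unlocking
```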
|
@ -1,25 +1,28 @@
|
||||
from typing import List, Tuple
|
||||
from typing import List, Tuple, Dict, Sequence, Optional, MutableSequence
|
||||
import copy
|
||||
import numpy
|
||||
from numpy import pi, inf
|
||||
|
||||
from . import Shape, Polygon, normalized_shape_tuple
|
||||
from .. import PatternError
|
||||
from ..utils import is_scalar, vector2, get_bit
|
||||
from ..utils import is_scalar, vector2, get_bit, normalize_mirror, layer_t
|
||||
|
||||
# Loaded on use:
|
||||
# from freetype import Face
|
||||
# from matplotlib.path import Path
|
||||
|
||||
|
||||
__author__ = 'Jan Petykiewicz'
|
||||
|
||||
|
||||
class Text(Shape):
|
||||
_string = ''
|
||||
_height = 1.0
|
||||
_rotation = 0.0
|
||||
_mirrored = None
|
||||
font_path = ''
|
||||
"""
|
||||
Text (to be printed e.g. as a set of polygons).
|
||||
This is distinct from non-printed Label objects.
|
||||
"""
|
||||
__slots__ = ('_string', '_height', '_rotation', '_mirrored', 'font_path')
|
||||
_string: str
|
||||
_height: float
|
||||
_rotation: float
|
||||
_mirrored: numpy.ndarray #ndarray[bool]
|
||||
font_path: str
|
||||
|
||||
# vertices property
|
||||
@property
|
||||
@ -54,24 +57,28 @@ class Text(Shape):
|
||||
|
||||
# Mirrored property
|
||||
@property
|
||||
def mirrored(self) -> List[bool]:
|
||||
def mirrored(self) -> numpy.ndarray: #ndarray[bool]
|
||||
return self._mirrored
|
||||
|
||||
@mirrored.setter
|
||||
def mirrored(self, val: List[bool]):
|
||||
def mirrored(self, val: Sequence[bool]):
|
||||
if is_scalar(val):
|
||||
raise PatternError('Mirrored must be a 2-element list of booleans')
|
||||
self._mirrored = list(val)
|
||||
self._mirrored = numpy.array(val, dtype=bool, copy=True)
|
||||
|
||||
def __init__(self,
|
||||
string: str,
|
||||
height: float,
|
||||
font_path: str,
|
||||
offset: vector2=(0.0, 0.0),
|
||||
rotation: float=0.0,
|
||||
mirrored: Tuple[bool]=(False, False),
|
||||
layer: int=0,
|
||||
dose: float=1.0):
|
||||
offset: vector2 = (0.0, 0.0),
|
||||
rotation: float = 0.0,
|
||||
mirrored: Tuple[bool, bool] = (False, False),
|
||||
layer: layer_t = 0,
|
||||
dose: float = 1.0,
|
||||
locked: bool = False,
|
||||
):
|
||||
object.__setattr__(self, 'locked', False)
|
||||
self.identifier = ()
|
||||
self.offset = offset
|
||||
self.layer = layer
|
||||
self.dose = dose
|
||||
@ -80,13 +87,22 @@ class Text(Shape):
|
||||
self.rotation = rotation
|
||||
self.font_path = font_path
|
||||
self.mirrored = mirrored
|
||||
self.locked = locked
|
||||
|
||||
def __deepcopy__(self, memo: Dict = None) -> 'Text':
|
||||
memo = {} if memo is None else memo
|
||||
new = copy.copy(self).unlock()
|
||||
new._offset = self._offset.copy()
|
||||
new._mirrored = copy.deepcopy(self._mirrored, memo)
|
||||
new.locked = self.locked
|
||||
return new
|
||||
|
||||
def to_polygons(self,
|
||||
_poly_num_points: int=None,
|
||||
_poly_max_arclen: float=None
|
||||
poly_num_points: Optional[int] = None, # unused
|
||||
poly_max_arclen: Optional[float] = None, # unused
|
||||
) -> List[Polygon]:
|
||||
all_polygons = []
|
||||
total_advance = 0
|
||||
total_advance = 0.0
|
||||
for char in self.string:
|
||||
raw_polys, advance = get_char_as_polygons(self.font_path, char)
|
||||
|
||||
@ -117,18 +133,22 @@ class Text(Shape):
|
||||
return self
|
||||
|
||||
def normalized_form(self, norm_value: float) -> normalized_shape_tuple:
|
||||
return (type(self), self.string, self.font_path, self.mirrored, self.layer), \
|
||||
(self.offset, self.height / norm_value, self.rotation, self.dose), \
|
||||
mirror_x, rotation = normalize_mirror(self.mirrored)
|
||||
rotation += self.rotation
|
||||
rotation %= 2 * pi
|
||||
return (type(self), self.string, self.font_path, self.layer), \
|
||||
(self.offset, self.height / norm_value, rotation, mirror_x, self.dose), \
|
||||
lambda: Text(string=self.string,
|
||||
height=self.height * norm_value,
|
||||
font_path=self.font_path,
|
||||
mirrored=self.mirrored,
|
||||
rotation=rotation,
|
||||
mirrored=(mirror_x, False),
|
||||
layer=self.layer)
|
||||
|
||||
def get_bounds(self) -> numpy.ndarray:
|
||||
# rotation makes this a huge pain when using slot.advance and glyph.bbox(), so
|
||||
# just convert to polygons instead
|
||||
bounds = [[+inf, +inf], [-inf, -inf]]
|
||||
bounds = numpy.array([[+inf, +inf], [-inf, -inf]])
|
||||
polys = self.to_polygons()
|
||||
for poly in polys:
|
||||
poly_bounds = poly.get_bounds()
|
||||
@ -140,7 +160,7 @@ class Text(Shape):
|
||||
|
||||
def get_char_as_polygons(font_path: str,
|
||||
char: str,
|
||||
resolution: float=48*64,
|
||||
resolution: float = 48*64,
|
||||
) -> Tuple[List[List[List[float]]], float]:
|
||||
from freetype import Face
|
||||
from matplotlib.path import Path
|
||||
@ -150,12 +170,15 @@ def get_char_as_polygons(font_path: str,
|
||||
|
||||
The output is normalized so that the font size is 1 unit.
|
||||
|
||||
:param font_path: File path specifying a font loadable by freetype
|
||||
:param char: Character to convert to polygons
|
||||
:param resolution: Internal resolution setting (used for freetype
|
||||
Face.set_font_size(resolution)). Modify at your own peril!
|
||||
:return: List of polygons [[[x0, y0], [x1, y1], ...], ...] and 'advance' distance (distance
|
||||
from the start of this glyph to the start of the next one)
|
||||
Args:
|
||||
font_path: File path specifying a font loadable by freetype
|
||||
char: Character to convert to polygons
|
||||
resolution: Internal resolution setting (used for freetype
|
||||
`Face.set_font_size(resolution)`). Modify at your own peril!
|
||||
|
||||
Returns:
|
||||
List of polygons `[[[x0, y0], [x1, y1], ...], ...]` and
|
||||
'advance' distance (distance from the start of this glyph to the start of the next one)
|
||||
"""
|
||||
if len(char) != 1:
|
||||
raise Exception('get_char_as_polygons called with non-char')
|
||||
@ -175,7 +198,7 @@ def get_char_as_polygons(font_path: str,
|
||||
tags = outline.tags[start:end + 1]
|
||||
tags.append(tags[0])
|
||||
|
||||
segments = []
|
||||
segments: List[List[List[float]]] = []
|
||||
for j, point in enumerate(points):
|
||||
# If we already have a segment, add this point to it
|
||||
if j > 0:
|
||||
@ -220,3 +243,20 @@ def get_char_as_polygons(font_path: str,
|
||||
polygons = path.to_polygons()
|
||||
|
||||
return polygons, advance
|
||||
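A usage sketch for the glyph helper; the font path is a placeholder (any font loadable by freetype works), and the import assumes the module layout shown in this changeset (`masque/shapes/text.py`). Requires `freetype` and `matplotlib`:

```python
from masque.shapes.text import get_char_as_polygons

polys, advance = get_char_as_polygons('/path/to/font.ttf', 'A')   # hypothetical font path
print(len(polys))    # number of closed contours making up the glyph
print(advance)       # distance to the start of the next glyph, with font size = 1 unit
```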
|
||||
def lock(self) -> 'Text':
|
||||
self.mirrored.flags.writeable = False
|
||||
Shape.lock(self)
|
||||
return self
|
||||
|
||||
def unlock(self) -> 'Text':
|
||||
Shape.unlock(self)
|
||||
self.mirrored.flags.writeable = True
|
||||
return self
|
||||
|
||||
def __repr__(self) -> str:
|
||||
rotation = f' r°{self.rotation*180/pi:g}' if self.rotation != 0 else ''
|
||||
dose = f' d{self.dose:g}' if self.dose != 1 else ''
|
||||
locked = ' L' if self.locked else ''
|
||||
mirrored = ' m{:d}{:d}'.format(*self.mirrored) if self.mirrored.any() else ''
|
||||
return f'<TextShape "{self.string}" l{self.layer} o{self.offset} h{self.height:g}{rotation}{mirrored}{dose}{locked}>'
|
||||
|
@ -2,18 +2,21 @@
|
||||
SubPattern provides basic support for nesting Pattern objects within each other, by adding
|
||||
offset, rotation, scaling, and other such properties to the reference.
|
||||
"""
|
||||
#TODO more top-level documentation
|
||||
|
||||
from typing import Union, List
|
||||
from typing import Union, List, Dict, Tuple, Optional, Sequence, TYPE_CHECKING, Any
|
||||
import copy
|
||||
|
||||
import numpy
|
||||
from numpy import pi
|
||||
|
||||
from .error import PatternError
|
||||
from .error import PatternError, PatternLockedError
|
||||
from .utils import is_scalar, rotation_matrix_2d, vector2
|
||||
from .repetition import GridRepetition
|
||||
|
||||
|
||||
__author__ = 'Jan Petykiewicz'
|
||||
if TYPE_CHECKING:
|
||||
from . import Pattern
|
||||
|
||||
|
||||
class SubPattern:
|
||||
@ -21,21 +24,61 @@ class SubPattern:
|
||||
SubPattern provides basic support for nesting Pattern objects within each other, by adding
|
||||
offset, rotation, scaling, and associated methods.
|
||||
"""
|
||||
__slots__ = ('_pattern',
|
||||
'_offset',
|
||||
'_rotation',
|
||||
'_dose',
|
||||
'_scale',
|
||||
'_mirrored',
|
||||
'identifier',
|
||||
'locked')
|
||||
|
||||
pattern = None # type: Pattern
|
||||
_offset = (0.0, 0.0) # type: numpy.ndarray
|
||||
_rotation = 0.0 # type: float
|
||||
_dose = 1.0 # type: float
|
||||
_scale = 1.0 # type: float
|
||||
_mirrored = None # type: List[bool]
|
||||
_pattern: Optional['Pattern']
|
||||
""" The `Pattern` being instanced """
|
||||
|
||||
_offset: numpy.ndarray
|
||||
""" (x, y) offset for the instance """
|
||||
|
||||
_rotation: float
|
||||
""" rotation for the instance, radians counterclockwise """
|
||||
|
||||
_dose: float
|
||||
""" dose factor for the instance """
|
||||
|
||||
_scale: float
|
||||
""" scale factor for the instance """
|
||||
|
||||
_mirrored: numpy.ndarray # ndarray[bool]
|
||||
""" Whether to mirror the instanc across the x and/or y axes. """
|
||||
|
||||
identifier: Tuple[Any, ...]
|
||||
""" Arbitrary identifier, used internally by some `masque` functions. """
|
||||
|
||||
locked: bool
|
||||
""" If `True`, disallows changes to the GridRepetition """
|
||||
|
||||
def __init__(self,
|
||||
pattern: 'Pattern',
|
||||
offset: vector2=(0.0, 0.0),
|
||||
rotation: float=0.0,
|
||||
mirrored: List[bool]=None,
|
||||
dose: float=1.0,
|
||||
scale: float=1.0):
|
||||
pattern: Optional['Pattern'],
|
||||
offset: vector2 = (0.0, 0.0),
|
||||
rotation: float = 0.0,
|
||||
mirrored: Optional[Sequence[bool]] = None,
|
||||
dose: float = 1.0,
|
||||
scale: float = 1.0,
|
||||
locked: bool = False,
|
||||
identifier: Tuple[Any, ...] = ()):
|
||||
"""
|
||||
Args:
|
||||
pattern: Pattern to reference.
|
||||
offset: (x, y) offset applied to the referenced pattern. Not affected by rotation etc.
|
||||
rotation: Rotation (radians, counterclockwise) relative to the referenced pattern's (0, 0).
|
||||
mirrored: Whether to mirror the referenced pattern across its x and y axes.
|
||||
dose: Scaling factor applied to the dose.
|
||||
scale: Scaling factor applied to the pattern's geometry.
|
||||
locked: Whether the `SubPattern` is locked after initialization.
|
||||
identifier: Arbitrary tuple, used internally by some `masque` functions.
|
||||
"""
|
||||
object.__setattr__(self, 'locked', False)
|
||||
self.identifier = identifier
|
||||
self.pattern = pattern
|
||||
self.offset = offset
|
||||
self.rotation = rotation
|
||||
@ -44,6 +87,41 @@ class SubPattern:
|
||||
if mirrored is None:
|
||||
mirrored = [False, False]
|
||||
self.mirrored = mirrored
|
||||
self.locked = locked
|
||||
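A sketch of typical `SubPattern` construction with the keyword arguments handled by this `__init__` (the geometry and names are illustrative only):

```python
import numpy
from masque import Pattern, SubPattern
from masque.shapes import Polygon

dev = Pattern(name='device')
dev.shapes.append(Polygon.square(side_length=5.0, layer=(1, 0)))

top = Pattern(name='top')
top.subpatterns.append(SubPattern(
    pattern=dev,
    offset=(100, 0),
    rotation=numpy.pi / 4,
    mirrored=(False, True),
    scale=2.0,
))
print(top.subpatterns[0].get_bounds())   # extent of the scaled/rotated/mirrored instance
```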
|
||||
def __setattr__(self, name, value):
|
||||
if self.locked and name != 'locked':
|
||||
raise PatternLockedError()
|
||||
object.__setattr__(self, name, value)
|
||||
|
||||
def __copy__(self) -> 'SubPattern':
|
||||
new = SubPattern(pattern=self.pattern,
|
||||
offset=self.offset.copy(),
|
||||
rotation=self.rotation,
|
||||
dose=self.dose,
|
||||
scale=self.scale,
|
||||
mirrored=self.mirrored.copy(),
|
||||
locked=self.locked)
|
||||
return new
|
||||
|
||||
def __deepcopy__(self, memo: Dict = None) -> 'SubPattern':
|
||||
memo = {} if memo is None else memo
|
||||
new = copy.copy(self).unlock()
|
||||
new.pattern = copy.deepcopy(self.pattern, memo)
|
||||
new.locked = self.locked
|
||||
return new
|
||||
|
||||
# pattern property
|
||||
@property
|
||||
def pattern(self) -> Optional['Pattern']:
|
||||
return self._pattern
|
||||
|
||||
@pattern.setter
|
||||
def pattern(self, val: Optional['Pattern']):
|
||||
from .pattern import Pattern
|
||||
if val is not None and not isinstance(val, Pattern):
|
||||
raise PatternError('Provided pattern {} is not a Pattern object or None!'.format(val))
|
||||
self._pattern = val
|
||||
|
||||
# offset property
|
||||
@property
|
||||
@ -98,22 +176,23 @@ class SubPattern:
|
||||
|
||||
# Mirrored property
|
||||
@property
|
||||
def mirrored(self) -> List[bool]:
|
||||
def mirrored(self) -> numpy.ndarray: # ndarray[bool]
|
||||
return self._mirrored
|
||||
|
||||
@mirrored.setter
|
||||
def mirrored(self, val: List[bool]):
|
||||
def mirrored(self, val: Sequence[bool]):
|
||||
if is_scalar(val):
|
||||
raise PatternError('Mirrored must be a 2-element list of booleans')
|
||||
self._mirrored = val
|
||||
self._mirrored = numpy.array(val, dtype=bool, copy=True)
|
||||
|
||||
def as_pattern(self) -> 'Pattern':
|
||||
"""
|
||||
Returns a copy of self.pattern which has been scaled, rotated, etc. according to this
|
||||
SubPattern's properties.
|
||||
:return: Copy of self.pattern that has been altered to reflect the SubPattern's properties.
|
||||
Returns:
|
||||
A copy of self.pattern which has been scaled, rotated, etc. according to this
|
||||
`SubPattern`'s properties.
|
||||
"""
|
||||
pattern = self.pattern.deepcopy()
|
||||
assert(self.pattern is not None)
|
||||
pattern = self.pattern.deepcopy().deepunlock()
|
||||
pattern.scale_by(self.scale)
|
||||
[pattern.mirror(ax) for ax, do in enumerate(self.mirrored) if do]
|
||||
pattern.rotate_around((0.0, 0.0), self.rotation)
|
||||
@ -125,8 +204,11 @@ class SubPattern:
|
||||
"""
|
||||
Translate by the given offset
|
||||
|
||||
:param offset: Translate by this offset
|
||||
:return: self
|
||||
Args:
|
||||
offset: Offset `[x, y]` to translate by
|
||||
|
||||
Returns:
|
||||
self
|
||||
"""
|
||||
self.offset += offset
|
||||
return self
|
||||
@ -135,9 +217,12 @@ class SubPattern:
|
||||
"""
|
||||
Rotate around a point
|
||||
|
||||
:param pivot: Point to rotate around
|
||||
:param rotation: Angle to rotate by (counterclockwise, radians)
|
||||
:return: self
|
||||
Args:
|
||||
pivot: Point `[x, y]` to rotate around
|
||||
rotation: Angle to rotate by (counterclockwise, radians)
|
||||
|
||||
Returns:
|
||||
self
|
||||
"""
|
||||
pivot = numpy.array(pivot, dtype=float)
|
||||
self.translate(-pivot)
|
||||
@ -148,10 +233,13 @@ class SubPattern:
|
||||
|
||||
def rotate(self, rotation: float) -> 'SubPattern':
|
||||
"""
|
||||
Rotate around (0, 0)
|
||||
Rotate the instance around it's origin
|
||||
|
||||
:param rotation: Angle to rotate by (counterclockwise, radians)
|
||||
:return: self
|
||||
Args:
|
||||
rotation: Angle to rotate by (counterclockwise, radians)
|
||||
|
||||
Returns:
|
||||
self
|
||||
"""
|
||||
self.rotation += rotation
|
||||
return self
|
||||
@ -160,27 +248,38 @@ class SubPattern:
|
||||
"""
|
||||
Mirror the subpattern across an axis.
|
||||
|
||||
:param axis: Axis to mirror across.
|
||||
:return: self
|
||||
Args:
|
||||
axis: Axis to mirror across.
|
||||
|
||||
Returns:
|
||||
self
|
||||
"""
|
||||
self.mirrored[axis] = not self.mirrored[axis]
|
||||
self.rotation *= -1
|
||||
return self
|
||||
|
||||
def get_bounds(self) -> numpy.ndarray or None:
|
||||
def get_bounds(self) -> Optional[numpy.ndarray]:
|
||||
"""
|
||||
Return a numpy.ndarray containing [[x_min, y_min], [x_max, y_max]], corresponding to the
|
||||
extent of the SubPattern in each dimension.
|
||||
Returns None if the contained Pattern is empty.
|
||||
Return a `numpy.ndarray` containing `[[x_min, y_min], [x_max, y_max]]`, corresponding to the
|
||||
extent of the `SubPattern` in each dimension.
|
||||
Returns `None` if the contained `Pattern` is empty.
|
||||
|
||||
:return: [[x_min, y_min], [x_max, y_max]] or None
|
||||
Returns:
|
||||
`[[x_min, y_min], [x_max, y_max]]` or `None`
|
||||
"""
|
||||
if self.pattern is None:
|
||||
return None
|
||||
return self.as_pattern().get_bounds()
|
||||
|
||||
def scale_by(self, c: float) -> 'SubPattern':
|
||||
"""
|
||||
Scale the subpattern by a factor
|
||||
|
||||
:param c: scaling factor
|
||||
Args:
|
||||
c: scaling factor
|
||||
|
||||
Returns:
|
||||
self
|
||||
"""
|
||||
self.scale *= c
|
||||
return self
|
||||
@ -189,7 +288,8 @@ class SubPattern:
|
||||
"""
|
||||
Return a shallow copy of the subpattern.
|
||||
|
||||
:return: copy.copy(self)
|
||||
Returns:
|
||||
`copy.copy(self)`
|
||||
"""
|
||||
return copy.copy(self)
|
||||
|
||||
@ -197,6 +297,70 @@ class SubPattern:
|
||||
"""
|
||||
Return a deep copy of the subpattern.
|
||||
|
||||
:return: copy.copy(self)
|
||||
Returns:
|
||||
`copy.deepcopy(self)`
|
||||
"""
|
||||
return copy.deepcopy(self)
|
||||
|
||||
def lock(self) -> 'SubPattern':
|
||||
"""
|
||||
Lock the SubPattern, disallowing changes
|
||||
|
||||
Returns:
|
||||
self
|
||||
"""
|
||||
self.offset.flags.writeable = False
|
||||
self.mirrored.flags.writeable = False
|
||||
object.__setattr__(self, 'locked', True)
|
||||
return self
|
||||
|
||||
def unlock(self) -> 'SubPattern':
|
||||
"""
|
||||
Unlock the SubPattern
|
||||
|
||||
Returns:
|
||||
self
|
||||
"""
|
||||
self.offset.flags.writeable = True
|
||||
self.mirrored.flags.writeable = True
|
||||
object.__setattr__(self, 'locked', False)
|
||||
return self
|
||||
|
||||
def deeplock(self) -> 'SubPattern':
|
||||
"""
|
||||
Recursively lock the SubPattern and its contained pattern
|
||||
|
||||
Returns:
|
||||
self
|
||||
"""
|
||||
assert(self.pattern is not None)
|
||||
self.lock()
|
||||
self.pattern.deeplock()
|
||||
return self
|
||||
|
||||
def deepunlock(self) -> 'SubPattern':
|
||||
"""
|
||||
Recursively unlock the SubPattern and its contained pattern
|
||||
|
||||
This is dangerous unless you have just performed a deepcopy, since
|
||||
the subpattern and its components may be used in more than one place!
|
||||
|
||||
Returns:
|
||||
self
|
||||
"""
|
||||
assert(self.pattern is not None)
|
||||
self.unlock()
|
||||
self.pattern.deepunlock()
|
||||
return self
|
||||
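The deep variants recurse into the referenced pattern, which is why the docstring warns about shared use: the same `Pattern` may be instanced from several places. A short sketch:

```python
from masque import Pattern, SubPattern

inner = Pattern(name='cell')
ref = SubPattern(pattern=inner, offset=(0, 0))

ref.deeplock()            # locks this reference and the 'cell' pattern it points to
copied = ref.deepcopy()   # deepcopy first...
copied.deepunlock()       # ...then it is safe to unlock the private copy for editing
```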
|
||||
def __repr__(self) -> str:
|
||||
name = self.pattern.name if self.pattern is not None else None
|
||||
rotation = f' r{self.rotation*180/pi:g}' if self.rotation != 0 else ''
|
||||
scale = f' d{self.scale:g}' if self.scale != 1 else ''
|
||||
mirrored = ' m{:d}{:d}'.format(*self.mirrored) if self.mirrored.any() else ''
|
||||
dose = f' d{self.dose:g}' if self.dose != 1 else ''
|
||||
locked = ' L' if self.locked else ''
|
||||
return f'<SubPattern "{name}" at {self.offset}{rotation}{scale}{mirrored}{dose}{locked}>'
|
||||
|
||||
|
||||
subpattern_t = Union[SubPattern, GridRepetition]
|
||||
|
116
masque/utils.py
@ -2,42 +2,50 @@
|
||||
Various helper functions
|
||||
"""
|
||||
|
||||
from typing import Any, Union, Tuple
|
||||
from typing import Any, Union, Tuple, Sequence
|
||||
|
||||
import numpy
|
||||
|
||||
# Type definitions
|
||||
vector2 = Union[numpy.ndarray, Tuple[float, float]]
|
||||
vector2 = Union[numpy.ndarray, Tuple[float, float], Sequence[float]]
|
||||
layer_t = Union[int, Tuple[int, int], str]
|
||||
|
||||
|
||||
def is_scalar(var: Any) -> bool:
|
||||
"""
|
||||
Alias for 'not hasattr(var, "__len__")'
|
||||
|
||||
:param var: Checks if var has a length.
|
||||
Args:
|
||||
var: Checks if `var` has a length.
|
||||
"""
|
||||
return not hasattr(var, "__len__")
|
||||
|
||||
|
||||
def get_bit(bit_string: Any, bit_id: int) -> bool:
|
||||
"""
|
||||
Returns true iff bit number 'bit_id' from the right of 'bit_string' is 1
|
||||
Interprets bit number `bit_id` from the right (lsb) of `bit_string` as a boolean
|
||||
|
||||
:param bit_string: Bit string to test
|
||||
:param bit_id: Bit number, 0-indexed from the right (lsb)
|
||||
:return: value of the requested bit (bool)
|
||||
Args:
|
||||
bit_string: Bit string to test
|
||||
bit_id: Bit number, 0-indexed from the right (lsb)
|
||||
|
||||
Returns:
|
||||
Boolean value of the requested bit
|
||||
"""
|
||||
return bit_string & (1 << bit_id) != 0
|
||||
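These helpers are mostly used for unpacking GDSII-style flag words; a quick sketch with a hypothetical flag value:

```python
from masque.utils import get_bit

flags = 0b0000000000000101      # hypothetical flag word

print(get_bit(flags, 0))        # True: bit 0 (lsb) is set
print(get_bit(flags, 1))        # False: bit 1 is clear
print(get_bit(flags, 2))        # True: bit 2 is set
```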
|
||||
|
||||
def set_bit(bit_string: Any, bit_id: int, value: bool) -> Any:
|
||||
"""
|
||||
Returns 'bit_string' with bit number 'bit_id' set to 'value'.
|
||||
Returns `bit_string`, with bit number `bit_id` set to boolean `value`.
|
||||
|
||||
:param bit_string: Bit string to alter
|
||||
:param bit_id: Bit number, 0-indexed from right (lsb)
|
||||
:param value: Boolean value to set bit to
|
||||
:return: Altered 'bit_string'
|
||||
Args:
|
||||
bit_string: Bit string to alter
|
||||
bit_id: Bit number, 0-indexed from right (lsb)
|
||||
value: Boolean value to set bit to
|
||||
|
||||
Returns:
|
||||
Altered `bit_string`
|
||||
"""
|
||||
mask = (1 << bit_id)
|
||||
bit_string &= ~mask
|
||||
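
Illustrative use of the two bit helpers (values chosen here, not from the diff; `set_bit` is shown per its docstring, its body being truncated by the hunk boundary above):

```python
flags = 0b0101
get_bit(flags, 0)                  # True  -- the lsb is set
get_bit(flags, 1)                  # False
flags = set_bit(flags, 1, True)    # 0b0111
flags = set_bit(flags, 2, False)   # 0b0011
```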
@@ -50,40 +58,78 @@ def rotation_matrix_2d(theta: float) -> numpy.ndarray:
    """
    2D rotation matrix for rotating counterclockwise around the origin.

    :param theta: Angle to rotate, in radians
    :return: rotation matrix
    Args:
        theta: Angle to rotate, in radians

    Returns:
        rotation matrix
    """
    return numpy.array([[numpy.cos(theta), -numpy.sin(theta)],
                        [numpy.sin(theta), +numpy.cos(theta)]])
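
A quick sanity check of the rotation matrix (my example, not from the source):

```python
import numpy

# Rotate the point (1, 0) by 90 degrees counterclockwise.
v = rotation_matrix_2d(numpy.pi / 2) @ numpy.array([1.0, 0.0])
# v is approximately [0, 1]
```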


def normalize_mirror(mirrored: Sequence[bool]) -> Tuple[bool, float]:
    """
    Converts 0-2 mirror operations `(mirror_across_x_axis, mirror_across_y_axis)`
    into 0-1 mirror operations and a rotation

    Args:
        mirrored: `(mirror_across_x_axis, mirror_across_y_axis)`

    Returns:
        `mirror_across_x_axis` (bool) and
        `angle_to_rotate` in radians
    """

    mirrored_x, mirrored_y = mirrored
    mirror_x = (mirrored_x != mirrored_y)   #XOR
    angle = numpy.pi if mirrored_y else 0
    return mirror_x, angle
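
For example (illustrative values), mirroring across both axes collapses to a pure 180-degree rotation:

```python
mirror_x, angle = normalize_mirror((True, True))
# mirror_x == False, angle == numpy.pi   (x-mirror + y-mirror == 180-degree rotation)

mirror_x, angle = normalize_mirror((False, True))
# mirror_x == True, angle == numpy.pi    (y-mirror == x-mirror followed by a 180-degree rotation)
```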


def remove_duplicate_vertices(vertices: numpy.ndarray, closed_path: bool = True) -> numpy.ndarray:
    duplicates = (vertices == numpy.roll(vertices, 1, axis=0)).all(axis=1)
    if not closed_path:
        duplicates[0] = False
    return vertices[~duplicates]
    """
    Given a list of vertices, remove any consecutive duplicates.

    Args:
        vertices: `[[x0, y0], [x1, y1], ...]`
        closed_path: If True, `vertices` is interpreted as an implicitly-closed path
            (i.e. the last vertex will be removed if it is the same as the first)

    Returns:
        `vertices` with no consecutive duplicates.
    """
    duplicates = (vertices == numpy.roll(vertices, 1, axis=0)).all(axis=1)
    if not closed_path:
        duplicates[0] = False
    return vertices[~duplicates]
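
A small worked example (input values are mine):

```python
verts = numpy.array([[0, 0], [1, 0], [1, 0], [1, 1], [0, 0]])
remove_duplicate_vertices(verts, closed_path=True)
# -> [[1, 0], [1, 1], [0, 0]]
# One of each adjacent duplicate pair is dropped; because the path is treated as
# closed, the leading [0, 0] is also removed since it matches the final vertex.
```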


def remove_colinear_vertices(vertices: numpy.ndarray, closed_path: bool = True) -> numpy.ndarray:
    '''
    Given a list of vertices, remove any superfluous vertices (i.e.
      those which lie along the line formed by their neighbors)

    :param vertices: Nx2 ndarray of vertices
    :param closed_path: If True, the vertices are assumed to represent an implicitly
        closed path. If False, the path is assumed to be open. Default True.
    :return:
    '''
    # Check for dx0/dy0 == dx1/dy1

    dv = numpy.roll(vertices, 1, axis=0) - vertices    #[y0 - yn1, y1-y0, ...]
    dxdy = dv * numpy.roll(dv, 1, axis=0)[:, ::-1]     # [[dx1*dy0, dx1*dy0], ...]

    dxdy_diff = numpy.abs(numpy.diff(dxdy, axis=1))[:, 0]
    err_mult = 2 * numpy.abs(dxdy).sum(axis=1) + 1e-40

    slopes_equal = (dxdy_diff / err_mult) < 1e-15
    if not closed_path:
        slopes_equal[[0, -1]] = False

    return vertices[~slopes_equal]
    """
    Given a list of vertices, remove any superfluous vertices (i.e.
      those which lie along the line formed by their neighbors)

    Args:
        vertices: Nx2 ndarray of vertices
        closed_path: If `True`, the vertices are assumed to represent an implicitly
            closed path. If `False`, the path is assumed to be open. Default `True`.

    Returns:
        `vertices` with colinear (superfluous) vertices removed.
    """
    vertices = numpy.array(vertices)

    # Check for dx0/dy0 == dx1/dy1

    dv = numpy.roll(vertices, -1, axis=0) - vertices   # [y1-y0, y2-y1, ...]
    dxdy = dv * numpy.roll(dv, 1, axis=0)[:, ::-1]     #[[dx0*(dy_-1), (dx_-1)*dy0], dx1*dy0, dy1*dy0]]

    dxdy_diff = numpy.abs(numpy.diff(dxdy, axis=1))[:, 0]
    err_mult = 2 * numpy.abs(dxdy).sum(axis=1) + 1e-40

    slopes_equal = (dxdy_diff / err_mult) < 1e-15
    if not closed_path:
        slopes_equal[[0, -1]] = False

    return vertices[~slopes_equal]
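
A small worked example (values are mine): a vertex sitting in the middle of a straight edge is removed.

```python
outline = numpy.array([[0, 0], [1, 0], [2, 0], [2, 2], [0, 2]])
remove_colinear_vertices(outline, closed_path=True)
# -> [[0, 0], [2, 0], [2, 2], [0, 2]]   ([1, 0] lies on the segment from [0, 0] to [2, 0])
```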


9
setup.py
@@ -1,13 +1,15 @@
#!/usr/bin/env python3

from setuptools import setup, find_packages
import masque

with open('README.md', 'r') as f:
    long_description = f.read()

with open('masque/VERSION', 'r') as f:
    version = f.read().strip()

setup(name='masque',
      version=masque.version,
      version=version,
      description='Lithography mask library',
      long_description=long_description,
      long_description_content_type='text/markdown',
@@ -15,6 +17,9 @@ setup(name='masque',
      author_email='anewusername@gmail.com',
      url='https://mpxd.net/code/jan/masque',
      packages=find_packages(),
      package_data={
          'masque': ['VERSION']
      },
      install_requires=[
          'numpy',
      ],
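
Shipping `masque/VERSION` via `package_data` lets the installed package read its own version at runtime instead of `setup.py` importing the package; a minimal sketch of that pattern (not necessarily how masque itself implements it):

```python
# Hypothetical __init__.py snippet -- reads the VERSION file installed next to the package.
import pathlib

__version__ = (pathlib.Path(__file__).parent / 'VERSION').read_text().strip()
```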