diff --git a/.gitignore b/.gitignore index 3ef4b5d..3557665 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,19 @@ *.pyc __pycache__ + *.idea + build/ dist/ *.egg-info/ +.mypy_cache/ + +*.swp +*.swo + +*.gds +*.gds.gz +*.svg +*.oas +*.dxf +*.dxf.gz diff --git a/MANIFEST.in b/MANIFEST.in index c28ab72..8120ce7 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,2 +1,3 @@ include README.md include LICENSE.md +include masque/VERSION diff --git a/README.md b/README.md index 150481f..fd16875 100644 --- a/README.md +++ b/README.md @@ -15,15 +15,15 @@ E-beam doses, and the ability to output to multiple formats. Requirements: * python >= 3.5 (written and tested with 3.6) * numpy -* matplotlib (optional, used for visualization functions and text) -* python-gdsii (optional, used for gdsii i/o) -* svgwrite (optional, used for svg output) -* freetype (optional, used for text) +* matplotlib (optional, used for `visualization` functions and `text`) +* python-gdsii (optional, used for `gdsii` i/o) +* svgwrite (optional, used for `svg` output) +* freetype (optional, used for `text`) Install with pip: ```bash -pip3 install masque +pip3 install 'masque[visualization,gdsii,svg,text]' ``` Alternatively, install from git @@ -33,11 +33,7 @@ pip3 install git+https://mpxd.net/code/jan/masque.git@release ## TODO -* Mirroring * Polygon de-embedding - -### Maybe - * Construct from bitmap * Boolean operations on polygons (using pyclipper) * Output to OASIS (using fatamorgana) diff --git a/examples/ellip_grating.py b/examples/ellip_grating.py index 4190cc7..1a12d0e 100644 --- a/examples/ellip_grating.py +++ b/examples/ellip_grating.py @@ -4,6 +4,7 @@ import numpy import masque import masque.file.gdsii +import masque.file.dxf from masque import shapes @@ -13,20 +14,22 @@ def main(): pat.shapes.append(shapes.Arc( radii=(rmin, rmin), width=0.1, - angles=(-numpy.pi/4, numpy.pi/4) + angles=(-numpy.pi/4, numpy.pi/4), + layer=(0, 0), )) + pat.labels.append(masque.Label(string='grating centerline', 
offset=(1, 0), layer=(1, 2))) + pat.scale_by(1000) # pat.visualize() - pat2 = masque.Pattern(name='p2') - pat2.name = 'ellip_grating' + pat2 = pat.copy() + pat2.name = 'grating2' - pat2.subpatterns += [ - masque.SubPattern(pattern=pat, offset=(20e3, 0)), - masque.SubPattern(pattern=pat, offset=(0, 20e3)), - ] + masque.file.gdsii.writefile((pat, pat2), 'out.gds.gz', 1e-9, 1e-3) - masque.file.gdsii.write_dose2dtype((pat, pat2, pat2.copy(), pat2.copy()), 'out.gds', 1e-9, 1e-3) + masque.file.dxf.writefile(pat, 'out.dxf.gz') + dxf, info = masque.file.dxf.readfile('out.dxf.gz') + masque.file.dxf.writefile(dxf, 'reout.dxf.gz') if __name__ == '__main__': diff --git a/examples/test_rep.py b/examples/test_rep.py new file mode 100644 index 0000000..304f2e5 --- /dev/null +++ b/examples/test_rep.py @@ -0,0 +1,96 @@ +import numpy +from numpy import pi + +import masque +import masque.file.gdsii +import masque.file.dxf +from masque import shapes, Pattern, SubPattern, GridRepetition + +from pprint import pprint + + +def main(): + pat = masque.Pattern(name='ellip_grating') + for rmin in numpy.arange(10, 15, 0.5): + pat.shapes.append(shapes.Arc( + radii=(rmin, rmin), + width=0.1, + angles=(0*-numpy.pi/4, numpy.pi/4) + )) + + pat.scale_by(1000) + pat.visualize() + pat2 = pat.copy() + pat2.name = 'grating2' + + pat3 = Pattern('sref_test') + pat3.subpatterns = [ + SubPattern(pat, offset=(1e5, 3e5)), + SubPattern(pat, offset=(2e5, 3e5), rotation=pi/3), + SubPattern(pat, offset=(3e5, 3e5), rotation=pi/2), + SubPattern(pat, offset=(4e5, 3e5), rotation=pi), + SubPattern(pat, offset=(5e5, 3e5), rotation=3*pi/2), + SubPattern(pat, mirrored=(True, False), offset=(1e5, 4e5)), + SubPattern(pat, mirrored=(True, False), offset=(2e5, 4e5), rotation=pi/3), + SubPattern(pat, mirrored=(True, False), offset=(3e5, 4e5), rotation=pi/2), + SubPattern(pat, mirrored=(True, False), offset=(4e5, 4e5), rotation=pi), + SubPattern(pat, mirrored=(True, False), offset=(5e5, 4e5), rotation=3*pi/2), + 
SubPattern(pat, mirrored=(False, True), offset=(1e5, 5e5)), + SubPattern(pat, mirrored=(False, True), offset=(2e5, 5e5), rotation=pi/3), + SubPattern(pat, mirrored=(False, True), offset=(3e5, 5e5), rotation=pi/2), + SubPattern(pat, mirrored=(False, True), offset=(4e5, 5e5), rotation=pi), + SubPattern(pat, mirrored=(False, True), offset=(5e5, 5e5), rotation=3*pi/2), + SubPattern(pat, mirrored=(True, True), offset=(1e5, 6e5)), + SubPattern(pat, mirrored=(True, True), offset=(2e5, 6e5), rotation=pi/3), + SubPattern(pat, mirrored=(True, True), offset=(3e5, 6e5), rotation=pi/2), + SubPattern(pat, mirrored=(True, True), offset=(4e5, 6e5), rotation=pi), + SubPattern(pat, mirrored=(True, True), offset=(5e5, 6e5), rotation=3*pi/2), + ] + + pprint(pat3) + pprint(pat3.subpatterns) + pprint(pat.shapes) + + args = { + 'pattern': pat, + 'a_vector': [1e4, 0], + 'b_vector': [0, 1.5e4], + 'a_count': 3, + 'b_count': 2, + } + pat4 = Pattern('aref_test') + pat4.subpatterns = [ + GridRepetition(**args, offset=(1e5, 3e5)), + GridRepetition(**args, offset=(2e5, 3e5), rotation=pi/3), + GridRepetition(**args, offset=(3e5, 3e5), rotation=pi/2), + GridRepetition(**args, offset=(4e5, 3e5), rotation=pi), + GridRepetition(**args, offset=(5e5, 3e5), rotation=3*pi/2), + GridRepetition(**args, mirrored=(True, False), offset=(1e5, 4e5)), + GridRepetition(**args, mirrored=(True, False), offset=(2e5, 4e5), rotation=pi/3), + GridRepetition(**args, mirrored=(True, False), offset=(3e5, 4e5), rotation=pi/2), + GridRepetition(**args, mirrored=(True, False), offset=(4e5, 4e5), rotation=pi), + GridRepetition(**args, mirrored=(True, False), offset=(5e5, 4e5), rotation=3*pi/2), + GridRepetition(**args, mirrored=(False, True), offset=(1e5, 5e5)), + GridRepetition(**args, mirrored=(False, True), offset=(2e5, 5e5), rotation=pi/3), + GridRepetition(**args, mirrored=(False, True), offset=(3e5, 5e5), rotation=pi/2), + GridRepetition(**args, mirrored=(False, True), offset=(4e5, 5e5), rotation=pi), + 
GridRepetition(**args, mirrored=(False, True), offset=(5e5, 5e5), rotation=3*pi/2), + GridRepetition(**args, mirrored=(True, True), offset=(1e5, 6e5)), + GridRepetition(**args, mirrored=(True, True), offset=(2e5, 6e5), rotation=pi/3), + GridRepetition(**args, mirrored=(True, True), offset=(3e5, 6e5), rotation=pi/2), + GridRepetition(**args, mirrored=(True, True), offset=(4e5, 6e5), rotation=pi), + GridRepetition(**args, mirrored=(True, True), offset=(5e5, 6e5), rotation=3*pi/2), + ] + + masque.file.gdsii.writefile((pat, pat2, pat3, pat4), 'rep.gds.gz', 1e-9, 1e-3) + + cells = list(masque.file.gdsii.readfile('rep.gds.gz')[0].values()) + masque.file.gdsii.writefile(cells, 'rerep.gds.gz', 1e-9, 1e-3) + + masque.file.dxf.writefile(pat4, 'rep.dxf.gz') + dxf, info = masque.file.dxf.readfile('rep.dxf.gz') + masque.file.dxf.writefile(dxf, 'rerep.dxf.gz') + + +if __name__ == '__main__': + main() diff --git a/masque/VERSION b/masque/VERSION new file mode 100644 index 0000000..7e32cd5 --- /dev/null +++ b/masque/VERSION @@ -0,0 +1 @@ +1.3 diff --git a/masque/__init__.py b/masque/__init__.py index acf7cbc..c826d18 100644 --- a/masque/__init__.py +++ b/masque/__init__.py @@ -6,31 +6,38 @@ with some vectorized element types (eg. circles, not just polygons), better support for E-beam doses, and the ability to output to multiple formats. - Pattern is a basic object containing a 2D lithography mask, composed of a list of Shape - objects and a list of SubPattern objects. + `Pattern` is a basic object containing a 2D lithography mask, composed of a list of `Shape` + objects, a list of `Label` objects, and a list of references to other `Patterns` (using + `SubPattern` and `GridRepetition`). - SubPattern provides basic support for nesting Pattern objects within each other, by adding + `SubPattern` provides basic support for nesting `Pattern` objects within each other, by adding offset, rotation, scaling, and other such properties to a Pattern reference. 
+ `GridRepetition` provides support for nesting regular arrays of `Pattern` objects. + Note that the methods for these classes try to avoid copying wherever possible, so unless otherwise noted, assume that arguments are stored by-reference. Dependencies: - - numpy - - matplotlib [Pattern.visualize(...)] - - python-gdsii [masque.file.gdsii] - - svgwrite [masque.file.svg] + - `numpy` + - `matplotlib` [Pattern.visualize(...)] + - `python-gdsii` [masque.file.gdsii] + - `svgwrite` [masque.file.svg] """ -from .error import PatternError +import pathlib + +from .error import PatternError, PatternLockedError from .shapes import Shape from .label import Label -from .subpattern import SubPattern +from .subpattern import SubPattern, subpattern_t from .repetition import GridRepetition from .pattern import Pattern __author__ = 'Jan Petykiewicz' -version = '0.5' +with open(pathlib.Path(__file__).parent / 'VERSION', 'r') as f: + __version__ = f.read().strip() +version = __version__ diff --git a/masque/error.py b/masque/error.py index 8a67b6e..4a5c21a 100644 --- a/masque/error.py +++ b/masque/error.py @@ -7,3 +7,11 @@ class PatternError(Exception): def __str__(self): return repr(self.value) + + +class PatternLockedError(PatternError): + """ + Exception raised when trying to modify a locked pattern + """ + def __init__(self): + PatternError.__init__(self, 'Tried to modify a locked Pattern, subpattern, or shape') diff --git a/masque/file/dxf.py b/masque/file/dxf.py new file mode 100644 index 0000000..335cd80 --- /dev/null +++ b/masque/file/dxf.py @@ -0,0 +1,382 @@ +""" +DXF file format readers and writers +""" +from typing import List, Any, Dict, Tuple, Callable, Union, Sequence, Iterable, Optional +import re +import io +import copy +import base64 +import struct +import logging +import pathlib +import gzip +import numpy +from numpy import pi + +import ezdxf + +from .utils import mangle_name, make_dose_table +from .. 
import Pattern, SubPattern, GridRepetition, PatternError, Label, Shape, subpattern_t +from ..shapes import Polygon, Path +from ..utils import rotation_matrix_2d, get_bit, set_bit, vector2, is_scalar, layer_t +from ..utils import remove_colinear_vertices, normalize_mirror + + +logger = logging.getLogger(__name__) + +logger.warning('DXF support is experimental and only slightly tested!') + + +DEFAULT_LAYER = 'DEFAULT' + + +def write(pattern: Pattern, + stream: io.TextIOBase, + modify_originals: bool = False, + dxf_version='AC1024', + disambiguate_func: Callable[[Iterable[Pattern]], None] = None): + """ + Write a `Pattern` to a DXF file, by first calling `.polygonize()` to change the shapes + into polygons, and then writing patterns as DXF `Block`s, polygons as `LWPolyline`s, + and subpatterns as `Insert`s. + + The top level pattern's name is not written to the DXF file. Nested patterns keep their + names. + + Layer numbers are translated as follows: + int: 1 -> '1' + tuple: (1, 2) -> '1.2' + str: '1.2' -> '1.2' (no change) + + It is often a good idea to run `pattern.subpatternize()` prior to calling this function, + especially if calling `.polygonize()` will result in very many vertices. + + If you want pattern polygonized with non-default arguments, just call `pattern.polygonize()` + prior to calling this function. + + Only `GridRepetition` objects with manhattan basis vectors are preserved as arrays. Since DXF + rotations apply to basis vectors while `masque`'s rotations do not, the basis vectors of an + array with rotated instances must be manhattan _after_ having a compensating rotation applied. + + Args: + patterns: A Pattern or list of patterns to write to the stream. + stream: Stream object to write to. + modify_original: If `True`, the original pattern is modified as part of the writing + process. Otherwise, a copy is made and `deepunlock()`-ed. + Default `False`. 
+ disambiguate_func: Function which takes a list of patterns and alters them + to make their names valid and unique. Default is `disambiguate_pattern_names`. + WARNING: No additional error checking is performed on the results. + """ + #TODO consider supporting DXF arcs? + if disambiguate_func is None: + disambiguate_func = disambiguate_pattern_names + + if not modify_originals: + pattern = pattern.deepcopy().deepunlock() + + # Get a dict of id(pattern) -> pattern + patterns_by_id = pattern.referenced_patterns_by_id() + disambiguate_func(patterns_by_id.values()) + + # Create library + lib = ezdxf.new(dxf_version, setup=True) + msp = lib.modelspace() + _shapes_to_elements(msp, pattern.shapes) + _labels_to_texts(msp, pattern.labels) + _subpatterns_to_refs(msp, pattern.subpatterns) + + # Now create a block for each referenced pattern, and add in any shapes + for pat in patterns_by_id.values(): + assert(pat is not None) + block = lib.blocks.new(name=pat.name) + + _shapes_to_elements(block, pat.shapes) + _labels_to_texts(block, pat.labels) + _subpatterns_to_refs(block, pat.subpatterns) + + lib.write(stream) + + +def writefile(pattern: Pattern, + filename: Union[str, pathlib.Path], + *args, + **kwargs, + ): + """ + Wrapper for `dxf.write()` that takes a filename or path instead of a stream. + + Will automatically compress the file if it has a .gz suffix. + + Args: + pattern: `Pattern` to save + filename: Filename to save to. + *args: passed to `dxf.write` + **kwargs: passed to `dxf.write` + """ + path = pathlib.Path(filename) + if path.suffix == '.gz': + open_func: Callable = gzip.open + else: + open_func = open + + with open_func(path, mode='wt') as stream: + results = write(pattern, stream, *args, **kwargs) + return results + + +def readfile(filename: Union[str, pathlib.Path], + *args, + **kwargs, + ) -> Tuple[Dict[str, Pattern], Dict[str, Any]]: + """ + Wrapper for `dxf.read()` that takes a filename or path instead of a stream. 
+ + Will automatically decompress files with a .gz suffix. + + Args: + filename: Filename to save to. + *args: passed to `dxf.read` + **kwargs: passed to `dxf.read` + """ + path = pathlib.Path(filename) + if path.suffix == '.gz': + open_func: Callable = gzip.open + else: + open_func = open + + with open_func(path, mode='rt') as stream: + results = read(stream, *args, **kwargs) + return results + + +def read(stream: io.TextIOBase, + clean_vertices: bool = True, + ) -> Tuple[Dict[str, Pattern], Dict[str, Any]]: + """ + Read a dxf file and translate it into a dict of `Pattern` objects. DXF `Block`s are + translated into `Pattern` objects; `LWPolyline`s are translated into polygons, and `Insert`s + are translated into `SubPattern` objects. + + If an object has no layer it is set to this module's `DEFAULT_LAYER` ("DEFAULT"). + + Args: + stream: Stream to read from. + clean_vertices: If `True`, remove any redundant vertices when loading polygons. + The cleaning process removes any polygons with zero area or <3 vertices. + Default `True`. + + Returns: + - Top level pattern + """ + lib = ezdxf.read(stream) + msp = lib.modelspace() + + pat = _read_block(msp, clean_vertices) + patterns = [pat] + [_read_block(bb, clean_vertices) for bb in lib.blocks if bb.name != '*Model_Space'] + + # Create a dict of {pattern.name: pattern, ...}, then fix up all subpattern.pattern entries + # according to the subpattern.identifier (which is deleted after use). 
+ patterns_dict = dict(((p.name, p) for p in patterns)) + for p in patterns_dict.values(): + for sp in p.subpatterns: + sp.pattern = patterns_dict[sp.identifier[0]] + del sp.identifier + + library_info = { + 'layers': [ll.dxfattribs() for ll in lib.layers] + } + + return pat, library_info + + +def _read_block(block, clean_vertices): + pat = Pattern(block.name) + for element in block: + eltype = element.dxftype() + if eltype in ('POLYLINE', 'LWPOLYLINE'): + if eltype == 'LWPOLYLINE': + points = numpy.array(element.lwpoints) + else: + points = numpy.array(element.points) + attr = element.dxfattribs() + args = {'layer': attr.get('layer', DEFAULT_LAYER), + } + + if points.shape[1] == 2: + shape = Polygon(**args) + elif points.shape[1] > 2: + if (points[0, 2] != points[:, 2]).any(): + raise PatternError('PolyLine has non-constant width (not yet representable in masque!)') + elif points.shape[1] == 4 and (points[:, 3] != 0).any(): + raise PatternError('LWPolyLine has bulge (not yet representable in masque!)') + else: + width = points[0, 2] + if width == 0: + width = attr.get('const_width', 0) + + if width == 0 and numpy.array_equal(points[0], points[-1]): + shape = Polygon(**args, vertices=points[:-1, :2]) + else: + shape = Path(**args, width=width, vertices=points[:, :2]) + + if clean_vertices: + try: + shape.clean_vertices() + except PatternError: + continue + + pat.shapes.append(shape) + + elif eltype in ('TEXT',): + args = {'offset': element.get_pos()[1][:2], + 'layer': element.dxfattribs().get('layer', DEFAULT_LAYER), + } + string = element.dxfattribs().get('text', '') + height = element.dxfattribs().get('height', 0) + if height != 0: + logger.warning('Interpreting DXF TEXT as a label despite nonzero height. 
' + 'This could be changed in the future by setting a font path in the masque DXF code.') + pat.labels.append(Label(string=string, **args)) +# else: +# pat.shapes.append(Text(string=string, height=height, font_path=????)) + elif eltype in ('INSERT',): + attr = element.dxfattribs() + xscale = attr.get('xscale', 1) + yscale = attr.get('yscale', 1) + if abs(xscale) != abs(yscale): + logger.warning('Masque does not support per-axis scaling; using x-scaling only!') + scale = abs(xscale) + mirrored = (yscale < 0, xscale < 0) + rotation = attr.get('rotation', 0) * pi/180 + + offset = attr.get('insert', (0, 0, 0))[:2] + + args = { + 'offset': offset, + 'scale': scale, + 'mirrored': mirrored, + 'rotation': rotation, + 'pattern': None, + 'identifier': (attr.get('name', None),), + } + + if 'column_count' in attr: + args['a_vector'] = (attr['column_spacing'], 0) + args['b_vector'] = (0, attr['row_spacing']) + args['a_count'] = attr['column_count'] + args['b_count'] = attr['row_count'] + pat.subpatterns.append(GridRepetition(**args)) + else: + pat.subpatterns.append(SubPattern(**args)) + else: + logger.warning(f'Ignoring DXF element {element.dxftype()} (not implemented).') + return pat + + +def _subpatterns_to_refs(block: Union[ezdxf.layouts.BlockLayout, ezdxf.layouts.Modelspace], + subpatterns: List[subpattern_t]): + for subpat in subpatterns: + if subpat.pattern is None: + continue + encoded_name = subpat.pattern.name + + rotation = (subpat.rotation * 180 / numpy.pi) % 360 + attribs = { + 'xscale': subpat.scale * (-1 if subpat.mirrored[1] else 1), + 'yscale': subpat.scale * (-1 if subpat.mirrored[0] else 1), + 'rotation': rotation, + } + + if isinstance(subpat, GridRepetition): + a = subpat.a_vector + b = subpat.b_vector if subpat.b_vector is not None else numpy.zeros(2) + rotated_a = rotation_matrix_2d(-subpat.rotation) @ a + rotated_b = rotation_matrix_2d(-subpat.rotation) @ b + if rotated_a[1] == 0 and rotated_b[0] == 0: + attribs['column_count'] = subpat.a_count + 
attribs['row_count'] = subpat.b_count + attribs['column_spacing'] = rotated_a[0] + attribs['row_spacing'] = rotated_b[1] + block.add_blockref(encoded_name, subpat.offset, dxfattribs=attribs) + elif rotated_a[0] == 0 and rotated_b[1] == 0: + attribs['column_count'] = subpat.b_count + attribs['row_count'] = subpat.a_count + attribs['column_spacing'] = rotated_b[0] + attribs['row_spacing'] = rotated_a[1] + block.add_blockref(encoded_name, subpat.offset, dxfattribs=attribs) + else: + #NOTE: We could still do non-manhattan (but still orthogonal) grids by getting + # creative with counter-rotated nested patterns, but probably not worth it. + # Instead, just break appart the grid into individual elements: + for aa in numpy.arange(subpat.a_count): + for bb in numpy.arange(subpat.b_count): + block.add_blockref(encoded_name, subpat.offset + aa * a + bb * b, dxfattribs=attribs) + else: + block.add_blockref(encoded_name, subpat.offset, dxfattribs=attribs) + + +def _shapes_to_elements(block: Union[ezdxf.layouts.BlockLayout, ezdxf.layouts.Modelspace], + shapes: List[Shape], + polygonize_paths: bool = False): + # Add `LWPolyline`s for each shape. + # Could set do paths with width setting, but need to consider endcaps. 
+ for shape in shapes: + attribs = {'layer': _mlayer2dxf(shape.layer)} + for polygon in shape.to_polygons(): + xy_open = polygon.vertices + polygon.offset + xy_closed = numpy.vstack((xy_open, xy_open[0, :])) + block.add_lwpolyline(xy_closed, dxfattribs=attribs) + + +def _labels_to_texts(block: Union[ezdxf.layouts.BlockLayout, ezdxf.layouts.Modelspace], + labels: List[Label]): + for label in labels: + attribs = {'layer': _mlayer2dxf(label.layer)} + xy = label.offset + block.add_text(label.string, dxfattribs=attribs).set_pos(xy, align='BOTTOM_LEFT') + + +def _mlayer2dxf(layer: layer_t) -> str: + if isinstance(layer, str): + return layer + if isinstance(layer, int): + return str(layer) + if isinstance(layer, tuple): + return f'{layer[0]}.{layer[1]}' + raise PatternError(f'Unknown layer type: {layer} ({type(layer)})') + + +def disambiguate_pattern_names(patterns, + max_name_length: int = 32, + suffix_length: int = 6, + dup_warn_filter: Callable[[str,], bool] = None, # If returns False, don't warn about this name + ): + used_names = [] + for pat in patterns: + sanitized_name = re.compile('[^A-Za-z0-9_\?\$]').sub('_', pat.name) + + i = 0 + suffixed_name = sanitized_name + while suffixed_name in used_names or suffixed_name == '': + suffix = base64.b64encode(struct.pack('>Q', i), b'$?').decode('ASCII') + + suffixed_name = sanitized_name + '$' + suffix[:-1].lstrip('A') + i += 1 + + if sanitized_name == '': + logger.warning('Empty pattern name saved as "{}"'.format(suffixed_name)) + elif suffixed_name != sanitized_name: + if dup_warn_filter is None or dup_warn_filter(pat.name): + logger.warning('Pattern name "{}" ({}) appears multiple times;\n renaming to "{}"'.format( + pat.name, sanitized_name, suffixed_name)) + + if len(suffixed_name) == 0: + # Should never happen since zero-length names are replaced + raise PatternError('Zero-length name after sanitize,\n originally "{}"'.format(pat.name)) + if len(suffixed_name) > max_name_length: + raise PatternError('Pattern name 
"{!r}" length > {} after encode,\n originally "{}"'.format(suffixed_name, max_name_length, pat.name)) + + pat.name = suffixed_name + used_names.append(suffixed_name) + diff --git a/masque/file/gdsii.py b/masque/file/gdsii.py index e7fa39f..9e8314e 100644 --- a/masque/file/gdsii.py +++ b/masque/file/gdsii.py @@ -1,226 +1,188 @@ """ GDSII file format readers and writers + +Note that GDSII references follow the same convention as `masque`, + with this order of operations: + 1. Mirroring + 2. Rotation + 3. Scaling + 4. Offset and array expansion (no mirroring/rotation/scaling applied to offsets) + + Scaling, rotation, and mirroring apply to individual instances, not grid + vectors or offsets. """ +from typing import List, Any, Dict, Tuple, Callable, Union, Sequence, Iterable, Optional +import re +import io +import copy +import numpy +import base64 +import struct +import logging +import pathlib +import gzip + # python-gdsii import gdsii.library import gdsii.structure import gdsii.elements -from typing import List, Any, Dict, Tuple -import re -import numpy -import base64 -import struct -import logging - -from .utils import mangle_name, make_dose_table -from .. import Pattern, SubPattern, GridRepetition, PatternError, Label, Shape +from .utils import mangle_name, make_dose_table, dose2dtype, dtype2dose +from .. 
import Pattern, SubPattern, GridRepetition, PatternError, Label, Shape, subpattern_t from ..shapes import Polygon, Path -from ..utils import rotation_matrix_2d, get_bit, set_bit, vector2, is_scalar -from ..utils import remove_colinear_vertices +from ..utils import rotation_matrix_2d, get_bit, set_bit, vector2, is_scalar, layer_t +from ..utils import remove_colinear_vertices, normalize_mirror - -__author__ = 'Jan Petykiewicz' +#TODO absolute positioning logger = logging.getLogger(__name__) -def write(patterns: Pattern or List[Pattern], - filename: str, +path_cap_map = { + None: Path.Cap.Flush, + 0: Path.Cap.Flush, + 1: Path.Cap.Circle, + 2: Path.Cap.Square, + 4: Path.Cap.SquareCustom, + } + + +def write(patterns: Union[Pattern, List[Pattern]], + stream: io.BufferedIOBase, meters_per_unit: float, logical_units_per_unit: float = 1, - library_name: str = 'masque-gdsii-write'): + library_name: str = 'masque-gdsii-write', + modify_originals: bool = False, + disambiguate_func: Callable[[Iterable[Pattern]], None] = None): """ - Write a Pattern or list of patterns to a GDSII file, by first calling - .polygonize() to change the shapes into polygons, and then writing patterns + Write a `Pattern` or list of patterns to a GDSII file, by first calling + `.polygonize()` to change the shapes into polygons, and then writing patterns as GDSII structures, polygons as boundary elements, and subpatterns as structure references (sref). For each shape, - layer is chosen to be equal to shape.layer if it is an int, - or shape.layer[0] if it is a tuple - datatype is chosen to be shape.layer[1] if available, - otherwise 0 + layer is chosen to be equal to `shape.layer` if it is an int, + or `shape.layer[0]` if it is a tuple + datatype is chosen to be `shape.layer[1]` if available, + otherwise `0` - Note that this function modifies the Pattern. 
+ It is often a good idea to run `pattern.subpatternize()` prior to calling this function, + especially if calling `.polygonize()` will result in very many vertices. - It is often a good idea to run pattern.subpatternize() prior to calling this function, - especially if calling .polygonize() will result in very many vertices. - - If you want pattern polygonized with non-default arguments, just call pattern.polygonize() + If you want pattern polygonized with non-default arguments, just call `pattern.polygonize()` prior to calling this function. - :param patterns: A Pattern or list of patterns to write to file. Modified by this function. - :param filename: Filename to write to. - :param meters_per_unit: Written into the GDSII file, meters per (database) length unit. - All distances are assumed to be an integer multiple of this unit, and are stored as such. - :param logical_units_per_unit: Written into the GDSII file. Allows the GDSII to specify a - "logical" unit which is different from the "database" unit, for display purposes. - Default 1. - :param library_name: Library name written into the GDSII file. - Default 'masque-gdsii-write'. + Args: + patterns: A Pattern or list of patterns to write to the stream. + stream: Stream object to write to. + meters_per_unit: Written into the GDSII file, meters per (database) length unit. + All distances are assumed to be an integer multiple of this unit, and are stored as such. + logical_units_per_unit: Written into the GDSII file. Allows the GDSII to specify a + "logical" unit which is different from the "database" unit, for display purposes. + Default `1`. + library_name: Library name written into the GDSII file. + Default 'masque-gdsii-write'. + modify_originals: If `True`, the original pattern is modified as part of the writing + process. Otherwise, a copy is made and `deepunlock()`-ed. + Default `False`. + disambiguate_func: Function which takes a list of patterns and alters them + to make their names valid and unique. 
Default is `disambiguate_pattern_names`, which + attempts to adhere to the GDSII standard as well as possible. + WARNING: No additional error checking is performed on the results. """ + if isinstance(patterns, Pattern): + patterns = [patterns] + + if disambiguate_func is None: + disambiguate_func = disambiguate_pattern_names + + if not modify_originals: + patterns = [p.deepunlock() for p in copy.deepcopy(patterns)] + # Create library lib = gdsii.library.Library(version=600, name=library_name.encode('ASCII'), logical_unit=logical_units_per_unit, physical_unit=meters_per_unit) - if isinstance(patterns, Pattern): - patterns = [patterns] - # Get a dict of id(pattern) -> pattern patterns_by_id = {id(pattern): pattern for pattern in patterns} for pattern in patterns: - patterns_by_id.update(pattern.referenced_patterns_by_id()) + for i, p in pattern.referenced_patterns_by_id().items(): + patterns_by_id[i] = p - _disambiguate_pattern_names(patterns_by_id.values()) + disambiguate_func(patterns_by_id.values()) # Now create a structure for each pattern, and add in any Boundary and SREF elements for pat in patterns_by_id.values(): structure = gdsii.structure.Structure(name=pat.name) lib.append(structure) - # Add a Boundary element for each shape - structure += _shapes_to_boundaries(pat.shapes) - + structure += _shapes_to_elements(pat.shapes) structure += _labels_to_texts(pat.labels) - - # Add an SREF / AREF for each subpattern entry structure += _subpatterns_to_refs(pat.subpatterns) - with open(filename, mode='wb') as stream: - lib.save(stream) + lib.save(stream) + return -def write_dose2dtype(patterns: Pattern or List[Pattern], - filename: str, - meters_per_unit: float, - *args, - **kwargs, - ) -> List[float]: +def writefile(patterns: Union[List[Pattern], Pattern], + filename: Union[str, pathlib.Path], + *args, + **kwargs, + ): """ - Write a Pattern or list of patterns to a GDSII file, by first calling - .polygonize() to change the shapes into polygons, and then writing 
patterns - as GDSII structures, polygons as boundary elements, and subpatterns as structure - references (sref). + Wrapper for `gdsii.write()` that takes a filename or path instead of a stream. - For each shape, - layer is chosen to be equal to shape.layer if it is an int, - or shape.layer[0] if it is a tuple - datatype is chosen arbitrarily, based on calcualted dose for each shape. - Shapes with equal calcualted dose will have the same datatype. - A list of doses is retured, providing a mapping between datatype - (list index) and dose (list entry). + Will automatically compress the file if it has a .gz suffix. - Note that this function modifies the Pattern(s). - - It is often a good idea to run pattern.subpatternize() prior to calling this function, - especially if calling .polygonize() will result in very many vertices. - - If you want pattern polygonized with non-default arguments, just call pattern.polygonize() - prior to calling this function. - - :param patterns: A Pattern or list of patterns to write to file. Modified by this function. - :param filename: Filename to write to. - :param meters_per_unit: Written into the GDSII file, meters per (database) length unit. - All distances are assumed to be an integer multiple of this unit, and are stored as such. - :param args: passed to masque.file.gdsii.write(). - :param kwargs: passed to masque.file.gdsii.write(). - :returns: A list of doses, providing a mapping between datatype (int, list index) - and dose (float, list entry). + Args: + patterns: `Pattern` or list of patterns to save + filename: Filename to save to. 
+ *args: passed to `gdsii.write` + **kwargs: passed to `gdsii.write` """ - patterns, dose_vals = dose2dtype(patterns) - write(patterns, filename, meters_per_unit, *args, **kwargs) - return dose_vals + path = pathlib.Path(filename) + if path.suffix == '.gz': + open_func: Callable = gzip.open + else: + open_func = open + + with io.BufferedWriter(open_func(path, mode='wb')) as stream: + results = write(patterns, stream, *args, **kwargs) + return results -def dose2dtype(patterns: Pattern or List[Pattern], - ) -> Tuple[List[Pattern], List[float]]: +def readfile(filename: Union[str, pathlib.Path], + *args, + **kwargs, + ) -> Tuple[Dict[str, Pattern], Dict[str, Any]]: """ - For each shape in each pattern, set shape.layer to the tuple - (base_layer, datatype), where: - layer is chosen to be equal to the original shape.layer if it is an int, - or shape.layer[0] if it is a tuple - datatype is chosen arbitrarily, based on calcualted dose for each shape. - Shapes with equal calcualted dose will have the same datatype. - A list of doses is retured, providing a mapping between datatype - (list index) and dose (list entry). + Wrapper for `gdsii.read()` that takes a filename or path instead of a stream. - Note that this function modifies the input Pattern(s). + Will automatically decompress files with a .gz suffix. - :param patterns: A Pattern or list of patterns to write to file. Modified by this function. - :returns: (patterns, dose_list) - patterns: modified input patterns - dose_list: A list of doses, providing a mapping between datatype (int, list index) - and dose (float, list entry). + Args: + filename: Filename to save to. 
+ *args: passed to `gdsii.read` + **kwargs: passed to `gdsii.read` """ - if isinstance(patterns, Pattern): - patterns = [patterns] + path = pathlib.Path(filename) + if path.suffix == '.gz': + open_func: Callable = gzip.open + else: + open_func = open - # Get a dict of id(pattern) -> pattern - patterns_by_id = {id(pattern): pattern for pattern in patterns} - for pattern in patterns: - patterns_by_id.update(pattern.referenced_patterns_by_id()) - - # Get a table of (id(pat), written_dose) for each pattern and subpattern - sd_table = make_dose_table(patterns) - - # Figure out all the unique doses necessary to write this pattern - # This means going through each row in sd_table and adding the dose values needed to write - # that subpattern at that dose level - dose_vals = set() - for pat_id, pat_dose in sd_table: - pat = patterns_by_id[pat_id] - [dose_vals.add(shape.dose * pat_dose) for shape in pat.shapes] - - if len(dose_vals) > 256: - raise PatternError('Too many dose values: {}, maximum 256 when using dtypes.'.format(len(dose_vals))) - - # Create a new pattern for each non-1-dose entry in the dose table - # and update the shapes to reflect their new dose - new_pats = {} # (id, dose) -> new_pattern mapping - for pat_id, pat_dose in sd_table: - if pat_dose == 1: - new_pats[(pat_id, pat_dose)] = patterns_by_id[pat_id] - continue - - pat = patterns_by_id[pat_id].deepcopy() - - encoded_name = mangle_name(pat, pat_dose).encode('ASCII') - if len(encoded_name) == 0: - raise PatternError('Zero-length name after mangle+encode, originally "{}"'.format(pat.name)) - - for shape in pat.shapes: - data_type = dose_vals_list.index(shape.dose * pat_dose) - if is_scalar(shape.layer): - layer = (shape.layer, data_type) - else: - layer = (shape.layer[0], data_type) - - new_pats[(pat_id, pat_dose)] = pat - - # Go back through all the dose-specific patterns and fix up their subpattern entries - for (pat_id, pat_dose), pat in new_pats.items(): - for subpat in pat.subpatterns: - dose_mult = 
subpat.dose * pat_dose - subpat.pattern = new_pats[(id(subpat.pattern), dose_mult)] - - return patterns, list(dose_vals) + with io.BufferedReader(open_func(path, mode='rb')) as stream: + results = read(stream, *args, **kwargs) + return results -def read_dtype2dose(filename: str) -> (List[Pattern], Dict[str, Any]): - """ - Alias for read(filename, use_dtype_as_dose=True) - """ - return read(filename, use_dtype_as_dose=True) - - -def read(filename: str, +def read(stream: io.BufferedIOBase, use_dtype_as_dose: bool = False, clean_vertices: bool = True, - ) -> (Dict[str, Pattern], Dict[str, Any]): + ) -> Tuple[Dict[str, Pattern], Dict[str, Any]]: """ Read a gdsii file and translate it into a dict of Pattern objects. GDSII structures are translated into Pattern objects; boundaries are translated into polygons, and srefs and arefs @@ -232,18 +194,23 @@ def read(filename: str, 'logical_units_per_unit': number of "logical" units displayed by layout tools (typically microns) per database unit - :param filename: Filename specifying a GDSII file to read from. - :param use_dtype_as_dose: If false, set each polygon's layer to (gds_layer, gds_datatype). - If true, set the layer to gds_layer and the dose to gds_datatype. - Default False. - :param clean_vertices: If true, remove any redundant vertices when loading polygons. + Args: + stream: Stream to read from. + use_dtype_as_dose: If `False`, set each polygon's layer to `(gds_layer, gds_datatype)`. + If `True`, set the layer to `gds_layer` and the dose to `gds_datatype`. + Default `False`. + NOTE: This will be deprecated in the future in favor of + `pattern.apply(masque.file.utils.dtype2dose)`. + clean_vertices: If `True`, remove any redundant vertices when loading polygons. The cleaning process removes any polygons with zero area or <3 vertices. - Default True. - :return: Tuple: (Dict of pattern_name:Patterns generated from GDSII structures, Dict of GDSII library info) + Default `True`. 
+ + Returns: + - Dict of pattern_name:Patterns generated from GDSII structures + - Dict of GDSII library info """ - with open(filename, mode='rb') as stream: - lib = gdsii.library.Library.load(stream) + lib = gdsii.library.Library.load(stream) library_info = {'name': lib.name.decode('ASCII'), 'meters_per_unit': lib.physical_unit, @@ -256,46 +223,48 @@ def read(filename: str, for element in structure: # Switch based on element type: if isinstance(element, gdsii.elements.Boundary): - if use_dtype_as_dose: - shape = Polygon(vertices=element.xy[:-1], - dose=element.data_type, - layer=element.layer) - else: - shape = Polygon(vertices=element.xy[:-1], - layer=(element.layer, element.data_type)) + args = {'vertices': element.xy[:-1], + 'layer': (element.layer, element.data_type), + } + + poly = Polygon(**args) + if clean_vertices: try: - shape.clean_vertices() + poly.clean_vertices() except PatternError: continue - pat.shapes.append(shape) + pat.shapes.append(poly) if isinstance(element, gdsii.elements.Path): - cap_map = {0: Path.Cap.Flush, - 1: Path.Cap.Circle, - 2: Path.Cap.Square, - #3: custom? 
- } - if element.path_type in cap_map: - cap = cap_map[element.path_type] + if element.path_type in path_cap_map: + cap = path_cap_map[element.path_type] else: raise PatternError('Unrecognized path type: {}'.format(element.path_type)) - if use_dtype_as_dose: - shape = Path(vertices=element.xy, - dose=element.data_type, - layer=element.layer) - else: - shape = Path(vertices=element.xy, - layer=(element.layer, element.data_type)) + args = {'vertices': element.xy, + 'layer': (element.layer, element.data_type), + 'width': element.width if element.width is not None else 0.0, + 'cap': cap, + } + + if cap == Path.Cap.SquareCustom: + args['cap_extensions'] = numpy.zeros(2) + if element.bgn_extn is not None: + args['cap_extensions'][0] = element.bgn_extn + if element.end_extn is not None: + args['cap_extensions'][1] = element.end_extn + + path = Path(**args) + if clean_vertices: try: - shape.clean_vertices() + path.clean_vertices() except PatternError as err: continue - pat.shapes.append(shape) + pat.shapes.append(path) elif isinstance(element, gdsii.elements.Text): label = Label(offset=element.xy, @@ -309,41 +278,51 @@ def read(filename: str, elif isinstance(element, gdsii.elements.ARef): pat.subpatterns.append(_aref_to_gridrep(element)) + if use_dose_as_dtype: + logger.warning('use_dose_as_dtype will be removed in the future!') + pat = dose2dtype(pat) + patterns.append(pat) # Create a dict of {pattern.name: pattern, ...}, then fix up all subpattern.pattern entries - # according to the subpattern.ref_name (which is deleted after use). + # according to the subpattern.identifier (which is deleted after use). 
patterns_dict = dict(((p.name, p) for p in patterns)) for p in patterns_dict.values(): for sp in p.subpatterns: - sp.pattern = patterns_dict[sp.ref_name.decode('ASCII')] - del sp.ref_name + sp.pattern = patterns_dict[sp.identifier[0].decode('ASCII')] + del sp.identifier return patterns_dict, library_info -def _mlayer2gds(mlayer): - if is_scalar(mlayer): +def _mlayer2gds(mlayer: layer_t) -> Tuple[int, int]: + """ Helper to turn a layer tuple-or-int into a layer and datatype""" + if isinstance(mlayer, int): layer = mlayer data_type = 0 - else: + elif isinstance(mlayer, tuple): layer = mlayer[0] if len(mlayer) > 1: data_type = mlayer[1] else: data_type = 0 + else: + raise PatternError(f'Invalid layer for gdsii: {layer}. Note that gdsii layers cannot be strings.') return layer, data_type def _sref_to_subpat(element: gdsii.elements.SRef) -> SubPattern: - # Helper function to create a SubPattern from an SREF. Sets subpat.pattern to None - # and sets the instance attribute .ref_name to the struct_name. - # - # BUG: "Absolute" means not affected by parent elements. - # That's not currently supported by masque at all, so need to either tag it and - # undo the parent transformations, or implement it in masque. + """ + Helper function to create a SubPattern from an SREF. Sets subpat.pattern to None + and sets the instance .identifier to (struct_name,). + + BUG: + "Absolute" means not affected by parent elements. + That's not currently supported by masque at all, so need to either tag it and + undo the parent transformations, or implement it in masque. 
+ """ subpat = SubPattern(pattern=None, offset=element.xy) - subpat.ref_name = element.struct_name + subpat.identifier = (element.struct_name,) if element.strans is not None: if element.mag is not None: subpat.scale = element.mag @@ -359,22 +338,24 @@ def _sref_to_subpat(element: gdsii.elements.SRef) -> SubPattern: raise PatternError('Absolute rotation is not implemented yet!') # Bit 0 means mirror x-axis if get_bit(element.strans, 15 - 0): - subpat.mirror(axis=0) + subpat.mirrored[0] = 1 return subpat def _aref_to_gridrep(element: gdsii.elements.ARef) -> GridRepetition: - # Helper function to create a GridRepetition from an AREF. Sets gridrep.pattern to None - # and sets the instance attribute .ref_name to the struct_name. - # - # BUG: "Absolute" means not affected by parent elements. - # That's not currently supported by masque at all, so need to either tag it and - # undo the parent transformations, or implement it in masque.i + """ + Helper function to create a GridRepetition from an AREF. Sets gridrep.pattern to None + and sets the instance .identifier to (struct_name,). + BUG: + "Absolute" means not affected by parent elements. + That's not currently supported by masque at all, so need to either tag it and + undo the parent transformations, or implement it in masque. 
+ """ rotation = 0 offset = numpy.array(element.xy[0]) scale = 1 - mirror_signs = numpy.ones(2) + mirror_across_x = False if element.strans is not None: if element.mag is not None: @@ -389,15 +370,11 @@ def _aref_to_gridrep(element: gdsii.elements.ARef) -> GridRepetition: raise PatternError('Absolute rotation is not implemented yet!') # Bit 0 means mirror x-axis if get_bit(element.strans, 15 - 0): - mirror_signs[0] = -1 + mirror_across_x = True counts = [element.cols, element.rows] - vec_a0 = element.xy[1] - offset - vec_b0 = element.xy[2] - offset - - a_vector = numpy.dot(rotation_matrix_2d(-rotation), vec_a0 / scale / counts[0]) * mirror_signs - b_vector = numpy.dot(rotation_matrix_2d(-rotation), vec_b0 / scale / counts[1]) * mirror_signs - + a_vector = (element.xy[1] - offset) / counts[0] + b_vector = (element.xy[2] - offset) / counts[1] gridrep = GridRepetition(pattern=None, a_vector=a_vector, @@ -407,25 +384,28 @@ def _aref_to_gridrep(element: gdsii.elements.ARef) -> GridRepetition: offset=offset, rotation=rotation, scale=scale, - mirrored=(mirror_signs == -1)) - gridrep.ref_name = element.struct_name + mirrored=(mirror_across_x, False)) + gridrep.identifier = (element.struct_name,) return gridrep -def _subpatterns_to_refs(subpatterns: List[SubPattern or GridRepetition] - ) -> List[gdsii.elements.ARef or gdsii.elements.SRef]: - # strans must be set for angle and mag to take effect +def _subpatterns_to_refs(subpatterns: List[subpattern_t] + ) -> List[Union[gdsii.elements.ARef, gdsii.elements.SRef]]: refs = [] for subpat in subpatterns: + if subpat.pattern is None: + continue encoded_name = subpat.pattern.name + # Note: GDS mirrors first and rotates second + mirror_across_x, extra_angle = normalize_mirror(subpat.mirrored) + ref: Union[gdsii.elements.SRef, gdsii.elements.ARef] if isinstance(subpat, GridRepetition): - mirror_signs = (-1) ** numpy.array(subpat.mirrored) xy = numpy.array(subpat.offset) + [ [0, 0], - numpy.dot(rotation_matrix_2d(subpat.rotation), 
subpat.a_vector * mirror_signs) * subpat.scale * subpat.a_count, - numpy.dot(rotation_matrix_2d(subpat.rotation), subpat.b_vector * mirror_signs) * subpat.scale * subpat.b_count, + subpat.a_vector * subpat.a_count, + subpat.b_vector * subpat.b_count, ] ref = gdsii.elements.ARef(struct_name=encoded_name, xy=numpy.round(xy).astype(int), @@ -435,36 +415,40 @@ def _subpatterns_to_refs(subpatterns: List[SubPattern or GridRepetition] ref = gdsii.elements.SRef(struct_name=encoded_name, xy=numpy.round([subpat.offset]).astype(int)) - ref.strans = 0 - ref.angle = subpat.rotation * 180 / numpy.pi - mirror_x, mirror_y = subpat.mirrored - if mirror_x and mirror_y: - ref.angle += 180 - elif mirror_x: - ref.strans = set_bit(ref.strans, 15 - 0, True) - elif mirror_y: - ref.angle += 180 - ref.strans = set_bit(ref.strans, 15 - 0, True) - ref.angle %= 360 + ref.angle = ((subpat.rotation + extra_angle) * 180 / numpy.pi) % 360 + # strans must be non-None for angle and mag to take effect + ref.strans = set_bit(0, 15 - 0, mirror_across_x) ref.mag = subpat.scale refs.append(ref) return refs -def _shapes_to_boundaries(shapes: List[Shape] - ) -> List[gdsii.elements.Boundary]: - # Add a Boundary element for each shape - boundaries = [] +def _shapes_to_elements(shapes: List[Shape], + polygonize_paths: bool = False + ) -> List[Union[gdsii.elements.Boundary, gdsii.elements.Path]]: + elements: List[Union[gdsii.elements.Boundary, gdsii.elements.Path]] = [] + # Add a Boundary element for each shape, and Path elements if necessary for shape in shapes: layer, data_type = _mlayer2gds(shape.layer) - for polygon in shape.to_polygons(): - xy_open = numpy.round(polygon.vertices + polygon.offset).astype(int) - xy_closed = numpy.vstack((xy_open, xy_open[0, :])) - boundaries.append(gdsii.elements.Boundary(layer=layer, - data_type=data_type, - xy=xy_closed)) - return boundaries + if isinstance(shape, Path) and not polygonize_paths: + xy = numpy.round(shape.vertices + shape.offset).astype(int) + width = 
numpy.round(shape.width).astype(int) + path_type = next(k for k, v in path_cap_map.items() if v == shape.cap) #reverse lookup + path = gdsii.elements.Path(layer=layer, + data_type=data_type, + xy=xy) + path.path_type = path_type + path.width = width + elements.append(path) + else: + for polygon in shape.to_polygons(): + xy_open = numpy.round(polygon.vertices + polygon.offset).astype(int) + xy_closed = numpy.vstack((xy_open, xy_open[0, :])) + elements.append(gdsii.elements.Boundary(layer=layer, + data_type=data_type, + xy=xy_closed)) + return elements def _labels_to_texts(labels: List[Label]) -> List[gdsii.elements.Text]: @@ -479,10 +463,21 @@ def _labels_to_texts(labels: List[Label]) -> List[gdsii.elements.Text]: return texts -def _disambiguate_pattern_names(patterns): +def disambiguate_pattern_names(patterns, + max_name_length: int = 32, + suffix_length: int = 6, + dup_warn_filter: Callable[[str,], bool] = None, # If returns False, don't warn about this name + ): used_names = [] for pat in patterns: - sanitized_name = re.compile('[^A-Za-z0-9_\?\$]').sub('_', pat.name) + if len(pat.name) > max_name_length: + shortened_name = pat.name[:max_name_length - suffix_length] + logger.warning('Pattern name "{}" is too long ({}/{} chars),\n'.format(pat.name, len(pat.name), max_name_length) + + ' shortening to "{}" before generating suffix'.format(shortened_name)) + else: + shortened_name = pat.name + + sanitized_name = re.compile('[^A-Za-z0-9_\?\$]').sub('_', shortened_name) i = 0 suffixed_name = sanitized_name @@ -495,14 +490,16 @@ def _disambiguate_pattern_names(patterns): if sanitized_name == '': logger.warning('Empty pattern name saved as "{}"'.format(suffixed_name)) elif suffixed_name != sanitized_name: - logger.warning('Pattern name "{}" appears multiple times; renaming to "{}"'.format(pat.name, suffixed_name)) + if dup_warn_filter is None or dup_warn_filter(pat.name): + logger.warning('Pattern name "{}" ({}) appears multiple times;\n renaming to "{}"'.format( + 
pat.name, sanitized_name, suffixed_name)) encoded_name = suffixed_name.encode('ASCII') if len(encoded_name) == 0: # Should never happen since zero-length names are replaced - raise PatternError('Zero-length name after sanitize+encode, originally "{}"'.format(pat.name)) - if len(encoded_name) > 32: - raise PatternError('Pattern name "{}" length > 32 after encode, originally "{}"'.format(encoded_name, pat.name)) + raise PatternError('Zero-length name after sanitize+encode,\n originally "{}"'.format(pat.name)) + if len(encoded_name) > max_name_length: + raise PatternError('Pattern name "{!r}" length > {} after encode,\n originally "{}"'.format(encoded_name, max_name_length, pat.name)) pat.name = encoded_name used_names.append(suffixed_name) diff --git a/masque/file/oasis.py b/masque/file/oasis.py new file mode 100644 index 0000000..390b82d --- /dev/null +++ b/masque/file/oasis.py @@ -0,0 +1,441 @@ +""" +OASIS file format readers and writers + +Note that OASIS references follow the same convention as `masque`, + with this order of operations: + 1. Mirroring + 2. Rotation + 3. Scaling + 4. Offset and array expansion (no mirroring/rotation/scaling applied to offsets) + + Scaling, rotation, and mirroring apply to individual instances, not grid + vectors or offsets. +""" +from typing import List, Any, Dict, Tuple, Callable, Union, Sequence, Iterable, Optional +import re +import io +import copy +import numpy +import base64 +import struct +import logging +import pathlib +import gzip + +import fatamorgana +import fatamorgana.records as fatrec +from fatamorgana.basic import PathExtensionScheme + +from .utils import mangle_name, make_dose_table +from .. 
import Pattern, SubPattern, GridRepetition, PatternError, Label, Shape, subpattern_t +from ..shapes import Polygon, Path +from ..utils import rotation_matrix_2d, get_bit, set_bit, vector2, is_scalar, layer_t +from ..utils import remove_colinear_vertices, normalize_mirror + + +logger = logging.getLogger(__name__) + + +path_cap_map = { + PathExtensionScheme.Flush: Path.Cap.Flush, + PathExtensionScheme.HalfWidth: Path.Cap.Square, + PathExtensionScheme.Arbitrary: Path.Cap.SquareCustom, + } + + +def write(patterns: Union[Pattern, List[Pattern]], + stream: io.BufferedIOBase, + units_per_micron: int, + layer_map: Dict[str, Union[int, Tuple[int, int]]] = None, + modify_originals: bool = False, + disambiguate_func: Callable[[Iterable[Pattern]], None] = None): + """ + Write a `Pattern` or list of patterns to a OASIS file, writing patterns + as OASIS cells, polygons as Polygon records, and subpatterns as Placement + records. Other shape types may be converted to polygons if no equivalent + record type exists (or is not implemented here yet). #TODO + + For each shape, + layer is chosen to be equal to `shape.layer` if it is an int, + or `shape.layer[0]` if it is a tuple + datatype is chosen to be `shape.layer[1]` if available, + otherwise `0` + If a layer map is provided, layer strings will be converted + automatically, and layer names will be written to the file. + + If you want pattern polygonized with non-default arguments, just call `pattern.polygonize()` + prior to calling this function. + + Args: + patterns: A Pattern or list of patterns to write to file. + stream: Stream object to write to. + units_per_micron: Written into the OASIS file, number of grid steps per micrometer. + All distances are assumed to be an integer multiple of the grid step, and are stored as such. + layer_map: Dictionary which translates + modify_originals: If `True`, the original pattern is modified as part of the writing + process. Otherwise, a copy is made and `deepunlock()`-ed. 
+ Default `False`. + disambiguate_func: Function which takes a list of patterns and alters them + to make their names valid and unique. Default is `disambiguate_pattern_names`. + """ + if isinstance(patterns, Pattern): + patterns = [patterns] + + if layer_map is None: + layer_map = {} + + if disambiguate_func is None: + disambiguate_func = disambiguate_pattern_names + + if not modify_originals: + patterns = [p.deepunlock() for p in copy.deepcopy(patterns)] + + # Create library + lib = fatamorgana.OasisLayout(unit, validation=None) + + for name, layer_num in layer_map.items(): + layer, data_type = _mlayer2oas(layer_num) + lib.layer_names.append( #TODO figure out how to deal with text layers + LayerName(nstring=name, + layer_interval=(layer, layer), + type_interval=(data_type, data_type), + is_textlayer=False)) + + def layer2oas(layer: layer_t) -> Tuple[int, int]: + layer_num = layer_map[layer] if isinstance(layer, str) else layer + return _mlayer2oas(layer_num) + + # Get a dict of id(pattern) -> pattern + patterns_by_id = {id(pattern): pattern for pattern in patterns} + for pattern in patterns: + for i, p in pattern.referenced_patterns_by_id().items(): + patterns_by_id[i] = p + + disambiguate_func(patterns_by_id.values()) + + # Now create a structure for each pattern + for pat in patterns_by_id.values(): + structure = fatamorgana.Cell(name=NString(pat.name)) + lib.cells.append(structure) + + structure.geometry += _shapes_to_elements(pat.shapes, layer2oas) + structure.geometry += _labels_to_texts(pat.labels, layer2oas) + structure.placements += _subpatterns_to_refs(pat.subpatterns) + + lib.write(stream) + return + + +def writefile(patterns: Union[List[Pattern], Pattern], + filename: Union[str, pathlib.Path], + *args, + **kwargs, + ): + """ + Wrapper for `oasis.write()` that takes a filename or path instead of a stream. + + Will automatically compress the file if it has a .gz suffix. 
+ + Args: + patterns: `Pattern` or list of patterns to save + filename: Filename to save to. + *args: passed to `oasis.write` + **kwargs: passed to `oasis.write` + """ + path = pathlib.Path(filename) + if path.suffix == '.gz': + open_func: Callable = gzip.open + else: + open_func = open + + with io.BufferedWriter(open_func(path, mode='wb')) as stream: + results = write(patterns, stream, *args, **kwargs) + return results + + +def readfile(filename: Union[str, pathlib.Path], + *args, + **kwargs, + ) -> Tuple[Dict[str, Pattern], Dict[str, Any]]: + """ + Wrapper for `oasis.read()` that takes a filename or path instead of a stream. + + Will automatically decompress files with a .gz suffix. + + Args: + filename: Filename to save to. + *args: passed to `oasis.read` + **kwargs: passed to `oasis.read` + """ + path = pathlib.Path(filename) + if path.suffix == '.gz': + open_func: Callable = gzip.open + else: + open_func = open + + with io.BufferedReader(open_func(path, mode='rb')) as stream: + results = read(stream, *args, **kwargs) + return results + + +def read(stream: io.BufferedIOBase, + clean_vertices: bool = True, + ) -> Tuple[Dict[str, Pattern], Dict[str, Any]]: + """ + Read a OASIS file and translate it into a dict of Pattern objects. OASIS cells are + translated into Pattern objects; Polygons are translated into polygons, and Placements + are translated into SubPattern or GridRepetition objects. + + Additional library info is returned in a dict, containing: + 'units_per_micrometer': number of database units per micrometer (all values are in database units) + + Args: + stream: Stream to read from. + clean_vertices: If `True`, remove any redundant vertices when loading polygons. + The cleaning process removes any polygons with zero area or <3 vertices. + Default `True`. 
+ + Returns: + - Dict of pattern_name:Patterns generated from GDSII structures + - Dict of GDSII library info + """ + + lib = fatamorgana.OASISLayout.read(stream) + + library_info = {'units_per_micrometer': lib.unit, + } + + patterns = [] + for cell in lib.cells: + pat = Pattern(name=cell.name.string) + for element in cell.geometry: + if element.repetition is not None: + raise PatternError('masque OASIS reader does not implement repetitions for shapes yet') + + # Switch based on element type: + if isinstance(element, fatrec.Polygon): + args = {'vertices': element.point_list, + 'layer': (element.layer, element.data_type) + 'offset': (element.x, element.y), + } + poly = Polygon(**args) + + if clean_vertices: + try: + poly.clean_vertices() + except PatternError: + continue + + pat.shapes.append(poly) + + if isinstance(element, fatrec.Path): + cap_start = path_cap_map[element.extension_start[0]] + cap_end = path_cap_map[element.extension_end[0]] + if cap_start != cap_end: + raise Exception('masque does not support multiple cap types on a single path.') #TODO handle multiple cap types + cap = cap_start + + args = {'vertices': element.point_list, + 'layer': (element.layer, element.data_type) + 'offset': (element.x, element.y), + 'width': element.half_width * 2, + 'cap': cap, + } + + if cap == Path.Cap.SquareCustom: + args['cap_extensions'] = numpy.array((element.extension_start[1], + element.extension_end[1])) + path = Path(**args) + + if clean_vertices: + try: + path.clean_vertices() + except PatternError as err: + continue + + pat.shapes.append(path) + + elif isinstance(element, fatrec.Text): + args = {'layer': (element.layer, element.data_type) + 'offset': (element.x, element.y), + 'string': str(element.string), + } + pat.labels.append(Label(**args)) + + for placement in cell.placements: + pat.subpattterns.append += _placement_to_subpats(placement) + + patterns.append(pat) + + # Create a dict of {pattern.name: pattern, ...}, then fix up all subpattern.pattern entries 
+ # according to the subpattern.identifier (which is deleted after use). + patterns_dict = dict(((p.name, p) for p in patterns)) + for p in patterns_dict.values(): + for sp in p.subpatterns: + sp.pattern = patterns_dict[sp.identifier[0]] + del sp.identifier + + return patterns_dict, library_info + + +def _mlayer2oas(mlayer: layer_t) -> Tuple[int, int]: + """ Helper to turn a layer tuple-or-int into a layer and datatype""" + if isinstance(mlayer, int): + layer = mlayer + data_type = 0 + elif isinstance(mlayer, tuple): + layer = mlayer[0] + if len(mlayer) > 1: + data_type = mlayer[1] + else: + data_type = 0 + else: + raise PatternError(f'Invalid layer for OASIS: {layer}. Note that OASIS layers cannot be strings.') #TODO allow string layers using layer map def + return layer, data_type + + +def _placement_to_subpats(placement: fatrec.Placement) -> List[subpattern_t]: + """ + Helper function to create a SubPattern from a placment. Sets subpat.pattern to None + and sets the instance .identifier to (struct_name,). 
+ """ + xy = numpy.array((placement.x, placement.y)) + kwargs = { + 'pattern': None, + 'mirrored': (placement.flip, False), + 'rotation': float(placement.angle * pi/180) + 'scale': placement.magnification, + 'identifier': (placement.name,), + } + + rep = placement.repetition + if isinstance(rep, fatamorgana.GridRepetition): + subpat = GridRepetition(a_vector=rep.a_vector, + b_vector=rep.b_vector, + a_count=rep.a_count, + b_count=rep.b_count, + offset=xy, + **kwargs) + subpats = [subpat] + elif isinstance(rep, fatamorgana.ArbitraryRepetition): + subpats = [] + for rep_offset in numpy.cumsum(numpy.column_stack((rep.x_displacements, + rep.y_displacements))): + subpats.append(SubPattern(offset=xy + rep_offset, **kwargs)) + elif rep is None + subpats = [SubPattern(offset=xy + rep_offset, **kwargs)] + return subpats + + +def _subpatterns_to_refs(subpatterns: List[subpattern_t] + ) -> List[fatrec.Placement]]: + refs = [] + for subpat in subpatterns: + if subpat.pattern is None: + continue + + # Note: OASIS mirrors first and rotates second + mirror_across_x, extra_angle = normalize_mirror(subpat.mirrored) + xy = numpy.round(subpat.offset).astype(int) + args = { + 'x': xy[0], + 'y': xy[1], + } + + if isinstance(subpat, GridRepetition): + kwargs['rep'] = fatamorgana.GridRepetition( + a_vector=numpy.round(subpat.a_vector).astype(int), + b_vector=numpy.round(subpat.b_vector).astype(int), + a_count=numpy.round(subpat.a_count).astype(int), + b_count=numpy.round(subpat.b_count).astype(int)) + + ref = fatrec.Placement( + name=subpat.pattern.name, + flip=mirror_across_x, + angle=((subpat.rotation + extra_angle) * 180 / numpy.pi) % 360, + magnification=subpat.scale, + **kwargs) + + refs.append(ref) + return refs + + +def _shapes_to_elements(shapes: List[Shape], + layer2oas: Callable[[layer_t], Tuple[int, int]], + polygonize_paths: bool = False, + ) -> List[Union[fatrec.Polygon, fatrec.Path]]: + # Add a Polygon record for each shape, and Path elements if necessary + elements: 
List[Union[fatrec.Polygon, fatrec.Path]] = [] + for shape in shapes: + layer, data_type = layer2oas(shape.layer) + if isinstance(shape, Path) and not polygonize_paths: + offset = numpy.round(shape.offset).astype(int) + points = numpy.round(shape.vertices).astype(int) + half_width = numpy.round(shape.width / 2).astype(int) + path_type = next(k for k, v in path_cap_map.items() if v == shape.cap) #reverse lookup + path = fatrec.Path(layer=layer, + data_type=data_type, + point_list=points, + half_width=half_width, + x=offset[0], + y=offset[1], + extension_start=path_type, #TODO implement multiple cap types? + extension_end=path_type, + ) + elements.append(path) + else: + for polygon in shape.to_polygons(): + points = numpy.round(polygon.vertices).astype(int) + offset = numpy.round(polygon.offset).astype(int) + elements.append(fatrec.Polygon(layer=layer, + data_type=data_type, + x=offset[0], + y=offset[1], + point_list=point_list)) + return elements + + +def _labels_to_texts(labels: List[Label], + layer2oas: Callable[[layer_t], Tuple[int, int]], + ) -> List[fatrec.Text]: + texts = [] + for label in labels: + layer, text_type = layer2oas(label.layer) + xy = numpy.round(label.offset).astype(int) + texts.append(fatrec.Text(layer=layer, + text_type=text_type, + x=xy[0], + y=xy[1], + string=string)) + return texts + + +def disambiguate_pattern_names(patterns, + dup_warn_filter: Callable[[str,], bool] = None, # If returns False, don't warn about this name + ): + used_names = [] + for pat in patterns: + sanitized_name = re.compile('[^A-Za-z0-9_\?\$]').sub('_', pat.name) + + i = 0 + suffixed_name = sanitized_name + while suffixed_name in used_names or suffixed_name == '': + suffix = base64.b64encode(struct.pack('>Q', i), b'$?').decode('ASCII') + + suffixed_name = sanitized_name + '$' + suffix[:-1].lstrip('A') + i += 1 + + if sanitized_name == '': + logger.warning('Empty pattern name saved as "{}"'.format(suffixed_name)) + elif suffixed_name != sanitized_name: + if 
dup_warn_filter is None or dup_warn_filter(pat.name): + logger.warning('Pattern name "{}" ({}) appears multiple times;\n renaming to "{}"'.format( + pat.name, sanitized_name, suffixed_name)) + + encoded_name = suffixed_name.encode('ASCII') + if len(encoded_name) == 0: + # Should never happen since zero-length names are replaced + raise PatternError('Zero-length name after sanitize+encode,\n originally "{}"'.format(pat.name)) + + pat.name = encoded_name + used_names.append(suffixed_name) diff --git a/masque/file/svg.py b/masque/file/svg.py index 6a28c25..deef59a 100644 --- a/masque/file/svg.py +++ b/masque/file/svg.py @@ -1,18 +1,16 @@ """ SVG file format readers and writers """ - +from typing import Dict, Optional import svgwrite import numpy +import warnings from .utils import mangle_name from .. import Pattern -__author__ = 'Jan Petykiewicz' - - -def write(pattern: Pattern, +def writefile(pattern: Pattern, filename: str, custom_attributes: bool=False): """ @@ -23,26 +21,32 @@ def write(pattern: Pattern, Note that this function modifies the Pattern. - If custom_attributes is True, non-standard pattern_layer and pattern_dose attributes + If `custom_attributes` is `True`, non-standard `pattern_layer` and `pattern_dose` attributes are written to the relevant elements. - It is often a good idea to run pattern.subpatternize() on pattern prior to - calling this function, especially if calling .polygonize() will result in very + It is often a good idea to run `pattern.subpatternize()` on pattern prior to + calling this function, especially if calling `.polygonize()` will result in very many vertices. - If you want pattern polygonized with non-default arguments, just call pattern.polygonize() + If you want pattern polygonized with non-default arguments, just call `pattern.polygonize()` prior to calling this function. - :param pattern: Pattern to write to file. Modified by this function. - :param filename: Filename to write to. 
- :param custom_attributes: Whether to write non-standard pattern_layer and - pattern_dose attributes to the SVG elements. + Args: + pattern: Pattern to write to file. Modified by this function. + filename: Filename to write to. + custom_attributes: Whether to write non-standard `pattern_layer` and + `pattern_dose` attributes to the SVG elements. """ # Polygonize pattern pattern.polygonize() - [bounds_min, bounds_max] = pattern.get_bounds() + bounds = pattern.get_bounds() + if bounds is None: + bounds_min, bounds_max = numpy.array([[-1, -1], [1, 1]]) + warnings.warn('Pattern had no bounds (empty?); setting arbitrary viewbox') + else: + bounds_min, bounds_max = bounds viewbox = numpy.hstack((bounds_min - 1, (bounds_max - bounds_min) + 2)) viewbox_string = '{:g} {:g} {:g} {:g}'.format(*viewbox) @@ -52,11 +56,13 @@ def write(pattern: Pattern, debug=(not custom_attributes)) # Get a dict of id(pattern) -> pattern - patterns_by_id = {**(pattern.referenced_patterns_by_id()), id(pattern): pattern} + patterns_by_id = {**(pattern.referenced_patterns_by_id()), id(pattern): pattern} # type: Dict[int, Optional[Pattern]] # Now create a group for each row in sd_table (ie, each pattern + dose combination) # and add in any Boundary and Use elements for pat in patterns_by_id.values(): + if pat is None: + continue svg_group = svg.g(id=mangle_name(pat), fill='blue', stroke='red') for shape in pat.shapes: @@ -71,6 +77,8 @@ def write(pattern: Pattern, svg_group.add(path) for subpat in pat.subpatterns: + if subpat.pattern is None: + continue transform = 'scale({:g}) rotate({:g}) translate({:g},{:g})'.format( subpat.scale, subpat.rotation, subpat.offset[0], subpat.offset[1]) use = svg.use(href='#' + mangle_name(subpat.pattern), transform=transform) @@ -83,25 +91,31 @@ def write(pattern: Pattern, svg.save() -def write_inverted(pattern: Pattern, filename: str): +def writefile_inverted(pattern: Pattern, filename: str): """ - Write an inverted Pattern to an SVG file, by first calling 
.polygonize() and - .flatten() on it to change the shapes into polygons, then drawing a bounding + Write an inverted Pattern to an SVG file, by first calling `.polygonize()` and + `.flatten()` on it to change the shapes into polygons, then drawing a bounding box and drawing the polygons with reverse vertex order inside it, all within - one element. + one `` element. Note that this function modifies the Pattern. - If you want pattern polygonized with non-default arguments, just call pattern.polygonize() + If you want pattern polygonized with non-default arguments, just call `pattern.polygonize()` prior to calling this function. - :param pattern: Pattern to write to file. Modified by this function. - :param filename: Filename to write to. + Args: + pattern: Pattern to write to file. Modified by this function. + filename: Filename to write to. """ # Polygonize and flatten pattern pattern.polygonize().flatten() - [bounds_min, bounds_max] = pattern.get_bounds() + bounds = pattern.get_bounds() + if bounds is None: + bounds_min, bounds_max = numpy.array([[-1, -1], [1, 1]]) + warnings.warn('Pattern had no bounds (empty?); setting arbitrary viewbox') + else: + bounds_min, bounds_max = bounds viewbox = numpy.hstack((bounds_min - 1, (bounds_max - bounds_min) + 2)) viewbox_string = '{:g} {:g} {:g} {:g}'.format(*viewbox) @@ -129,8 +143,11 @@ def poly2path(vertices: numpy.ndarray) -> str: """ Create an SVG path string from an Nx2 list of vertices. - :param vertices: Nx2 array of vertices. - :return: SVG path-string. + Args: + vertices: Nx2 array of vertices. + + Returns: + SVG path-string. 
""" commands = 'M{:g},{:g} '.format(vertices[0][0], vertices[0][1]) for vertex in vertices[1:]: diff --git a/masque/file/utils.py b/masque/file/utils.py index 97e3d36..f70f1da 100644 --- a/masque/file/utils.py +++ b/masque/file/utils.py @@ -7,16 +7,16 @@ from typing import Set, Tuple, List from masque.pattern import Pattern -__author__ = 'Jan Petykiewicz' - - def mangle_name(pattern: Pattern, dose_multiplier: float=1.0) -> str: """ - Create a name using pattern.name, id(pattern), and the dose multiplier. + Create a name using `pattern.name`, `id(pattern)`, and the dose multiplier. - :param pattern: Pattern whose name we want to mangle. - :param dose_multiplier: Dose multiplier to mangle with. - :return: Mangled name. + Args: + pattern: Pattern whose name we want to mangle. + dose_multiplier: Dose multiplier to mangle with. + + Returns: + Mangled name. """ expression = re.compile('[^A-Za-z0-9_\?\$]') full_name = '{}_{}_{}'.format(pattern.name, dose_multiplier, id(pattern)) @@ -26,17 +26,127 @@ def mangle_name(pattern: Pattern, dose_multiplier: float=1.0) -> str: def make_dose_table(patterns: List[Pattern], dose_multiplier: float=1.0) -> Set[Tuple[int, float]]: """ - Create a set containing (id(pat), written_dose) for each pattern (including subpatterns) + Create a set containing `(id(pat), written_dose)` for each pattern (including subpatterns) - :param pattern: Source Patterns. - :param dose_multiplier: Multiplier for all written_dose entries. - :return: {(id(subpat.pattern), written_dose), ...} + Args: + pattern: Source Patterns. + dose_multiplier: Multiplier for all written_dose entries. 
+
+    Returns:
+        `{(id(subpat.pattern), written_dose), ...}`
     """
     dose_table = {(id(pattern), dose_multiplier) for pattern in patterns}
     for pattern in patterns:
         for subpat in pattern.subpatterns:
+            if subpat.pattern is None:
+                continue
             subpat_dose_entry = (id(subpat.pattern), subpat.dose * dose_multiplier)
             if subpat_dose_entry not in dose_table:
                 subpat_dose_table = make_dose_table([subpat.pattern], subpat.dose * dose_multiplier)
                 dose_table = dose_table.union(subpat_dose_table)
     return dose_table
+
+
+def dtype2dose(pattern: Pattern) -> Pattern:
+    """
+    For each shape in the pattern, if the layer is a tuple, set the
+    layer to the tuple's first element and set the dose to the
+    tuple's second element.
+
+    Generally intended for use with `Pattern.apply()`.
+
+    Args:
+        pattern: Pattern to modify
+
+    Returns:
+        pattern
+    """
+    for shape in pattern.shapes:
+        if isinstance(shape.layer, tuple):
+            shape.dose = shape.layer[1]
+            shape.layer = shape.layer[0]
+    return pattern
+
+
+def dose2dtype(patterns: List[Pattern],
+               ) -> Tuple[List[Pattern], List[float]]:
+    """
+    For each shape in each pattern, set shape.layer to the tuple
+    (base_layer, datatype), where:
+        layer is chosen to be equal to the original shape.layer if it is an int,
+            or shape.layer[0] if it is a tuple. `str` layers raise a PatternError.
+        datatype is chosen arbitrarily, based on calculated dose for each shape.
+            Shapes with equal calculated dose will have the same datatype.
+            A list of doses is returned, providing a mapping between datatype
+            (list index) and dose (list entry).
+
+    Note that this function modifies the input Pattern(s).
+
+    Args:
+        patterns: A `Pattern` or list of patterns to modify. Modified by this function.
+
+    Returns:
+        (patterns, dose_list)
+            patterns: modified input patterns
+            dose_list: A list of doses, providing a mapping between datatype (int, list index)
+                and dose (float, list entry). 
+ """ + # Get a dict of id(pattern) -> pattern + patterns_by_id = {id(pattern): pattern for pattern in patterns} + for pattern in patterns: + for i, p in pattern.referenced_patterns_by_id().items(): + patterns_by_id[i] = p + + # Get a table of (id(pat), written_dose) for each pattern and subpattern + sd_table = make_dose_table(patterns) + + # Figure out all the unique doses necessary to write this pattern + # This means going through each row in sd_table and adding the dose values needed to write + # that subpattern at that dose level + dose_vals = set() + for pat_id, pat_dose in sd_table: + pat = patterns_by_id[pat_id] + for shape in pat.shapes: + dose_vals.add(shape.dose * pat_dose) + + if len(dose_vals) > 256: + raise PatternError('Too many dose values: {}, maximum 256 when using dtypes.'.format(len(dose_vals))) + + dose_vals_list = list(dose_vals) + + # Create a new pattern for each non-1-dose entry in the dose table + # and update the shapes to reflect their new dose + new_pats = {} # (id, dose) -> new_pattern mapping + for pat_id, pat_dose in sd_table: + if pat_dose == 1: + new_pats[(pat_id, pat_dose)] = patterns_by_id[pat_id] + continue + + old_pat = patterns_by_id[pat_id] + pat = old_pat.copy() # keep old subpatterns + pat.shapes = copy.deepcopy(old_pat.shapes) + pat.labels = copy.deepcopy(old_pat.labels) + + encoded_name = mangle_name(pat, pat_dose) + if len(encoded_name) == 0: + raise PatternError('Zero-length name after mangle+encode, originally "{}"'.format(pat.name)) + pat.name = encoded_name + + for shape in pat.shapes: + data_type = dose_vals_list.index(shape.dose * pat_dose) + if isinstance(shape.layer, int): + shape.layer = (shape.layer, data_type) + elif isinstance(shape.layer, tuple): + shape.layer = (shape.layer[0], data_type) + else: + raise PatternError(f'Invalid layer for gdsii: {shape.layer}') + + new_pats[(pat_id, pat_dose)] = pat + + # Go back through all the dose-specific patterns and fix up their subpattern entries + for (pat_id, 
pat_dose), pat in new_pats.items(): + for subpat in pat.subpatterns: + dose_mult = subpat.dose * pat_dose + subpat.pattern = new_pats[(id(subpat.pattern), dose_mult)] + + return patterns, dose_vals_list diff --git a/masque/label.py b/masque/label.py index b3bbb6f..df7f3cf 100644 --- a/masque/label.py +++ b/masque/label.py @@ -1,28 +1,37 @@ -from typing import List, Tuple +from typing import List, Tuple, Dict import copy import numpy from numpy import pi -from . import PatternError -from .utils import is_scalar, vector2, rotation_matrix_2d - - -__author__ = 'Jan Petykiewicz' +from .error import PatternError, PatternLockedError +from .utils import is_scalar, vector2, rotation_matrix_2d, layer_t class Label: """ - A circle, which has a position and radius. + A text annotation with a position and layer (but no size; it is not drawn) """ + __slots__ = ('_offset', '_layer', '_string', 'identifier', 'locked') - # [x_offset, y_offset] - _offset = numpy.array([0.0, 0.0]) # type: numpy.ndarray + _offset: numpy.ndarray + """ [x_offset, y_offset] """ - # Layer (integer >= 0) - _layer = 0 # type: int or Tuple + _layer: layer_t + """ Layer (integer >= 0, or 2-Tuple of integers) """ - # Label string - _string = None # type: str + _string: str + """ Label string """ + + identifier: Tuple + """ Arbitrary identifier tuple, useful for keeping track of history when flattening """ + + locked: bool + """ If `True`, any changes to the label will raise a `PatternLockedError` """ + + def __setattr__(self, name, value): + if self.locked and name != 'locked': + raise PatternLockedError() + object.__setattr__(self, name, value) # ---- Properties # offset property @@ -30,8 +39,6 @@ class Label: def offset(self) -> numpy.ndarray: """ [x, y] offset - - :return: [x_offset, y_offset] """ return self._offset @@ -42,20 +49,18 @@ class Label: if val.size != 2: raise PatternError('Offset must be convertible to size-2 ndarray') - self._offset = val.flatten() + self._offset = val.flatten().astype(float) 
# layer property @property - def layer(self) -> int or Tuple[int]: + def layer(self) -> layer_t: """ - Layer number (int or tuple of ints) - - :return: Layer + Layer number or name (int, tuple of ints, or string) """ return self._layer @layer.setter - def layer(self, val: int or List[int]): + def layer(self, val: layer_t): self._layer = val # string property @@ -63,8 +68,6 @@ class Label: def string(self) -> str: """ Label string (str) - - :return: string """ return self._string @@ -74,39 +77,58 @@ class Label: def __init__(self, string: str, - offset: vector2=(0.0, 0.0), - layer: int=0): + offset: vector2 = (0.0, 0.0), + layer: layer_t = 0, + locked: bool = False): + object.__setattr__(self, 'locked', False) + self.identifier = () self.string = string - self.offset = numpy.array(offset, dtype=float) + self.offset = numpy.array(offset, dtype=float, copy=True) self.layer = layer + self.locked = locked + def __copy__(self) -> 'Label': + return Label(string=self.string, + offset=self.offset.copy(), + layer=self.layer, + locked=self.locked) + + def __deepcopy__(self, memo: Dict = None) -> 'Label': + memo = {} if memo is None else memo + new = copy.copy(self).unlock() + new._offset = self._offset.copy() + new.locked = self.locked + return new - # ---- Non-abstract methods def copy(self) -> 'Label': """ - Returns a deep copy of the shape. - - :return: Deep copy of self + Returns a deep copy of the label. """ return copy.deepcopy(self) def translate(self, offset: vector2) -> 'Label': """ - Translate the shape by the given offset + Translate the label by the given offset - :param offset: [x_offset, y,offset] - :return: self + Args: + offset: [x_offset, y,offset] + + Returns: + self """ self.offset += offset return self def rotate_around(self, pivot: vector2, rotation: float) -> 'Label': """ - Rotate the shape around a point. + Rotate the label around a point. 
- :param pivot: Point (x, y) to rotate around - :param rotation: Angle to rotate by (counterclockwise, radians) - :return: self + Args: + pivot: Point (x, y) to rotate around + rotation: Angle to rotate by (counterclockwise, radians) + + Returns: + self """ pivot = numpy.array(pivot, dtype=float) self.translate(-pivot) @@ -122,8 +144,33 @@ class Label: bounds = [self.offset, self.offset] - :return: Bounds [[xmin, xmax], [ymin, ymax]] + Returns: + Bounds [[xmin, xmax], [ymin, ymax]] """ return numpy.array([self.offset, self.offset]) + def lock(self) -> 'Label': + """ + Lock the Label, causing any modifications to raise an exception. + Return: + self + """ + self.offset.flags.writeable = False + object.__setattr__(self, 'locked', True) + return self + + def unlock(self) -> 'Label': + """ + Unlock the Label, re-allowing changes. + + Return: + self + """ + object.__setattr__(self, 'locked', False) + self.offset.flags.writeable = True + return self + + def __repr__(self) -> str: + locked = ' L' if self.locked else '' + return f'