"""
GDSII file format readers and writers
"""

# python-gdsii
import gdsii.library
import gdsii.structure
import gdsii.elements

from typing import List, Any, Dict, Tuple, Callable, Union, Optional, Iterable
import re
import io
import copy
import numpy
import base64
import struct
import logging
import pathlib
import gzip

from .utils import mangle_name, make_dose_table
from .. import Pattern, SubPattern, GridRepetition, PatternError, Label, Shape
from ..shapes import Polygon, Path
from ..utils import rotation_matrix_2d, get_bit, set_bit, vector2, is_scalar, layer_t
from ..utils import remove_colinear_vertices, normalize_mirror

#TODO document how GDS rotation / mirror works
#TODO absolute positioning

logger = logging.getLogger(__name__)


path_cap_map = {
    None: Path.Cap.Flush,
    0: Path.Cap.Flush,
    1: Path.Cap.Circle,
    2: Path.Cap.Square,
    4: Path.Cap.SquareCustom,
}


def write(patterns: Union[Pattern, List[Pattern]],
          stream: io.BufferedIOBase,
          meters_per_unit: float,
          logical_units_per_unit: float = 1,
          library_name: str = 'masque-gdsii-write',
          modify_originals: bool = False,
          disambiguate_func: Optional[Callable[[List[Pattern]], None]] = None):
    """
    Write a `Pattern` or list of patterns to a GDSII file, by first calling
    `.polygonize()` to change the shapes into polygons, and then writing patterns
    as GDSII structures, polygons as boundary elements, and subpatterns as structure
    references (sref).

    For each shape,
        layer is chosen to be equal to `shape.layer` if it is an int,
            or `shape.layer[0]` if it is a tuple
        datatype is chosen to be `shape.layer[1]` if available,
            otherwise `0`

    It is often a good idea to run `pattern.subpatternize()` prior to calling this function,
    especially if calling `.polygonize()` will result in very many vertices.

    If you want the patterns polygonized with non-default arguments, call `pattern.polygonize()`
    prior to calling this function.

    Args:
        patterns: A `Pattern` or list of patterns to write to the stream.
        stream: Stream object to write to.
        meters_per_unit: Written into the GDSII file, meters per (database) length unit.
            All distances are assumed to be an integer multiple of this unit, and are stored as such.
        logical_units_per_unit: Written into the GDSII file. Allows the GDSII to specify a
            "logical" unit which is different from the "database" unit, for display purposes.
            Default `1`.
        library_name: Library name written into the GDSII file.
            Default 'masque-gdsii-write'.
        modify_originals: If `True`, the original patterns are modified as part of the writing
            process. Otherwise, a copy is made and `deepunlock()`-ed.
            Default `False`.
        disambiguate_func: Function which takes a list of patterns and alters them
            to make their names valid and unique. Default is `disambiguate_pattern_names`, which
            attempts to adhere to the GDSII standard as well as possible.
            WARNING: No additional error checking is performed on the results.
    """
    if isinstance(patterns, Pattern):
        patterns = [patterns]

    if disambiguate_func is None:
        disambiguate_func = disambiguate_pattern_names

    if not modify_originals:
        patterns = [p.deepunlock() for p in copy.deepcopy(patterns)]

    # Create library
    lib = gdsii.library.Library(version=600,
                                name=library_name.encode('ASCII'),
                                logical_unit=logical_units_per_unit,
                                physical_unit=meters_per_unit)

    # Get a dict of id(pattern) -> pattern
    patterns_by_id = {id(pattern): pattern for pattern in patterns}
    for pattern in patterns:
        for i, p in pattern.referenced_patterns_by_id().items():
            if p is not None:
                patterns_by_id[i] = p

    disambiguate_func(list(patterns_by_id.values()))

    # Now create a structure for each pattern, and add in any Boundary and SREF elements
    for pat in patterns_by_id.values():
        structure = gdsii.structure.Structure(name=pat.name)
        lib.append(structure)

        structure += _shapes_to_elements(pat.shapes)
        structure += _labels_to_texts(pat.labels)
        structure += _subpatterns_to_refs(pat.subpatterns)

    lib.save(stream)
    return
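
# Usage sketch (illustrative, not part of the original module): write a single
# hypothetical pattern `my_pattern` to an in-memory stream, with one database
# unit set to 1 nm:
#
#     buf = io.BytesIO()
#     write(my_pattern, buf, meters_per_unit=1e-9)
#     gds_bytes = buf.getvalue()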


def writefile(patterns: Union[List[Pattern], Pattern],
              filename: Union[str, pathlib.Path],
              *args,
              **kwargs,
              ):
    """
    Wrapper for `write()` that takes a filename or path instead of a stream.

    Will automatically compress the file if it has a .gz suffix.

    Args:
        patterns: `Pattern` or list of patterns to save
        filename: Filename to save to.
        *args: passed to `write()`
        **kwargs: passed to `write()`
    """
    path = pathlib.Path(filename)
    if path.suffix == '.gz':
        open_func = gzip.open
    else:
        open_func = open

    with io.BufferedWriter(open_func(path, mode='wb')) as stream:
        results = write(patterns, stream, *args, **kwargs)
    return results
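
# Usage sketch (illustrative; `my_pattern` is hypothetical): a '.gz' suffix
# selects `gzip.open` above, so the output is compressed transparently:
#
#     writefile([my_pattern], 'layout.gds.gz', meters_per_unit=1e-9)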


def dose2dtype(patterns: List[Pattern],
               ) -> Tuple[List[Pattern], List[float]]:
    """
    For each shape in each pattern, set shape.layer to the tuple
    (base_layer, datatype), where:
        layer is chosen to be equal to the original shape.layer if it is an int,
            or shape.layer[0] if it is a tuple
        datatype is chosen arbitrarily, based on calculated dose for each shape.
            Shapes with equal calculated dose will have the same datatype.
            A list of doses is returned, providing a mapping between datatype
            (list index) and dose (list entry).

    Note that this function modifies the input Pattern(s).

    Args:
        patterns: A list of patterns to convert. Modified by this function.

    Returns:
        (patterns, dose_list)
            patterns: modified input patterns
            dose_list: A list of doses, providing a mapping between datatype (int, list index)
                and dose (float, list entry).
    """
    # Get a dict of id(pattern) -> pattern
    patterns_by_id = {id(pattern): pattern for pattern in patterns}
    for pattern in patterns:
        for i, p in pattern.referenced_patterns_by_id().items():
            if p is not None:
                patterns_by_id[i] = p

    # Get a table of (id(pat), written_dose) for each pattern and subpattern
    sd_table = make_dose_table(patterns)

    # Figure out all the unique doses necessary to write this pattern
    #  This means going through each row in sd_table and adding the dose values needed to write
    #  that subpattern at that dose level
    dose_vals = set()
    for pat_id, pat_dose in sd_table:
        pat = patterns_by_id[pat_id]
        dose_vals.update(shape.dose * pat_dose for shape in pat.shapes)

    if len(dose_vals) > 256:
        raise PatternError('Too many dose values: {}, maximum 256 when using dtypes.'.format(len(dose_vals)))

    dose_vals_list = list(dose_vals)

    # Create a new pattern for each non-1-dose entry in the dose table
    #  and update the shapes to reflect their new dose
    new_pats = {}       # (id, dose) -> new_pattern mapping
    for pat_id, pat_dose in sd_table:
        if pat_dose == 1:
            new_pats[(pat_id, pat_dose)] = patterns_by_id[pat_id]
            continue

        old_pat = patterns_by_id[pat_id]
        pat = old_pat.copy()        # keep old subpatterns
        pat.shapes = copy.deepcopy(old_pat.shapes)
        pat.labels = copy.deepcopy(old_pat.labels)

        encoded_name = mangle_name(pat, pat_dose)
        if len(encoded_name) == 0:
            raise PatternError('Zero-length name after mangle+encode, originally "{}"'.format(pat.name))
        pat.name = encoded_name

        for shape in pat.shapes:
            data_type = dose_vals_list.index(shape.dose * pat_dose)
            if isinstance(shape.layer, int):
                shape.layer = (shape.layer, data_type)
            else:
                shape.layer = (shape.layer[0], data_type)

        new_pats[(pat_id, pat_dose)] = pat

    # Go back through all the dose-specific patterns and fix up their subpattern entries
    for (pat_id, pat_dose), pat in new_pats.items():
        for subpat in pat.subpatterns:
            dose_mult = subpat.dose * pat_dose
            subpat.pattern = new_pats[(id(subpat.pattern), dose_mult)]

    return patterns, dose_vals_list
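
# Usage sketch (illustrative; `my_patterns` is a hypothetical pattern list):
#
#     pats, dose_list = dose2dtype(my_patterns)
#
# A shape whose effective dose was `dose_list[n]` is now tagged with layer
# `(base_layer, n)`, so the datatype `n` doubles as an index back into `dose_list`.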


def readfile(filename: Union[str, pathlib.Path],
             *args,
             **kwargs,
             ) -> Tuple[Dict[str, Pattern], Dict[str, Any]]:
    """
    Wrapper for `read()` that takes a filename or path instead of a stream.

    Will automatically decompress files with a .gz suffix.

    Args:
        filename: Filename to read from.
        *args: passed to `read()`
        **kwargs: passed to `read()`
    """
    path = pathlib.Path(filename)
    if path.suffix == '.gz':
        open_func = gzip.open
    else:
        open_func = open

    with io.BufferedReader(open_func(path, mode='rb')) as stream:
        results = read(stream, *args, **kwargs)
    return results
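
# Usage sketch (illustrative): read back the compressed file written above;
# decompression is keyed off the '.gz' suffix, mirroring `writefile()`:
#
#     patterns_dict, library_info = readfile('layout.gds.gz')
#     nm_per_db_unit = library_info['meters_per_unit'] * 1e9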


def read(stream: io.BufferedIOBase,
         use_dtype_as_dose: bool = False,
         clean_vertices: bool = True,
         ) -> Tuple[Dict[str, Pattern], Dict[str, Any]]:
    """
    Read a gdsii file and translate it into a dict of Pattern objects. GDSII structures are
    translated into Pattern objects; boundaries are translated into polygons, and srefs and arefs
    are translated into SubPattern objects.

    Additional library info is returned in a dict, containing:
        'name': name of the library
        'meters_per_unit': number of meters per database unit (all values are in database units)
        'logical_units_per_unit': number of "logical" units displayed by layout tools
            (typically microns) per database unit

    Args:
        stream: Stream to read the GDSII data from.
        use_dtype_as_dose: If `False`, set each polygon's layer to `(gds_layer, gds_datatype)`.
            If `True`, set the layer to `gds_layer` and the dose to `gds_datatype`.
            Default `False`.
        clean_vertices: If `True`, remove any redundant vertices when loading polygons.
            The cleaning process removes any polygons with zero area or <3 vertices.
            Default `True`.

    Returns:
        - Dict of pattern_name:Patterns generated from GDSII structures
        - Dict of GDSII library info
    """
    lib = gdsii.library.Library.load(stream)

    library_info = {'name': lib.name.decode('ASCII'),
                    'meters_per_unit': lib.physical_unit,
                    'logical_units_per_unit': lib.logical_unit,
                    }

    patterns = []
    for structure in lib:
        pat = Pattern(name=structure.name.decode('ASCII'))
        for element in structure:
            # Switch based on element type:
            if isinstance(element, gdsii.elements.Boundary):
                args = {'vertices': element.xy[:-1]}

                if use_dtype_as_dose:
                    args['dose'] = element.data_type
                    args['layer'] = element.layer
                else:
                    args['layer'] = (element.layer, element.data_type)

                shape = Polygon(**args)

                if clean_vertices:
                    try:
                        shape.clean_vertices()
                    except PatternError:
                        continue

                pat.shapes.append(shape)

            elif isinstance(element, gdsii.elements.Path):
                if element.path_type in path_cap_map:
                    cap = path_cap_map[element.path_type]
                else:
                    raise PatternError('Unrecognized path type: {}'.format(element.path_type))

                args = {'vertices': element.xy,
                        'width': element.width if element.width is not None else 0.0,
                        'cap': cap,
                        }

                if cap == Path.Cap.SquareCustom:
                    args['cap_extensions'] = numpy.zeros(2)
                    if element.bgn_extn is not None:
                        args['cap_extensions'][0] = element.bgn_extn
                    if element.end_extn is not None:
                        args['cap_extensions'][1] = element.end_extn

                if use_dtype_as_dose:
                    args['dose'] = element.data_type
                    args['layer'] = element.layer
                else:
                    args['layer'] = (element.layer, element.data_type)

                shape = Path(**args)

                if clean_vertices:
                    try:
                        shape.clean_vertices()
                    except PatternError:
                        continue

                pat.shapes.append(shape)

            elif isinstance(element, gdsii.elements.Text):
                label = Label(offset=element.xy,
                              layer=(element.layer, element.text_type),
                              string=element.string.decode('ASCII'))
                pat.labels.append(label)

            elif isinstance(element, gdsii.elements.SRef):
                pat.subpatterns.append(_sref_to_subpat(element))

            elif isinstance(element, gdsii.elements.ARef):
                pat.subpatterns.append(_aref_to_gridrep(element))

        patterns.append(pat)

    # Create a dict of {pattern.name: pattern, ...}, then fix up all subpattern.pattern entries
    #  according to the subpattern.identifier (which is deleted after use).
    patterns_dict = {p.name: p for p in patterns}
    for p in patterns_dict.values():
        for sp in p.subpatterns:
            sp.pattern = patterns_dict[sp.identifier[0].decode('ASCII')]
            del sp.identifier

    return patterns_dict, library_info
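
# Usage sketch (illustrative): read from an already-open stream, mapping each
# element's GDSII datatype onto the shape dose instead of the layer tuple:
#
#     with open('layout.gds', 'rb') as f:
#         patterns_dict, library_info = read(f, use_dtype_as_dose=True)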


def _mlayer2gds(mlayer: layer_t) -> Tuple[int, int]:
    """Helper to turn a layer tuple-or-int into a (layer, datatype) tuple."""
    if is_scalar(mlayer):
        layer = mlayer
        data_type = 0
    else:
        layer = mlayer[0]
        if len(mlayer) > 1:
            data_type = mlayer[1]
        else:
            data_type = 0
    return layer, data_type
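
# For example (illustrative): _mlayer2gds(5) == (5, 0), while
# _mlayer2gds((5, 2)) == (5, 2).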


def _sref_to_subpat(element: gdsii.elements.SRef) -> SubPattern:
    """
    Helper function to create a SubPattern from an SREF. Sets subpat.pattern to None
    and sets the instance .identifier to (struct_name,).

    BUG:
        "Absolute" means not affected by parent elements.
        That's not currently supported by masque at all, so need to either tag it and
        undo the parent transformations, or implement it in masque.
    """
    subpat = SubPattern(pattern=None, offset=element.xy)
    subpat.identifier = (element.struct_name,)
    if element.strans is not None:
        if element.mag is not None:
            subpat.scale = element.mag
            # Bit 13 means absolute scale
            if get_bit(element.strans, 15 - 13):
                #subpat.offset *= subpat.scale
                raise PatternError('Absolute scale is not implemented yet!')
        if element.angle is not None:
            subpat.rotation = element.angle * numpy.pi / 180
            # Bit 14 means absolute rotation
            if get_bit(element.strans, 15 - 14):
                #subpat.offset = numpy.dot(rotation_matrix_2d(subpat.rotation), subpat.offset)
                raise PatternError('Absolute rotation is not implemented yet!')
        # Bit 0 means mirror x-axis
        if get_bit(element.strans, 15 - 0):
            subpat.mirrored[0] = 1
    return subpat


def _aref_to_gridrep(element: gdsii.elements.ARef) -> GridRepetition:
    """
    Helper function to create a GridRepetition from an AREF. Sets gridrep.pattern to None
    and sets the instance .identifier to (struct_name,).

    BUG:
        "Absolute" means not affected by parent elements.
        That's not currently supported by masque at all, so need to either tag it and
        undo the parent transformations, or implement it in masque.
    """
    rotation = 0
    offset = numpy.array(element.xy[0])
    scale = 1
    mirror_across_x = False

    if element.strans is not None:
        if element.mag is not None:
            scale = element.mag
            # Bit 13 means absolute scale
            if get_bit(element.strans, 15 - 13):
                raise PatternError('Absolute scale is not implemented yet!')
        if element.angle is not None:
            rotation = element.angle * numpy.pi / 180
            # Bit 14 means absolute rotation
            if get_bit(element.strans, 15 - 14):
                raise PatternError('Absolute rotation is not implemented yet!')
        # Bit 0 means mirror x-axis
        if get_bit(element.strans, 15 - 0):
            mirror_across_x = True

    counts = [element.cols, element.rows]
    a_vector = (element.xy[1] - offset) / counts[0]
    b_vector = (element.xy[2] - offset) / counts[1]

    gridrep = GridRepetition(pattern=None,
                             a_vector=a_vector,
                             b_vector=b_vector,
                             a_count=counts[0],
                             b_count=counts[1],
                             offset=offset,
                             rotation=rotation,
                             scale=scale,
                             mirrored=(mirror_across_x, False))
    gridrep.identifier = (element.struct_name,)

    return gridrep


def _subpatterns_to_refs(subpatterns: List[Union[SubPattern, GridRepetition]]
                         ) -> List[Union[gdsii.elements.SRef, gdsii.elements.ARef]]:
    refs = []
    for subpat in subpatterns:
        if subpat.pattern is None:
            continue
        encoded_name = subpat.pattern.name

        # Note: GDS mirrors first and rotates second
        mirror_across_x, extra_angle = normalize_mirror(subpat.mirrored)
        if isinstance(subpat, GridRepetition):
            xy = numpy.array(subpat.offset) + [
                  [0, 0],
                  subpat.a_vector * subpat.a_count,
                  subpat.b_vector * subpat.b_count,
                 ]
            ref = gdsii.elements.ARef(struct_name=encoded_name,
                                      xy=numpy.round(xy).astype(int),
                                      cols=numpy.round(subpat.a_count).astype(int),
                                      rows=numpy.round(subpat.b_count).astype(int))
        else:
            ref = gdsii.elements.SRef(struct_name=encoded_name,
                                      xy=numpy.round([subpat.offset]).astype(int))

        ref.angle = ((subpat.rotation + extra_angle) * 180 / numpy.pi) % 360
        # strans must be non-None for angle and mag to take effect
        ref.strans = set_bit(0, 15 - 0, mirror_across_x)
        ref.mag = subpat.scale

        refs.append(ref)
    return refs


def _shapes_to_elements(shapes: List[Shape],
                        polygonize_paths: bool = False
                        ) -> List[Union[gdsii.elements.Boundary, gdsii.elements.Path]]:
    elements = []
    # Add a Boundary element for each shape, and Path elements if necessary
    for shape in shapes:
        layer, data_type = _mlayer2gds(shape.layer)
        if isinstance(shape, Path) and not polygonize_paths:
            xy = numpy.round(shape.vertices + shape.offset).astype(int)
            width = numpy.round(shape.width).astype(int)
            path_type = next(k for k, v in path_cap_map.items() if v == shape.cap)      # reverse lookup
            path = gdsii.elements.Path(layer=layer,
                                       data_type=data_type,
                                       xy=xy)
            path.path_type = path_type
            path.width = width
            elements.append(path)
        else:
            for polygon in shape.to_polygons():
                xy_open = numpy.round(polygon.vertices + polygon.offset).astype(int)
                xy_closed = numpy.vstack((xy_open, xy_open[0, :]))
                elements.append(gdsii.elements.Boundary(layer=layer,
                                                        data_type=data_type,
                                                        xy=xy_closed))
    return elements


def _labels_to_texts(labels: List[Label]) -> List[gdsii.elements.Text]:
    texts = []
    for label in labels:
        layer, text_type = _mlayer2gds(label.layer)
        xy = numpy.round([label.offset]).astype(int)
        texts.append(gdsii.elements.Text(layer=layer,
                                         text_type=text_type,
                                         xy=xy,
                                         string=label.string.encode('ASCII')))
    return texts


def disambiguate_pattern_names(patterns: Iterable[Pattern],
                               max_name_length: int = 32,
                               suffix_length: int = 6,
                               dup_warn_filter: Optional[Callable[[str], bool]] = None,
                               ):
    """
    Makes each pattern name valid for GDSII and unique within `patterns`,
    modifying the patterns in-place (names are sanitized, suffixed if duplicated,
    and stored as ASCII-encoded bytes).

    Args:
        patterns: Patterns whose names should be disambiguated.
        max_name_length: Names longer than this are shortened before suffixing. Default 32.
        suffix_length: Number of characters reserved for the deduplication suffix. Default 6.
        dup_warn_filter: If provided and it returns `False` for a name, no warning is
            logged when that name is renamed due to duplication.
    """
    used_names = []
    for pat in patterns:
        if len(pat.name) > max_name_length:
            shortened_name = pat.name[:max_name_length - suffix_length]
            logger.warning('Pattern name "{}" is too long ({}/{} chars),\n'.format(pat.name, len(pat.name), max_name_length)
                           + ' shortening to "{}" before generating suffix'.format(shortened_name))
        else:
            shortened_name = pat.name

        sanitized_name = re.compile(r'[^A-Za-z0-9_?$]').sub('_', shortened_name)

        i = 0
        suffixed_name = sanitized_name
        while suffixed_name in used_names or suffixed_name == '':
            suffix = base64.b64encode(struct.pack('>Q', i), b'$?').decode('ASCII')
            suffixed_name = sanitized_name + '$' + suffix[:-1].lstrip('A')
            i += 1

        if sanitized_name == '':
            logger.warning('Empty pattern name saved as "{}"'.format(suffixed_name))
        elif suffixed_name != sanitized_name:
            if dup_warn_filter is None or dup_warn_filter(pat.name):
                logger.warning('Pattern name "{}" ({}) appears multiple times;\n renaming to "{}"'.format(
                                  pat.name, sanitized_name, suffixed_name))

        encoded_name = suffixed_name.encode('ASCII')
        if len(encoded_name) == 0:
            # Should never happen, since zero-length names are suffixed above
            raise PatternError('Zero-length name after sanitize+encode,\n originally "{}"'.format(pat.name))
        if len(encoded_name) > max_name_length:
            raise PatternError('Pattern name "{}" length > {} after encode,\n originally "{}"'.format(
                                  encoded_name, max_name_length, pat.name))

        pat.name = encoded_name
        used_names.append(suffixed_name)
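
# Usage sketch (illustrative): rename all patterns before a manual `write()`,
# warning only about collisions among names that were not autogenerated.
# `all_patterns` and the '_tmp' prefix are hypothetical:
#
#     disambiguate_pattern_names(all_patterns,
#                                dup_warn_filter=lambda name: not name.startswith('_tmp'))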