style and type fixes (per flake8)

may also fix some bugs in the `Library` class and the DXF reader
lethe/HEAD
Jan Petykiewicz 4 years ago
parent f6ad272c2c
commit f364970403

@@ -0,0 +1,29 @@
+[flake8]
+ignore =
+    # E501 line too long
+    E501,
+    # W391 newlines at EOF
+    W391,
+    # E241 multiple spaces after comma
+    E241,
+    # E302 expected 2 newlines
+    E302,
+    # W503 line break before binary operator (to be deprecated)
+    W503,
+    # E265 block comment should start with '# '
+    E265,
+    # E123 closing bracket does not match indentation of opening bracket's line
+    E123,
+    # E124 closing bracket does not match visual indentation
+    E124,
+    # E221 multiple spaces before operator
+    E221,
+    # E201 whitespace after '['
+    E201,
+    # E741 ambiguous variable name 'I'
+    E741,
+
+per-file-ignores =
+    # F401 import without use
+    */__init__.py: F401,

@@ -1,3 +1,4 @@
"""
Functions for reading from and writing to various file formats.
"""

@@ -1,10 +1,9 @@
"""
DXF file format readers and writers
"""
-from typing import List, Any, Dict, Tuple, Callable, Union, Sequence, Iterable, Optional
+from typing import List, Any, Dict, Tuple, Callable, Union, Sequence, Iterable
import re
import io
-import copy
import base64
import struct
import logging
@@ -12,15 +11,12 @@ import pathlib
import gzip
import numpy  # type: ignore
-from numpy import pi
import ezdxf  # type: ignore
-from .utils import mangle_name, make_dose_table
from .. import Pattern, SubPattern, PatternError, Label, Shape
from ..shapes import Polygon, Path
from ..repetition import Grid
-from ..utils import rotation_matrix_2d, get_bit, set_bit, vector2, is_scalar, layer_t
-from ..utils import remove_colinear_vertices, normalize_mirror
+from ..utils import rotation_matrix_2d, layer_t
logger = logging.getLogger(__name__)
@@ -75,6 +71,7 @@ def write(pattern: Pattern,
    #TODO consider supporting DXF arcs?
    if disambiguate_func is None:
        disambiguate_func = disambiguate_pattern_names
+    assert(disambiguate_func is not None)
    if not modify_originals:
        pattern = pattern.deepcopy().deepunlock()
@@ -125,8 +122,7 @@ def writefile(pattern: Pattern,
        open_func = open
    with open_func(path, mode='wt') as stream:
-        results = write(pattern, stream, *args, **kwargs)
-        return results
+        write(pattern, stream, *args, **kwargs)
def readfile(filename: Union[str, pathlib.Path],
@@ -204,25 +200,26 @@ def _read_block(block, clean_vertices: bool) -> Pattern:
            else:
                points = numpy.array(tuple(element.points()))
            attr = element.dxfattribs()
-            args = {'layer': attr.get('layer', DEFAULT_LAYER),
-                   }
+            layer = attr.get('layer', DEFAULT_LAYER)
            if points.shape[1] == 2:
-                shape = Polygon(**args)
+                raise PatternError('Invalid or unimplemented polygon?')
+                #shape = Polygon(layer=layer)
            elif points.shape[1] > 2:
                if (points[0, 2] != points[:, 2]).any():
                    raise PatternError('PolyLine has non-constant width (not yet representable in masque!)')
                elif points.shape[1] == 4 and (points[:, 3] != 0).any():
                    raise PatternError('LWPolyLine has bulge (not yet representable in masque!)')
-                else:
-                    width = points[0, 2]
-                    if width == 0:
-                        width = attr.get('const_width', 0)
-                    if width == 0 and numpy.array_equal(points[0], points[-1]):
-                        shape = Polygon(**args, vertices=points[:-1, :2])
-                    else:
-                        shape = Path(**args, width=width, vertices=points[:, :2])
+                width = points[0, 2]
+                if width == 0:
+                    width = attr.get('const_width', 0)
+                shape: Union[Path, Polygon]
+                if width == 0 and numpy.array_equal(points[0], points[-1]):
+                    shape = Polygon(layer=layer, vertices=points[:-1, :2])
+                else:
+                    shape = Path(layer=layer, width=width, vertices=points[:, :2])
            if clean_vertices:
                try:
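Note: the reworked branch above is the DXF-reader fix referenced in the commit message. A minimal standalone sketch of the same decision logic, assuming `points` is an (N, >=3) LWPOLYLINE vertex array whose third column is the vertex width; `polyline_to_shape` is a hypothetical helper, not part of masque:

    # Sketch only -- mirrors the logic added above.
    import numpy
    from masque.shapes import Polygon, Path

    def polyline_to_shape(points: numpy.ndarray, layer, const_width: float = 0):
        width = points[0, 2]
        if width == 0:
            width = const_width   # fall back to the entity-level constant width
        if width == 0 and numpy.array_equal(points[0], points[-1]):
            # closed, zero-width outline -> filled polygon (drop the repeated last vertex)
            return Polygon(layer=layer, vertices=points[:-1, :2])
        # anything else keeps its width and stays a Path
        return Path(layer=layer, width=width, vertices=points[:, :2])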
@@ -237,7 +234,7 @@ def _read_block(block, clean_vertices: bool) -> Pattern:
                    'layer': element.dxfattribs().get('layer', DEFAULT_LAYER),
                   }
            string = element.dxfattribs().get('text', '')
-            height = element.dxfattribs().get('height', 0)
+#            height = element.dxfattribs().get('height', 0)
#            if height != 0:
#                logger.warning('Interpreting DXF TEXT as a label despite nonzero height. '
#                               'This could be changed in the future by setting a font path in the masque DXF code.')
@@ -252,7 +249,7 @@ def _read_block(block, clean_vertices: bool) -> Pattern:
                logger.warning('Masque does not support per-axis scaling; using x-scaling only!')
            scale = abs(xscale)
            mirrored = (yscale < 0, xscale < 0)
-            rotation = attr.get('rotation', 0) * pi/180
+            rotation = numpy.deg2rad(attr.get('rotation', 0))
            offset = attr.get('insert', (0, 0, 0))[:2]
@@ -266,11 +263,10 @@ def _read_block(block, clean_vertices: bool) -> Pattern:
                }
            if 'column_count' in attr:
-                args['repetition'] = Grid(
-                    a_vector=(attr['column_spacing'], 0),
-                    b_vector=(0, attr['row_spacing']),
-                    a_count=attr['column_count'],
-                    b_count=attr['row_count'])
+                args['repetition'] = Grid(a_vector=(attr['column_spacing'], 0),
+                                          b_vector=(0, attr['row_spacing']),
+                                          a_count=attr['column_count'],
+                                          b_count=attr['row_count'])
            pat.subpatterns.append(SubPattern(**args))
        else:
            logger.warning(f'Ignoring DXF element {element.dxftype()} (not implemented).')
@@ -356,11 +352,11 @@ def _mlayer2dxf(layer: layer_t) -> str:
def disambiguate_pattern_names(patterns: Sequence[Pattern],
                               max_name_length: int = 32,
                               suffix_length: int = 6,
-                               dup_warn_filter: Callable[[str,], bool] = None,      # If returns False, don't warn about this name
+                               dup_warn_filter: Callable[[str], bool] = None,      # If returns False, don't warn about this name
                               ) -> None:
    used_names = []
    for pat in patterns:
-        sanitized_name = re.compile('[^A-Za-z0-9_\?\$]').sub('_', pat.name)
+        sanitized_name = re.compile(r'[^A-Za-z0-9_\?\$]').sub('_', pat.name)
        i = 0
        suffixed_name = sanitized_name
@@ -374,15 +370,15 @@ def disambiguate_pattern_names(patterns: Sequence[Pattern],
            logger.warning(f'Empty pattern name saved as "{suffixed_name}"')
        elif suffixed_name != sanitized_name:
            if dup_warn_filter is None or dup_warn_filter(pat.name):
-                logger.warning(f'Pattern name "{pat.name}" ({sanitized_name}) appears multiple times;\n' +
-                               f' renaming to "{suffixed_name}"')
+                logger.warning(f'Pattern name "{pat.name}" ({sanitized_name}) appears multiple times;\n'
+                               + f' renaming to "{suffixed_name}"')
        if len(suffixed_name) == 0:
            # Should never happen since zero-length names are replaced
            raise PatternError(f'Zero-length name after sanitize,\n originally "{pat.name}"')
        if len(suffixed_name) > max_name_length:
-            raise PatternError(f'Pattern name "{suffixed_name!r}" length > {max_name_length} after encode,\n' +
-                               f' originally "{pat.name}"')
+            raise PatternError(f'Pattern name "{suffixed_name!r}" length > {max_name_length} after encode,\n'
+                               + f' originally "{pat.name}"')
        pat.name = suffixed_name
        used_names.append(suffixed_name)

@@ -17,8 +17,8 @@ Notes:
 * ELFLAGS are not supported
 * GDS does not support library- or structure-level annotations
"""
-from typing import List, Any, Dict, Tuple, Callable, Union, Sequence, Iterable, Optional
-from typing import Sequence, Mapping
+from typing import List, Any, Dict, Tuple, Callable, Union, Iterable, Optional
+from typing import Sequence
import re
import io
import copy
@@ -34,25 +34,23 @@ import gdsii.library
import gdsii.structure
import gdsii.elements
-from .utils import mangle_name, make_dose_table, dose2dtype, dtype2dose, clean_pattern_vertices
-from .utils import is_gzipped
+from .utils import clean_pattern_vertices, is_gzipped
from .. import Pattern, SubPattern, PatternError, Label, Shape
from ..shapes import Polygon, Path
from ..repetition import Grid
-from ..utils import rotation_matrix_2d, get_bit, set_bit, vector2, is_scalar, layer_t
-from ..utils import remove_colinear_vertices, normalize_mirror, annotations_t
+from ..utils import get_bit, set_bit, layer_t, normalize_mirror, annotations_t
logger = logging.getLogger(__name__)
path_cap_map = {
    None: Path.Cap.Flush,
    0: Path.Cap.Flush,
    1: Path.Cap.Circle,
    2: Path.Cap.Square,
    4: Path.Cap.SquareCustom,
    }
def build(patterns: Union[Pattern, Sequence[Pattern]],
@@ -262,8 +260,7 @@ def read(stream: io.BufferedIOBase,
                              string=element.string.decode('ASCII'))
            pat.labels.append(label)
-        elif (isinstance(element, gdsii.elements.SRef) or
-              isinstance(element, gdsii.elements.ARef)):
+        elif isinstance(element, (gdsii.elements.SRef, gdsii.elements.ARef)):
            pat.subpatterns.append(_ref_to_subpat(element))
    if clean_vertices:
@@ -358,7 +355,7 @@ def _gpath_to_mpath(element: gdsii.elements.Path, raw_mode: bool) -> Path:
            'width': element.width if element.width is not None else 0.0,
            'cap': cap,
            'offset': numpy.zeros(2),
-            'annotations':_properties_to_annotations(element.properties),
+            'annotations': _properties_to_annotations(element.properties),
            'raw': raw_mode,
           }
@@ -376,7 +373,7 @@ def _boundary_to_polygon(element: gdsii.elements.Boundary, raw_mode: bool) -> Po
    args = {'vertices': element.xy[:-1].astype(float),
            'layer': (element.layer, element.data_type),
            'offset': numpy.zeros(2),
-            'annotations':_properties_to_annotations(element.properties),
+            'annotations': _properties_to_annotations(element.properties),
            'raw': raw_mode,
           }
    return Polygon(**args)
@@ -398,14 +395,14 @@ def _subpatterns_to_refs(subpatterns: List[SubPattern]
        ref: Union[gdsii.elements.SRef, gdsii.elements.ARef]
        if isinstance(rep, Grid):
            xy = numpy.array(subpat.offset) + [
                [0, 0],
                rep.a_vector * rep.a_count,
                rep.b_vector * rep.b_count,
                ]
            ref = gdsii.elements.ARef(struct_name=encoded_name,
                                      xy=numpy.round(xy).astype(int),
                                      cols=numpy.round(rep.a_count).astype(int),
                                      rows=numpy.round(rep.b_count).astype(int))
            new_refs = [ref]
        elif rep is None:
            ref = gdsii.elements.SRef(struct_name=encoded_name,
@@ -437,7 +434,7 @@ def _annotations_to_properties(annotations: annotations_t, max_len: int = 126) -
    for key, vals in annotations.items():
        try:
            i = int(key)
-        except:
+        except ValueError:
            raise PatternError(f'Annotation key {key} is not convertable to an integer')
        if not (0 < i < 126):
            raise PatternError(f'Annotation key {key} converts to {i} (must be in the range [1,125])')
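Note: narrowing the bare `except` to `except ValueError` is the substantive part of this hunk: `int()` raises `ValueError` for non-numeric keys, and that is the only failure this block should translate into a `PatternError`. A standalone illustration (not masque code):

    # A bare `except:` would also have swallowed KeyboardInterrupt and unrelated
    # bugs behind the same message; ValueError is the exception int() actually raises.
    def key_to_int(key) -> int:
        try:
            return int(key)
        except ValueError as err:
            raise ValueError(f'Annotation key {key} is not convertable to an integer') from err

    key_to_int('17')        # -> 17
    # key_to_int('metal1')  # -> ValueError, with the original cause chained via `from err`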
@@ -464,7 +461,7 @@ def _shapes_to_elements(shapes: List[Shape],
        if isinstance(shape, Path) and not polygonize_paths:
            xy = numpy.round(shape.vertices + shape.offset).astype(int)
            width = numpy.round(shape.width).astype(int)
-            path_type = next(k for k, v in path_cap_map.items() if v == shape.cap)    #reverse lookup
+            path_type = next(k for k, v in path_cap_map.items() if v == shape.cap)    # reverse lookup
            path = gdsii.elements.Path(layer=layer,
                                       data_type=data_type,
                                       xy=xy)
@@ -502,7 +499,7 @@ def _labels_to_texts(labels: List[Label]) -> List[gdsii.elements.Text]:
def disambiguate_pattern_names(patterns: Sequence[Pattern],
                               max_name_length: int = 32,
                               suffix_length: int = 6,
-                               dup_warn_filter: Optional[Callable[[str,], bool]] = None,
+                               dup_warn_filter: Optional[Callable[[str], bool]] = None,
                               ):
    """
    Args:
@@ -519,13 +516,13 @@ def disambiguate_pattern_names(patterns: Sequence[Pattern],
        # Shorten names which already exceed max-length
        if len(pat.name) > max_name_length:
            shortened_name = pat.name[:max_name_length - suffix_length]
-            logger.warning(f'Pattern name "{pat.name}" is too long ({len(pat.name)}/{max_name_length} chars),\n' +
-                           f' shortening to "{shortened_name}" before generating suffix')
+            logger.warning(f'Pattern name "{pat.name}" is too long ({len(pat.name)}/{max_name_length} chars),\n'
+                           + f' shortening to "{shortened_name}" before generating suffix')
        else:
            shortened_name = pat.name
        # Remove invalid characters
-        sanitized_name = re.compile('[^A-Za-z0-9_\?\$]').sub('_', shortened_name)
+        sanitized_name = re.compile(r'[^A-Za-z0-9_\?\$]').sub('_', shortened_name)
        # Add a suffix that makes the name unique
        i = 0
@@ -540,8 +537,8 @@ def disambiguate_pattern_names(patterns: Sequence[Pattern],
            logger.warning(f'Empty pattern name saved as "{suffixed_name}"')
        elif suffixed_name != sanitized_name:
            if dup_warn_filter is None or dup_warn_filter(pat.name):
-                logger.warning(f'Pattern name "{pat.name}" ({sanitized_name}) appears multiple times;\n' +
-                               f' renaming to "{suffixed_name}"')
+                logger.warning(f'Pattern name "{pat.name}" ({sanitized_name}) appears multiple times;\n'
+                               + f' renaming to "{suffixed_name}"')
        # Encode into a byte-string and perform some final checks
        encoded_name = suffixed_name.encode('ASCII')
@@ -549,8 +546,8 @@ def disambiguate_pattern_names(patterns: Sequence[Pattern],
            # Should never happen since zero-length names are replaced
            raise PatternError(f'Zero-length name after sanitize+encode,\n originally "{pat.name}"')
        if len(encoded_name) > max_name_length:
-            raise PatternError(f'Pattern name "{encoded_name!r}" length > {max_name_length} after encode,\n' +
-                               f' originally "{pat.name}"')
+            raise PatternError(f'Pattern name "{encoded_name!r}" length > {max_name_length} after encode,\n'
+                               + f' originally "{pat.name}"')
        pat.name = suffixed_name
        used_names.append(suffixed_name)

@@ -18,8 +18,8 @@ Notes:
 * GDS does not support library- or structure-level annotations
 * Creation/modification/access times are set to 1900-01-01 for reproducibility.
"""
-from typing import List, Any, Dict, Tuple, Callable, Union, Sequence, Iterable, Optional
-from typing import Sequence, Mapping, BinaryIO
+from typing import List, Any, Dict, Tuple, Callable, Union, Iterable, Optional
+from typing import Sequence, BinaryIO
import re
import io
import mmap
@@ -29,29 +29,27 @@ import struct
import logging
import pathlib
import gzip
-from itertools import chain
import numpy  # type: ignore
import klamath
from klamath import records
-from .utils import mangle_name, make_dose_table, dose2dtype, dtype2dose, is_gzipped
+from .utils import is_gzipped
from .. import Pattern, SubPattern, PatternError, Label, Shape
from ..shapes import Polygon, Path
from ..repetition import Grid
-from ..utils import rotation_matrix_2d, get_bit, set_bit, vector2, is_scalar, layer_t
-from ..utils import remove_colinear_vertices, normalize_mirror, annotations_t
+from ..utils import layer_t, normalize_mirror, annotations_t
from ..library import Library
logger = logging.getLogger(__name__)
path_cap_map = {
    0: Path.Cap.Flush,
    1: Path.Cap.Circle,
    2: Path.Cap.Square,
    4: Path.Cap.SquareCustom,
    }
def write(patterns: Union[Pattern, Sequence[Pattern]],
@@ -144,15 +142,15 @@ def writefile(patterns: Union[Sequence[Pattern], Pattern],
              **kwargs,
              ) -> None:
    """
-    Wrapper for `masque.file.gdsii.write()` that takes a filename or path instead of a stream.
+    Wrapper for `write()` that takes a filename or path instead of a stream.
    Will automatically compress the file if it has a .gz suffix.
    Args:
        patterns: `Pattern` or list of patterns to save
        filename: Filename to save to.
-        *args: passed to `masque.file.gdsii.write`
-        **kwargs: passed to `masque.file.gdsii.write`
+        *args: passed to `write()`
+        **kwargs: passed to `write()`
    """
    path = pathlib.Path(filename)
    if path.suffix == '.gz':
@@ -169,14 +167,14 @@ def readfile(filename: Union[str, pathlib.Path],
             **kwargs,
             ) -> Tuple[Dict[str, Pattern], Dict[str, Any]]:
    """
-    Wrapper for `masque.file.gdsii.read()` that takes a filename or path instead of a stream.
+    Wrapper for `read()` that takes a filename or path instead of a stream.
    Will automatically decompress gzipped files.
    Args:
        filename: Filename to save to.
-        *args: passed to `masque.file.gdsii.read`
-        **kwargs: passed to `masque.file.gdsii.read`
+        *args: passed to `read()`
+        **kwargs: passed to `read()`
    """
    path = pathlib.Path(filename)
    if is_gzipped(path):
@@ -185,7 +183,7 @@ def readfile(filename: Union[str, pathlib.Path],
        open_func = open
    with io.BufferedReader(open_func(path, mode='rb')) as stream:
-        results = read(stream)#, *args, **kwargs)
+        results = read(stream, *args, **kwargs)
    return results
@@ -216,7 +214,7 @@ def read(stream: BinaryIO,
    found_struct = records.BGNSTR.skip_past(stream)
    while found_struct:
        name = records.STRNAME.skip_and_read(stream)
-        pat = read_elements(stream, name=name.decode('ASCII'))
+        pat = read_elements(stream, name=name.decode('ASCII'), raw_mode=raw_mode)
        patterns.append(pat)
        found_struct = records.BGNSTR.skip_past(stream)
@@ -368,10 +366,10 @@ def _subpatterns_to_refs(subpatterns: List[SubPattern]
        if isinstance(rep, Grid):
            xy = numpy.array(subpat.offset) + [
                [0, 0],
                rep.a_vector * rep.a_count,
                rep.b_vector * rep.b_count,
                ]
            aref = klamath.library.Reference(struct_name=encoded_name,
                                             xy=numpy.round(xy).astype(int),
                                             colrow=(numpy.round(rep.a_count), numpy.round(rep.b_count)),
@@ -412,7 +410,7 @@ def _annotations_to_properties(annotations: annotations_t, max_len: int = 126) -
    for key, vals in annotations.items():
        try:
            i = int(key)
-        except:
+        except ValueError:
            raise PatternError(f'Annotation key {key} is not convertable to an integer')
        if not (0 < i < 126):
            raise PatternError(f'Annotation key {key} converts to {i} (must be in the range [1,125])')
@@ -439,7 +437,7 @@ def _shapes_to_elements(shapes: List[Shape],
        if isinstance(shape, Path) and not polygonize_paths:
            xy = numpy.round(shape.vertices + shape.offset).astype(int)
            width = numpy.round(shape.width).astype(int)
-            path_type = next(k for k, v in path_cap_map.items() if v == shape.cap)    #reverse lookup
+            path_type = next(k for k, v in path_cap_map.items() if v == shape.cap)    # reverse lookup
            extension: Tuple[int, int]
            if shape.cap == Path.Cap.SquareCustom and shape.cap_extensions is not None:
@@ -455,13 +453,13 @@ def _shapes_to_elements(shapes: List[Shape],
                                          properties=properties)
            elements.append(path)
        elif isinstance(shape, Polygon):
            polygon = shape
            xy_open = numpy.round(polygon.vertices + polygon.offset).astype(int)
            xy_closed = numpy.vstack((xy_open, xy_open[0, :]))
            boundary = klamath.elements.Boundary(layer=(layer, data_type),
                                                 xy=xy_closed,
                                                 properties=properties)
            elements.append(boundary)
        else:
            for polygon in shape.to_polygons():
                xy_open = numpy.round(polygon.vertices + polygon.offset).astype(int)
@@ -483,7 +481,7 @@ def _labels_to_texts(labels: List[Label]) -> List[klamath.elements.Text]:
                                       xy=xy,
                                       string=label.string.encode('ASCII'),
                                       properties=properties,
-                                       presentation=0,  #TODO maybe set some of these?
+                                       presentation=0,  # TODO maybe set some of these?
                                       angle_deg=0,
                                       invert_y=False,
                                       width=0,
@@ -496,7 +494,7 @@ def _labels_to_texts(labels: List[Label]) -> List[klamath.elements.Text]:
def disambiguate_pattern_names(patterns: Sequence[Pattern],
                               max_name_length: int = 32,
                               suffix_length: int = 6,
-                               dup_warn_filter: Optional[Callable[[str,], bool]] = None,
+                               dup_warn_filter: Optional[Callable[[str], bool]] = None,
                               ):
    """
    Args:
@@ -513,13 +511,13 @@ def disambiguate_pattern_names(patterns: Sequence[Pattern],
        # Shorten names which already exceed max-length
        if len(pat.name) > max_name_length:
            shortened_name = pat.name[:max_name_length - suffix_length]
-            logger.warning(f'Pattern name "{pat.name}" is too long ({len(pat.name)}/{max_name_length} chars),\n' +
-                           f' shortening to "{shortened_name}" before generating suffix')
+            logger.warning(f'Pattern name "{pat.name}" is too long ({len(pat.name)}/{max_name_length} chars),\n'
+                           + f' shortening to "{shortened_name}" before generating suffix')
        else:
            shortened_name = pat.name
        # Remove invalid characters
-        sanitized_name = re.compile('[^A-Za-z0-9_\?\$]').sub('_', shortened_name)
+        sanitized_name = re.compile(r'[^A-Za-z0-9_\?\$]').sub('_', shortened_name)
        # Add a suffix that makes the name unique
        i = 0
@@ -534,8 +532,8 @@ def disambiguate_pattern_names(patterns: Sequence[Pattern],
            logger.warning(f'Empty pattern name saved as "{suffixed_name}"')
        elif suffixed_name != sanitized_name:
            if dup_warn_filter is None or dup_warn_filter(pat.name):
-                logger.warning(f'Pattern name "{pat.name}" ({sanitized_name}) appears multiple times;\n' +
-                               f' renaming to "{suffixed_name}"')
+                logger.warning(f'Pattern name "{pat.name}" ({sanitized_name}) appears multiple times;\n'
+                               + f' renaming to "{suffixed_name}"')
        # Encode into a byte-string and perform some final checks
        encoded_name = suffixed_name.encode('ASCII')
@@ -543,8 +541,8 @@ def disambiguate_pattern_names(patterns: Sequence[Pattern],
            # Should never happen since zero-length names are replaced
            raise PatternError(f'Zero-length name after sanitize+encode,\n originally "{pat.name}"')
        if len(encoded_name) > max_name_length:
-            raise PatternError(f'Pattern name "{encoded_name!r}" length > {max_name_length} after encode,\n' +
-                               f' originally "{pat.name}"')
+            raise PatternError(f'Pattern name "{encoded_name!r}" length > {max_name_length} after encode,\n'
+                               + f' originally "{pat.name}"')
        pat.name = suffixed_name
        used_names.append(suffixed_name)
@@ -576,7 +574,8 @@ def load_library(stream: BinaryIO,
        Additional library info (dict, same format as from `read`).
    """
    if is_secondary is None:
-        is_secondary = lambda k: False
+        def is_secondary(k: str):
+            return False
    stream.seek(0)
    library_info = _read_header(stream)
@@ -592,7 +591,7 @@ def load_library(stream: BinaryIO,
        lib.set_value(name, tag, mkstruct, secondary=is_secondary(name))
-    return lib
+    return lib, library_info
def load_libraryfile(filename: Union[str, pathlib.Path],
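Note: `return lib` becoming `return lib, library_info` is one of the fixes the commit message alludes to; the docstring above already promises the library-info dict (same format as `read()` returns), but the old code dropped it. A hedged usage sketch, where the module path and the single-argument call are assumptions:

    # Sketch only: callers of the fixed load_library() unpack both values.
    from masque.file.klamath import load_library   # exact module path assumed

    with open('layout.gds', 'rb') as stream:
        lib, library_info = load_library(stream)   # previously only `lib` came back
        print(library_info)                        # header info, as also returned by read()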

@@ -22,17 +22,15 @@ import pathlib
import gzip
import numpy  # type: ignore
-from numpy import pi
import fatamorgana
import fatamorgana.records as fatrec
from fatamorgana.basic import PathExtensionScheme, AString, NString, PropStringReference
-from .utils import mangle_name, make_dose_table, clean_pattern_vertices, is_gzipped
+from .utils import clean_pattern_vertices, is_gzipped
from .. import Pattern, SubPattern, PatternError, Label, Shape
from ..shapes import Polygon, Path, Circle
from ..repetition import Grid, Arbitrary, Repetition
-from ..utils import rotation_matrix_2d, get_bit, set_bit, vector2, is_scalar, layer_t
-from ..utils import remove_colinear_vertices, normalize_mirror, annotations_t
+from ..utils import layer_t, normalize_mirror, annotations_t
logger = logging.getLogger(__name__)
@@ -42,10 +40,10 @@ logger.warning('OASIS support is experimental and mostly untested!')
path_cap_map = {
    PathExtensionScheme.Flush: Path.Cap.Flush,
    PathExtensionScheme.HalfWidth: Path.Cap.Square,
    PathExtensionScheme.Arbitrary: Path.Cap.SquareCustom,
    }
#TODO implement more shape types?
@@ -120,11 +118,11 @@ def build(patterns: Union[Pattern, Sequence[Pattern]],
        for name, layer_num in layer_map.items():
            layer, data_type = _mlayer2oas(layer_num)
            lib.layers += [
                fatrec.LayerName(nstring=name,
                                 layer_interval=(layer, layer),
                                 type_interval=(data_type, data_type),
                                 is_textlayer=tt)
                for tt in (True, False)]
        def layer2oas(mlayer: layer_t) -> Tuple[int, int]:
            assert(layer_map is not None)
@@ -252,9 +250,9 @@ def read(stream: io.BufferedIOBase,
    lib = fatamorgana.OasisLayout.read(stream)
    library_info: Dict[str, Any] = {
        'units_per_micrometer': lib.unit,
        'annotations': properties_to_annotations(lib.properties, lib.propnames, lib.propstrings),
        }
    layer_map = {}
    for layer_name in lib.layers:
@@ -296,7 +294,7 @@ def read(stream: io.BufferedIOBase,
            cap_start = path_cap_map[element.get_extension_start()[0]]
            cap_end = path_cap_map[element.get_extension_end()[0]]
            if cap_start != cap_end:
-                raise Exception('masque does not support multiple cap types on a single path.')    #TODO handle multiple cap types
+                raise Exception('masque does not support multiple cap types on a single path.')    # TODO handle multiple cap types
            cap = cap_start
            path_args: Dict[str, Any] = {}
@@ -472,7 +470,7 @@ def _mlayer2oas(mlayer: layer_t) -> Tuple[int, int]:
            data_type = 0
    else:
        raise PatternError(f'Invalid layer for OASIS: {layer}. Note that OASIS layers cannot be '
-                           'strings unless a layer map is provided.')
+                           f'strings unless a layer map is provided.')
    return layer, data_type
@@ -490,7 +488,7 @@ def _placement_to_subpat(placement: fatrec.Placement, lib: fatamorgana.OasisLayo
    subpat = SubPattern(offset=xy,
                        pattern=None,
                        mirrored=(placement.flip, False),
-                        rotation=float(placement.angle * pi/180),
+                        rotation=numpy.deg2rad(placement.angle),
                        scale=float(mag),
                        identifier=(name,),
                        repetition=repetition_fata2masq(placement.repetition),
@@ -512,14 +510,14 @@ def _subpatterns_to_placements(subpatterns: List[SubPattern]
        offset = numpy.round(subpat.offset + rep_offset).astype(int)
        angle = numpy.rad2deg(subpat.rotation + extra_angle) % 360
        ref = fatrec.Placement(
            name=subpat.pattern.name,
            flip=mirror_across_x,
            angle=angle,
            magnification=subpat.scale,
            properties=annotations_to_properties(subpat.annotations),
            x=offset[0],
            y=offset[1],
            repetition=frep)
        refs.append(ref)
    return refs
@@ -549,7 +547,7 @@ def _shapes_to_elements(shapes: List[Shape],
            xy = numpy.round(shape.offset + shape.vertices[0] + rep_offset).astype(int)
            deltas = numpy.round(numpy.diff(shape.vertices, axis=0)).astype(int)
            half_width = numpy.round(shape.width / 2).astype(int)
-            path_type = next(k for k, v in path_cap_map.items() if v == shape.cap)    #reverse lookup
+            path_type = next(k for k, v in path_cap_map.items() if v == shape.cap)    # reverse lookup
            extension_start = (path_type, shape.cap_extensions[0] if shape.cap_extensions is not None else None)
            extension_end = (path_type, shape.cap_extensions[1] if shape.cap_extensions is not None else None)
            path = fatrec.Path(layer=layer,
@@ -558,7 +556,7 @@ def _shapes_to_elements(shapes: List[Shape],
                               half_width=half_width,
                               x=xy[0],
                               y=xy[1],
-                               extension_start=extension_start,   #TODO implement multiple cap types?
+                               extension_start=extension_start,   # TODO implement multiple cap types?
                               extension_end=extension_end,
                               properties=properties,
                               repetition=repetition,
@@ -598,11 +596,11 @@ def _labels_to_texts(labels: List[Label],
def disambiguate_pattern_names(patterns,
-                               dup_warn_filter: Callable[[str,], bool] = None,      # If returns False, don't warn about this name
+                               dup_warn_filter: Callable[[str], bool] = None,      # If returns False, don't warn about this name
                               ):
    used_names = []
    for pat in patterns:
-        sanitized_name = re.compile('[^A-Za-z0-9_\?\$]').sub('_', pat.name)
+        sanitized_name = re.compile(r'[^A-Za-z0-9_\?\$]').sub('_', pat.name)
        i = 0
        suffixed_name = sanitized_name
@@ -616,8 +614,8 @@ def disambiguate_pattern_names(patterns,
            logger.warning(f'Empty pattern name saved as "{suffixed_name}"')
        elif suffixed_name != sanitized_name:
            if dup_warn_filter is None or dup_warn_filter(pat.name):
-                logger.warning(f'Pattern name "{pat.name}" ({sanitized_name}) appears multiple times;\n' +
-                               f' renaming to "{suffixed_name}"')
+                logger.warning(f'Pattern name "{pat.name}" ({sanitized_name}) appears multiple times;\n'
+                               + f' renaming to "{suffixed_name}"')
        if len(suffixed_name) == 0:
            # Should never happen since zero-length names are replaced
@@ -653,10 +651,10 @@ def repetition_masq2fata(rep: Optional[Repetition]
    frep: Union[fatamorgana.GridRepetition, fatamorgana.ArbitraryRepetition, None]
    if isinstance(rep, Grid):
        frep = fatamorgana.GridRepetition(
            a_vector=numpy.round(rep.a_vector).astype(int),
            b_vector=numpy.round(rep.b_vector).astype(int),
            a_count=numpy.round(rep.a_count).astype(int),
            b_count=numpy.round(rep.b_count).astype(int))
        offset = (0, 0)
    elif isinstance(rep, Arbitrary):
        diffs = numpy.diff(rep.displacements, axis=0)

@@ -13,7 +13,8 @@ from .. import Pattern
def writefile(pattern: Pattern,
              filename: str,
-              custom_attributes: bool=False):
+              custom_attributes: bool = False,
+              ) -> None:
    """
    Write a Pattern to an SVG file, by first calling .polygonize() on it
    to change the shapes into polygons, and then writing patterns as SVG

@@ -4,14 +4,13 @@ Helper functions for file reading and writing
from typing import Set, Tuple, List
import re
import copy
-import gzip
import pathlib
from .. import Pattern, PatternError
from ..shapes import Polygon, Path
-def mangle_name(pattern: Pattern, dose_multiplier: float=1.0) -> str:
+def mangle_name(pattern: Pattern, dose_multiplier: float = 1.0) -> str:
    """
    Create a name using `pattern.name`, `id(pattern)`, and the dose multiplier.
@@ -22,7 +21,7 @@ def mangle_name(pattern: Pattern, dose_multiplier: float=1.0) -> str:
    Returns:
        Mangled name.
    """
-    expression = re.compile('[^A-Za-z0-9_\?\$]')
+    expression = re.compile(r'[^A-Za-z0-9_\?\$]')
    full_name = '{}_{}_{}'.format(pattern.name, dose_multiplier, id(pattern))
    sanitized_name = expression.sub('_', full_name)
    return sanitized_name
@@ -52,7 +51,7 @@ def clean_pattern_vertices(pat: Pattern) -> Pattern:
    return pat
-def make_dose_table(patterns: List[Pattern], dose_multiplier: float=1.0) -> Set[Tuple[int, float]]:
+def make_dose_table(patterns: List[Pattern], dose_multiplier: float = 1.0) -> Set[Tuple[int, float]]:
    """
    Create a set containing `(id(pat), written_dose)` for each pattern (including subpatterns)
@@ -144,14 +143,14 @@ def dose2dtype(patterns: List[Pattern],
    # Create a new pattern for each non-1-dose entry in the dose table
    # and update the shapes to reflect their new dose
-    new_pats = {} # (id, dose) -> new_pattern mapping
+    new_pats = {}  # (id, dose) -> new_pattern mapping
    for pat_id, pat_dose in sd_table:
        if pat_dose == 1:
            new_pats[(pat_id, pat_dose)] = patterns_by_id[pat_id]
            continue
        old_pat = patterns_by_id[pat_id]
-        pat = old_pat.copy() # keep old subpatterns
+        pat = old_pat.copy()  # keep old subpatterns
        pat.shapes = copy.deepcopy(old_pat.shapes)
        pat.labels = copy.deepcopy(old_pat.labels)

@@ -1,10 +1,8 @@
-from typing import List, Tuple, Dict, Optional
+from typing import Tuple, Dict, Optional
import copy
import numpy  # type: ignore
-from numpy import pi
from .repetition import Repetition
-from .error import PatternError, PatternLockedError
from .utils import vector2, rotation_matrix_2d, layer_t, AutoSlots, annotations_t
from .traits import PositionableImpl, LayerableImpl, Copyable, Pivotable, LockableImpl, RepeatableImpl
from .traits import AnnotatableImpl
@@ -63,7 +61,7 @@ class Label(PositionableImpl, LayerableImpl, LockableImpl, RepeatableImpl, Annot
                     repetition=self.repetition,
                     locked=self.locked)
    def __deepcopy__(self, memo: Dict = None) -> 'Label':
        memo = {} if memo is None else memo
        new = copy.copy(self).unlock()
        new._offset = self._offset.copy()

@@ -2,12 +2,11 @@
Library class for managing unique name->pattern mappings and
deferred loading or creation.
"""
-from typing import Dict, Callable, TypeVar, Generic, TYPE_CHECKING
+from typing import Dict, Callable, TypeVar, TYPE_CHECKING
from typing import Any, Tuple, Union, Iterator
import logging
from pprint import pformat
from dataclasses import dataclass
-from functools import lru_cache
from ..error import LibraryError
@@ -133,13 +132,13 @@ class Library:
        return pat
    def keys(self) -> Iterator[str]:
-        return self.primary.keys()
+        return iter(self.primary.keys())
    def values(self) -> Iterator['Pattern']:
-        return (self[key] for key in self.keys())
+        return iter(self[key] for key in self.keys())
    def items(self) -> Iterator[Tuple[str, 'Pattern']]:
-        return ((key, self[key]) for key in self.keys())
+        return iter((key, self[key]) for key in self.keys())
    def __repr__(self) -> str:
        return '<Library with keys ' + repr(list(self.primary.keys())) + '>'
@@ -191,7 +190,7 @@ class Library:
        for key in self.primary:
            _ = self.get_primary(key)
        for key2 in self.secondary:
-            _ = self.get_secondary(key2)
+            _ = self.get_secondary(*key2)
        return self
    def add(self, other: 'Library') -> 'Library':
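Note: these are the `Library` fixes mentioned in the commit message. `keys()`/`values()`/`items()` now wrap their results in `iter()` so they actually satisfy their declared `Iterator[...]` return types (a dict's `.keys()` view is iterable but is not itself an iterator), and the loop above unpacks each secondary key tuple into `get_secondary(*key2)` instead of passing the tuple as a single argument. A standalone illustration of the view-versus-iterator point (not masque code):

    # dict views are iterable, but next() on them fails -- they are not iterators.
    d = {'a': 1, 'b': 2}

    view = d.keys()
    # next(view)          # TypeError: 'dict_keys' object is not an iterator
    it = iter(d.keys())    # what the fixed keys() now returns
    print(next(it))        # 'a'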

@@ -93,7 +93,7 @@ class Pattern(LockableImpl, AnnotatableImpl, metaclass=AutoSlots):
            raise PatternLockedError()
        object.__setattr__(self, name, value)
    def __copy__(self, memo: Dict = None) -> 'Pattern':
        return Pattern(name=self.name,
                       shapes=copy.deepcopy(self.shapes),
                       labels=copy.deepcopy(self.labels),
@@ -101,14 +101,15 @@ class Pattern(LockableImpl, AnnotatableImpl, metaclass=AutoSlots):
                       annotations=copy.deepcopy(self.annotations),
                       locked=self.locked)
    def __deepcopy__(self, memo: Dict = None) -> 'Pattern':
        memo = {} if memo is None else memo
-        new = Pattern(name=self.name,
-                      shapes=copy.deepcopy(self.shapes, memo),
-                      labels=copy.deepcopy(self.labels, memo),
-                      subpatterns=copy.deepcopy(self.subpatterns, memo),
-                      annotations=copy.deepcopy(self.annotations, memo),
-                      locked=self.locked)
+        new = Pattern(
+            name=self.name,
+            shapes=copy.deepcopy(self.shapes, memo),
+            labels=copy.deepcopy(self.labels, memo),
+            subpatterns=copy.deepcopy(self.subpatterns, memo),
+            annotations=copy.deepcopy(self.annotations, memo),
+            locked=self.locked)
        return new
    def rename(self, name: str) -> 'Pattern':
@@ -281,7 +282,7 @@ class Pattern(LockableImpl, AnnotatableImpl, metaclass=AutoSlots):
            if transform is not False:
                sign = numpy.ones(2)
                if transform[3]:
                    sign[1] = -1
                xy = numpy.dot(rotation_matrix_2d(transform[2]), subpattern.offset * sign)
                mirror_x, angle = normalize_mirror(subpattern.mirrored)
                angle += subpattern.rotation
@@ -325,8 +326,8 @@ class Pattern(LockableImpl, AnnotatableImpl, metaclass=AutoSlots):
        """
        old_shapes = self.shapes
        self.shapes = list(chain.from_iterable(
            (shape.to_polygons(poly_num_points, poly_max_arclen)
             for shape in old_shapes)))
        for subpat in self.subpatterns:
            if subpat.pattern is not None:
                subpat.pattern.polygonize(poly_num_points, poly_max_arclen)
@@ -351,7 +352,7 @@ class Pattern(LockableImpl, AnnotatableImpl, metaclass=AutoSlots):
        self.polygonize().flatten()
        old_shapes = self.shapes
        self.shapes = list(chain.from_iterable(
            (shape.manhattanize(grid_x, grid_y) for shape in old_shapes)))
        return self
    def subpatternize(self,
@@ -518,7 +519,7 @@ class Pattern(LockableImpl, AnnotatableImpl, metaclass=AutoSlots):
            ids.update(pat.subpatterns_by_id(include_none=include_none))
        return dict(ids)
    def get_bounds(self) -> Union[numpy.ndarray, None]:
        """
        Return a `numpy.ndarray` containing `[[x_min, y_min], [x_max, y_max]]`, corresponding to the
@@ -625,7 +625,6 @@ class Pattern(LockableImpl, AnnotatableImpl, metaclass=AutoSlots):
        return self
    def translate_elements(self, offset: vector2) -> 'Pattern':
        """
        Translates all shapes, label, and subpatterns by the given offset.
@@ -805,9 +804,9 @@ class Pattern(LockableImpl, AnnotatableImpl, metaclass=AutoSlots):
        Returns:
            True if the pattern is contains no shapes, labels, or subpatterns.
        """
-        return (len(self.subpatterns) == 0 and
-                len(self.shapes) == 0 and
-                len(self.labels) == 0)
+        return (len(self.subpatterns) == 0
+                and len(self.shapes) == 0
+                and len(self.labels) == 0)
    def lock(self) -> 'Pattern':
        """

@@ -3,13 +3,13 @@
 instances of an object .
"""
-from typing import Union, List, Dict, Tuple, Optional, Sequence, TYPE_CHECKING, Any
+from typing import Union, Dict, Optional, Sequence, Any
import copy
from abc import ABCMeta, abstractmethod
import numpy  # type: ignore
-from .error import PatternError, PatternLockedError
+from .error import PatternError
from .utils import rotation_matrix_2d, vector2, AutoSlots
from .traits import LockableImpl, Copyable, Scalable, Rotatable, Mirrorable
@@ -103,7 +103,7 @@ class Grid(LockableImpl, Repetition, metaclass=AutoSlots):
        self.b_count = b_count
        self.locked = locked
    def __copy__(self) -> 'Grid':
        new = Grid(a_vector=self.a_vector.copy(),
                   b_vector=copy.copy(self.b_vector),
                   a_count=self.a_count,
@@ -111,7 +111,7 @@ class Grid(LockableImpl, Repetition, metaclass=AutoSlots):
                   locked=self.locked)
        return new
    def __deepcopy__(self, memo: Dict = None) -> 'Grid':
        memo = {} if memo is None else memo
        new = copy.copy(self).unlock()
        new.locked = self.locked
@@ -170,8 +170,8 @@ class Grid(LockableImpl, Repetition, metaclass=AutoSlots):
    @property
    def displacements(self) -> numpy.ndarray:
        aa, bb = numpy.meshgrid(numpy.arange(self.a_count), numpy.arange(self.b_count), indexing='ij')
-        return (aa.flatten()[:, None] * self.a_vector[None, :] +
-                bb.flatten()[:, None] * self.b_vector[None, :])
+        return (aa.flatten()[:, None] * self.a_vector[None, :]
+                + bb.flatten()[:, None] * self.b_vector[None, :])      # noqa
    def rotate(self, rotation: float) -> 'Grid':
        """
@@ -199,9 +199,9 @@ class Grid(LockableImpl, Repetition, metaclass=AutoSlots):
        Returns:
            self
        """
-        self.a_vector[1-axis] *= -1
+        self.a_vector[1 - axis] *= -1
        if self.b_vector is not None:
-            self.b_vector[1-axis] *= -1
+            self.b_vector[1 - axis] *= -1
        return self
    def get_bounds(self) -> Optional[numpy.ndarray]:
@@ -377,7 +377,7 @@ class Arbitrary(LockableImpl, Repetition, metaclass=AutoSlots):
        Returns:
            self
        """
-        self.displacements[1-axis] *= -1
+        self.displacements[1 - axis] *= -1
        return self
    def get_bounds(self) -> Optional[numpy.ndarray]:

@ -1,4 +1,4 @@
from typing import List, Tuple, Dict, Optional, Sequence from typing import List, Dict, Optional, Sequence
import copy import copy
import math import math
@ -81,7 +81,7 @@ class Arc(Shape, metaclass=AutoSlots):
# arc start/stop angle properties # arc start/stop angle properties
@property @property
def angles(self) -> numpy.ndarray: #ndarray[float] def angles(self) -> numpy.ndarray:
""" """
Return the start and stop angles `[a_start, a_stop]`. Return the start and stop angles `[a_start, a_stop]`.
Angles are measured from x-axis after rotation Angles are measured from x-axis after rotation
@ -194,7 +194,7 @@ class Arc(Shape, metaclass=AutoSlots):
[self.mirror(a) for a, do in enumerate(mirrored) if do] [self.mirror(a) for a, do in enumerate(mirrored) if do]
self.set_locked(locked) self.set_locked(locked)
def __deepcopy__(self, memo: Dict = None) -> 'Arc': def __deepcopy__(self, memo: Dict = None) -> 'Arc':
memo = {} if memo is None else memo memo = {} if memo is None else memo
new = copy.copy(self).unlock() new = copy.copy(self).unlock()
new._offset = self._offset.copy() new._offset = self._offset.copy()
@ -214,8 +214,8 @@ class Arc(Shape, metaclass=AutoSlots):
poly_max_arclen = self.poly_max_arclen poly_max_arclen = self.poly_max_arclen
if (poly_num_points is None) and (poly_max_arclen is None): if (poly_num_points is None) and (poly_max_arclen is None):
raise PatternError('Max number of points and arclength left unspecified' + raise PatternError('Max number of points and arclength left unspecified'
' (default was also overridden)') + ' (default was also overridden)')
r0, r1 = self.radii r0, r1 = self.radii
@ -273,7 +273,7 @@ class Arc(Shape, metaclass=AutoSlots):
mins = [] mins = []
maxs = [] maxs = []
for a, sgn in zip(a_ranges, (-1, +1)): for a, sgn in zip(a_ranges, (-1, +1)):
wh = sgn * self.width/2 wh = sgn * self.width / 2
rx = self.radius_x + wh rx = self.radius_x + wh
ry = self.radius_y + wh ry = self.radius_y + wh
@ -287,7 +287,7 @@ class Arc(Shape, metaclass=AutoSlots):
# Cutoff angles # Cutoff angles
xpt = (-self.rotation) % (2 * pi) + a0_offset xpt = (-self.rotation) % (2 * pi) + a0_offset
ypt = (pi/2 - self.rotation) % (2 * pi) + a0_offset ypt = (pi / 2 - self.rotation) % (2 * pi) + a0_offset
xnt = (xpt - pi) % (2 * pi) + a0_offset xnt = (xpt - pi) % (2 * pi) + a0_offset
ynt = (ypt - pi) % (2 * pi) + a0_offset ynt = (ypt - pi) % (2 * pi) + a0_offset
@ -356,9 +356,9 @@ class Arc(Shape, metaclass=AutoSlots):
rotation %= 2 * pi rotation %= 2 * pi
width = self.width width = self.width
return (type(self), radii, angles, width/norm_value, self.layer), \ return ((type(self), radii, angles, width / norm_value, self.layer),
(self.offset, scale/norm_value, rotation, False, self.dose), \ (self.offset, scale / norm_value, rotation, False, self.dose),
lambda: Arc(radii=radii*norm_value, angles=angles, width=width*norm_value, layer=self.layer) lambda: Arc(radii=radii * norm_value, angles=angles, width=width * norm_value, layer=self.layer))
def get_cap_edges(self) -> numpy.ndarray: def get_cap_edges(self) -> numpy.ndarray:
''' '''
@ -373,7 +373,7 @@ class Arc(Shape, metaclass=AutoSlots):
mins = [] mins = []
maxs = [] maxs = []
for a, sgn in zip(a_ranges, (-1, +1)): for a, sgn in zip(a_ranges, (-1, +1)):
wh = sgn * self.width/2 wh = sgn * self.width / 2
rx = self.radius_x + wh rx = self.radius_x + wh
ry = self.radius_y + wh ry = self.radius_y + wh
@ -388,7 +388,7 @@ class Arc(Shape, metaclass=AutoSlots):
mins.append([xn, yn]) mins.append([xn, yn])
maxs.append([xp, yp]) maxs.append([xp, yp])
return numpy.array([mins, maxs]) + self.offset return numpy.array([mins, maxs]) + self.offset
def _angles_to_parameters(self) -> numpy.ndarray: def _angles_to_parameters(self) -> numpy.ndarray:
''' '''
@ -398,12 +398,12 @@ class Arc(Shape, metaclass=AutoSlots):
''' '''
a = [] a = []
for sgn in (-1, +1): for sgn in (-1, +1):
wh = sgn * self.width/2 wh = sgn * self.width / 2
rx = self.radius_x + wh rx = self.radius_x + wh
ry = self.radius_y + wh ry = self.radius_y + wh
# create paremeter 'a' for parametrized ellipse # create paremeter 'a' for parametrized ellipse
a0, a1 = (numpy.arctan2(rx*numpy.sin(a), ry*numpy.cos(a)) for a in self.angles) a0, a1 = (numpy.arctan2(rx * numpy.sin(a), ry * numpy.cos(a)) for a in self.angles)
sign = numpy.sign(self.angles[1] - self.angles[0]) sign = numpy.sign(self.angles[1] - self.angles[0])
if sign != numpy.sign(a1 - a0): if sign != numpy.sign(a1 - a0):
a1 += sign * 2 * pi a1 += sign * 2 * pi
@ -424,8 +424,8 @@ class Arc(Shape, metaclass=AutoSlots):
return self return self
def __repr__(self) -> str: def __repr__(self) -> str:
angles = f'{self.angles*180/pi}' angles = f'{numpy.rad2deg(self.angles)}'
rotation = f'{self.rotation*180/pi:g}' if self.rotation != 0 else '' rotation = f'{numpy.rad2deg(self.rotation):g}' if self.rotation != 0 else ''
dose = f' d{self.dose:g}' if self.dose != 1 else '' dose = f' d{self.dose:g}' if self.dose != 1 else ''
locked = ' L' if self.locked else '' locked = ' L' if self.locked else ''
return f'<Arc l{self.layer} o{self.offset} r{self.radii}{angles} w{self.width:g}{rotation}{dose}{locked}>' return f'<Arc l{self.layer} o{self.offset} r{self.radii}{angles} w{self.width:g}{rotation}{dose}{locked}>'
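The _angles_to_parameters hunk above maps each geometric angle to the parameter of the standard ellipse parametrization (x, y) = (rx cos t, ry sin t) via arctan2(rx sin a, ry cos a). A minimal standalone sketch of that conversion, for an axis-aligned ellipse and with names chosen here only for illustration:

    import numpy

    def angle_to_param(a: float, rx: float, ry: float) -> float:
        # A point seen at geometric angle `a` from the ellipse center lies at
        # parameter t with tan(t) = (rx / ry) * tan(a).
        return numpy.arctan2(rx * numpy.sin(a), ry * numpy.cos(a))

    # On a 2:1 ellipse, the 45-degree direction corresponds to a parameter
    # of roughly 63.4 degrees.
    print(numpy.rad2deg(angle_to_param(numpy.pi / 4, rx=2.0, ry=1.0)))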

@ -75,7 +75,7 @@ class Circle(Shape, metaclass=AutoSlots):
self.poly_max_arclen = poly_max_arclen self.poly_max_arclen = poly_max_arclen
self.set_locked(locked) self.set_locked(locked)
def __deepcopy__(self, memo: Dict = None) -> 'Circle': def __deepcopy__(self, memo: Dict = None) -> 'Circle':
memo = {} if memo is None else memo memo = {} if memo is None else memo
new = copy.copy(self).unlock() new = copy.copy(self).unlock()
new._offset = self._offset.copy() new._offset = self._offset.copy()
@ -127,9 +127,9 @@ class Circle(Shape, metaclass=AutoSlots):
def normalized_form(self, norm_value) -> normalized_shape_tuple: def normalized_form(self, norm_value) -> normalized_shape_tuple:
rotation = 0.0 rotation = 0.0
magnitude = self.radius / norm_value magnitude = self.radius / norm_value
return (type(self), self.layer), \ return ((type(self), self.layer),
(self.offset, magnitude, rotation, False, self.dose), \ (self.offset, magnitude, rotation, False, self.dose),
lambda: Circle(radius=norm_value, layer=self.layer) lambda: Circle(radius=norm_value, layer=self.layer))
def __repr__(self) -> str: def __repr__(self) -> str:
dose = f' d{self.dose:g}' if self.dose != 1 else '' dose = f' d{self.dose:g}' if self.dose != 1 else ''
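Circle.normalized_form above returns a (key, placement, rebuild) triple: everything that defines the geometry goes into the hashable key, everything that merely places or scales it goes into the placement, and the lambda can regenerate a prototype shape on demand. A hedged sketch of how such triples could be used to deduplicate shapes; the deduplicate helper and the dict-based rebuild values are illustrative stand-ins, not masque API:

    from collections import defaultdict

    def deduplicate(normalized_forms):
        prototypes = {}                  # key -> one rebuilt prototype shape
        placements = defaultdict(list)   # key -> placements of every instance
        for key, placement, rebuild in normalized_forms:
            if key not in prototypes:
                prototypes[key] = rebuild()
            placements[key].append(placement)
        return prototypes, placements

    norm_value = 1000.0
    forms = [
        (('Circle', 0), ((0.0, 0.0), 5.0 / norm_value, 0.0, False, 1.0),
         lambda: {'radius': norm_value, 'layer': 0}),
        (('Circle', 0), ((20.0, 0.0), 8.0 / norm_value, 0.0, False, 1.0),
         lambda: {'radius': norm_value, 'layer': 0}),
    ]
    prototypes, placements = deduplicate(forms)
    print(len(prototypes), len(placements[('Circle', 0)]))   # 1 2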

@ -1,4 +1,4 @@
from typing import List, Tuple, Dict, Sequence, Optional from typing import List, Dict, Sequence, Optional
import copy import copy
import math import math
@ -125,7 +125,7 @@ class Ellipse(Shape, metaclass=AutoSlots):
self.poly_max_arclen = poly_max_arclen self.poly_max_arclen = poly_max_arclen
self.set_locked(locked) self.set_locked(locked)
def __deepcopy__(self, memo: Dict = None) -> 'Ellipse': def __deepcopy__(self, memo: Dict = None) -> 'Ellipse':
memo = {} if memo is None else memo memo = {} if memo is None else memo
new = copy.copy(self).unlock() new = copy.copy(self).unlock()
new._offset = self._offset.copy() new._offset = self._offset.copy()
@ -198,9 +198,9 @@ class Ellipse(Shape, metaclass=AutoSlots):
radii = self.radii[::-1] / self.radius_y radii = self.radii[::-1] / self.radius_y
scale = self.radius_y scale = self.radius_y
angle = (self.rotation + pi / 2) % pi angle = (self.rotation + pi / 2) % pi
return (type(self), radii, self.layer), \ return ((type(self), radii, self.layer),
(self.offset, scale/norm_value, angle, False, self.dose), \ (self.offset, scale / norm_value, angle, False, self.dose),
lambda: Ellipse(radii=radii*norm_value, layer=self.layer) lambda: Ellipse(radii=radii * norm_value, layer=self.layer))
def lock(self) -> 'Ellipse': def lock(self) -> 'Ellipse':
self.radii.flags.writeable = False self.radii.flags.writeable = False
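The Ellipse.normalized_form hunk relies on the identity that an ellipse with radii (rx, ry) and rotation theta is the same shape as one with radii (ry, rx) and rotation theta + pi/2, which is why the radii can be reversed and the angle shifted by pi/2 (modulo pi) before normalizing. A standalone sketch of that canonicalization; the larger-radius-first convention below is an assumption for illustration, not necessarily the branch condition the class uses:

    from numpy import pi

    def canonicalize_ellipse(rx: float, ry: float, rotation: float):
        if rx >= ry:
            radii = (rx, ry)
            angle = rotation % pi
        else:
            radii = (ry, rx)                    # swap axes...
            angle = (rotation + pi / 2) % pi    # ...and rotate by 90 degrees
        scale = radii[0]
        return tuple(r / scale for r in radii), scale, angle

    print(canonicalize_ellipse(1.0, 2.0, 0.0))   # ((1.0, 0.5), 2.0, 1.5707...)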

@ -18,7 +18,7 @@ class PathCap(Enum):
Circle = 1 # Path extends past final vertices with a semicircle of radius width/2 Circle = 1 # Path extends past final vertices with a semicircle of radius width/2
Square = 2 # Path extends past final vertices with a width-by-width/2 rectangle Square = 2 # Path extends past final vertices with a width-by-width/2 rectangle
SquareCustom = 4 # Path extends past final vertices with a rectangle of length SquareCustom = 4 # Path extends past final vertices with a rectangle of length
# defined by path.cap_extensions # # defined by path.cap_extensions
class Path(Shape, metaclass=AutoSlots): class Path(Shape, metaclass=AutoSlots):
@ -103,7 +103,7 @@ class Path(Shape, metaclass=AutoSlots):
@vertices.setter @vertices.setter
def vertices(self, val: numpy.ndarray): def vertices(self, val: numpy.ndarray):
val = numpy.array(val, dtype=float) #TODO document that these might not be copied val = numpy.array(val, dtype=float) # TODO document that these might not be copied
if len(val.shape) < 2 or val.shape[1] != 2: if len(val.shape) < 2 or val.shape[1] != 2:
raise PatternError('Vertices must be an Nx2 array') raise PatternError('Vertices must be an Nx2 array')
if val.shape[0] < 2: if val.shape[0] < 2:
@ -184,7 +184,7 @@ class Path(Shape, metaclass=AutoSlots):
[self.mirror(a) for a, do in enumerate(mirrored) if do] [self.mirror(a) for a, do in enumerate(mirrored) if do]
self.set_locked(locked) self.set_locked(locked)
def __deepcopy__(self, memo: Dict = None) -> 'Path': def __deepcopy__(self, memo: Dict = None) -> 'Path':
memo = {} if memo is None else memo memo = {} if memo is None else memo
new = copy.copy(self).unlock() new = copy.copy(self).unlock()
new._offset = self._offset.copy() new._offset = self._offset.copy()
@ -199,7 +199,7 @@ class Path(Shape, metaclass=AutoSlots):
def travel(travel_pairs: Tuple[Tuple[float, float]], def travel(travel_pairs: Tuple[Tuple[float, float]],
width: float = 0.0, width: float = 0.0,
cap: PathCap = PathCap.Flush, cap: PathCap = PathCap.Flush,
cap_extensions = None, cap_extensions: Optional[Tuple[float, float]] = None,
offset: vector2 = (0.0, 0.0), offset: vector2 = (0.0, 0.0),
rotation: float = 0, rotation: float = 0,
mirrored: Sequence[bool] = (False, False), mirrored: Sequence[bool] = (False, False),
@ -275,9 +275,9 @@ class Path(Shape, metaclass=AutoSlots):
intersection_p = v[:-2] + rp * dv[:-1] + perp[:-1] intersection_p = v[:-2] + rp * dv[:-1] + perp[:-1]
intersection_n = v[:-2] + rn * dv[:-1] - perp[:-1] intersection_n = v[:-2] + rn * dv[:-1] - perp[:-1]
towards_perp = (dv[1:] * perp[:-1]).sum(axis=1) > 0 # path bends towards previous perp? towards_perp = (dv[1:] * perp[:-1]).sum(axis=1) > 0 # path bends towards previous perp?
# straight = (dv[1:] * perp[:-1]).sum(axis=1) == 0 # path is straight # straight = (dv[1:] * perp[:-1]).sum(axis=1) == 0 # path is straight
acute = (dv[1:] * dv[:-1]).sum(axis=1) < 0 # angle is acute? acute = (dv[1:] * dv[:-1]).sum(axis=1) < 0 # angle is acute?
# Build vertices # Build vertices
o0 = [v[0] + perp[0]] o0 = [v[0] + perp[0]]
@ -370,10 +370,10 @@ class Path(Shape, metaclass=AutoSlots):
width0 = self.width / norm_value width0 = self.width / norm_value
return (type(self), reordered_vertices.data.tobytes(), width0, self.cap, self.layer), \ return ((type(self), reordered_vertices.data.tobytes(), width0, self.cap, self.layer),
(offset, scale/norm_value, rotation, False, self.dose), \ (offset, scale / norm_value, rotation, False, self.dose),
lambda: Path(reordered_vertices*norm_value, width=self.width*norm_value, lambda: Path(reordered_vertices * norm_value, width=self.width * norm_value,
cap=self.cap, layer=self.layer) cap=self.cap, layer=self.layer))
def clean_vertices(self) -> 'Path': def clean_vertices(self) -> 'Path':
""" """
@ -409,7 +409,7 @@ class Path(Shape, metaclass=AutoSlots):
if self.cap == PathCap.Square: if self.cap == PathCap.Square:
extensions = numpy.full(2, self.width / 2) extensions = numpy.full(2, self.width / 2)
elif self.cap == PathCap.SquareCustom: elif self.cap == PathCap.SquareCustom:
extensions = self.cap_extensions extensions = self.cap_extensions
else: else:
# Flush or Circle # Flush or Circle
extensions = numpy.zeros(2) extensions = numpy.zeros(2)
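The final Path hunk shows how far a path extends past its end vertices for each cap style: width/2 for Square, caller-supplied extensions for SquareCustom, and nothing for Flush or Circle (the Circle cap adds a semicircle rather than a rectangle). A standalone restatement for illustration; the Flush = 0 enum value is an assumption, since only Circle, Square, and SquareCustom appear in the hunk:

    import enum
    import numpy

    class PathCap(enum.Enum):
        Flush = 0           # assumed value; not shown in the diff
        Circle = 1
        Square = 2
        SquareCustom = 4

    def cap_extensions(cap: PathCap, width: float, custom=None) -> numpy.ndarray:
        if cap == PathCap.Square:
            return numpy.full(2, width / 2)          # extend width/2 at each end
        if cap == PathCap.SquareCustom:
            return numpy.array(custom, dtype=float)  # caller-specified extensions
        return numpy.zeros(2)                        # Flush or Circle: no rectangle

    print(cap_extensions(PathCap.Square, 4.0))       # [2. 2.]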

@ -1,4 +1,4 @@
from typing import List, Tuple, Dict, Optional, Sequence from typing import List, Dict, Optional, Sequence
import copy import copy
import numpy # type: ignore import numpy # type: ignore
@ -34,7 +34,7 @@ class Polygon(Shape, metaclass=AutoSlots):
@vertices.setter @vertices.setter
def vertices(self, val: numpy.ndarray): def vertices(self, val: numpy.ndarray):
val = numpy.array(val, dtype=float) #TODO document that these might not be copied val = numpy.array(val, dtype=float) # TODO document that these might not be copied
if len(val.shape) < 2 or val.shape[1] != 2: if len(val.shape) < 2 or val.shape[1] != 2:
raise PatternError('Vertices must be an Nx2 array') raise PatternError('Vertices must be an Nx2 array')
if val.shape[0] < 3: if val.shape[0] < 3:
@ -104,7 +104,7 @@ class Polygon(Shape, metaclass=AutoSlots):
[self.mirror(a) for a, do in enumerate(mirrored) if do] [self.mirror(a) for a, do in enumerate(mirrored) if do]
self.set_locked(locked) self.set_locked(locked)
def __deepcopy__(self, memo: Optional[Dict] = None) -> 'Polygon': def __deepcopy__(self, memo: Optional[Dict] = None) -> 'Polygon':
memo = {} if memo is None else memo memo = {} if memo is None else memo
new = copy.copy(self).unlock() new = copy.copy(self).unlock()
new._offset = self._offset.copy() new._offset = self._offset.copy()
@ -269,7 +269,6 @@ class Polygon(Shape, metaclass=AutoSlots):
layer=layer, dose=dose) layer=layer, dose=dose)
return poly return poly
def to_polygons(self, def to_polygons(self,
poly_num_points: int = None, # unused poly_num_points: int = None, # unused
poly_max_arclen: float = None, # unused poly_max_arclen: float = None, # unused
@ -316,9 +315,9 @@ class Polygon(Shape, metaclass=AutoSlots):
# TODO: normalize mirroring? # TODO: normalize mirroring?
return (type(self), reordered_vertices.data.tobytes(), self.layer), \ return ((type(self), reordered_vertices.data.tobytes(), self.layer),
(offset, scale/norm_value, rotation, False, self.dose), \ (offset, scale / norm_value, rotation, False, self.dose),
lambda: Polygon(reordered_vertices*norm_value, layer=self.layer) lambda: Polygon(reordered_vertices * norm_value, layer=self.layer))
def clean_vertices(self) -> 'Polygon': def clean_vertices(self) -> 'Polygon':
""" """

@ -1,11 +1,8 @@
from typing import List, Tuple, Callable, TypeVar, Optional, TYPE_CHECKING from typing import List, Tuple, Callable, TypeVar, Optional, TYPE_CHECKING
from abc import ABCMeta, abstractmethod from abc import ABCMeta, abstractmethod
import copy
import numpy # type: ignore import numpy # type: ignore
from ..error import PatternError, PatternLockedError
from ..utils import rotation_matrix_2d, vector2, layer_t
from ..traits import (PositionableImpl, LayerableImpl, DoseableImpl, from ..traits import (PositionableImpl, LayerableImpl, DoseableImpl,
Rotatable, Mirrorable, Copyable, Scalable, Rotatable, Mirrorable, Copyable, Scalable,
PivotableImpl, LockableImpl, RepeatableImpl, PivotableImpl, LockableImpl, RepeatableImpl,
@ -142,7 +139,6 @@ class Shape(PositionableImpl, LayerableImpl, DoseableImpl, Rotatable, Mirrorable
if err_xmax >= 0.5: if err_xmax >= 0.5:
gxi_max += 1 gxi_max += 1
if abs(dv[0]) < 1e-20: if abs(dv[0]) < 1e-20:
# Vertical line, don't calculate slope # Vertical line, don't calculate slope
xi = [gxi_min, gxi_max - 1] xi = [gxi_min, gxi_max - 1]
@ -155,8 +151,9 @@ class Shape(PositionableImpl, LayerableImpl, DoseableImpl, Rotatable, Mirrorable
vertex_lists.append(segment) vertex_lists.append(segment)
continue continue
m = dv[1]/dv[0] m = dv[1] / dv[0]
def get_grid_inds(xes):
def get_grid_inds(xes: numpy.ndarray) -> numpy.ndarray:
ys = m * (xes - v[0]) + v[1] ys = m * (xes - v[0]) + v[1]
# (inds - 1) is the index of the y-grid line below the edge's intersection with the x-grid # (inds - 1) is the index of the y-grid line below the edge's intersection with the x-grid
@ -178,7 +175,7 @@ class Shape(PositionableImpl, LayerableImpl, DoseableImpl, Rotatable, Mirrorable
xs2 = (xs[:-1] + xs[1:]) / 2 xs2 = (xs[:-1] + xs[1:]) / 2
inds2 = get_grid_inds(xs2) inds2 = get_grid_inds(xs2)
xinds = numpy.round(numpy.arange(gxi_min, gxi_max - 0.99, 1/3)).astype(int) xinds = numpy.round(numpy.arange(gxi_min, gxi_max - 0.99, 1 / 3)).astype(int)
# interleave the results # interleave the results
yinds = xinds.copy() yinds = xinds.copy()
@ -202,7 +199,6 @@ class Shape(PositionableImpl, LayerableImpl, DoseableImpl, Rotatable, Mirrorable
return manhattan_polygons return manhattan_polygons
def manhattanize(self, def manhattanize(self,
grid_x: numpy.ndarray, grid_x: numpy.ndarray,
grid_y: numpy.ndarray grid_y: numpy.ndarray
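The Shape hunk adds type annotations to get_grid_inds, the helper that the manhattanization code uses to find which y-grid cell an edge of slope m passes through at given x positions. Only the signature and the first line of the body are visible in the diff, so the searchsorted completion below is an assumed sketch, not the actual implementation:

    import numpy

    def get_grid_inds(xes: numpy.ndarray,
                      v: numpy.ndarray,        # edge start vertex (x0, y0)
                      m: float,                # edge slope dy/dx
                      grid_y: numpy.ndarray,   # sorted y-grid line coordinates
                      ) -> numpy.ndarray:
        ys = m * (xes - v[0]) + v[1]             # edge height at each x sample
        # (inds - 1) is then the index of the y-grid line just below the edge,
        # since grid_y[inds - 1] <= ys < grid_y[inds].
        return numpy.searchsorted(grid_y, ys, side='right')

    grid_y = numpy.array([0.0, 1.0, 2.0, 3.0])
    xes = numpy.array([0.0, 0.5, 1.0])
    print(get_grid_inds(xes, numpy.array([0.0, 0.2]), m=2.0, grid_y=grid_y))   # [1 2 3]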

@ -1,4 +1,4 @@
from typing import List, Tuple, Dict, Sequence, Optional, MutableSequence from typing import List, Tuple, Dict, Sequence, Optional
import copy import copy
import numpy # type: ignore import numpy # type: ignore
@ -26,7 +26,7 @@ class Text(RotatableImpl, Shape, metaclass=AutoSlots):
_string: str _string: str
_height: float _height: float
_mirrored: numpy.ndarray #ndarray[bool] _mirrored: numpy.ndarray # ndarray[bool]
font_path: str font_path: str
# vertices property # vertices property
@ -51,7 +51,7 @@ class Text(RotatableImpl, Shape, metaclass=AutoSlots):
# Mirrored property # Mirrored property
@property @property
def mirrored(self) -> numpy.ndarray: #ndarray[bool] def mirrored(self) -> numpy.ndarray: # ndarray[bool]
return self._mirrored return self._mirrored
@mirrored.setter @mirrored.setter
@ -100,7 +100,7 @@ class Text(RotatableImpl, Shape, metaclass=AutoSlots):
self.font_path = font_path self.font_path = font_path
self.set_locked(locked) self.set_locked(locked)
def __deepcopy__(self, memo: Dict = None) -> 'Text': def __deepcopy__(self, memo: Dict = None) -> 'Text':
memo = {} if memo is None else memo memo = {} if memo is None else memo
new = copy.copy(self).unlock() new = copy.copy(self).unlock()
new._offset = self._offset.copy() new._offset = self._offset.copy()
@ -144,14 +144,14 @@ class Text(RotatableImpl, Shape, metaclass=AutoSlots):
mirror_x, rotation = normalize_mirror(self.mirrored) mirror_x, rotation = normalize_mirror(self.mirrored)
rotation += self.rotation rotation += self.rotation
rotation %= 2 * pi rotation %= 2 * pi
return (type(self), self.string, self.font_path, self.layer), \ return ((type(self), self.string, self.font_path, self.layer),
(self.offset, self.height / norm_value, rotation, mirror_x, self.dose), \ (self.offset, self.height / norm_value, rotation, mirror_x, self.dose),
lambda: Text(string=self.string, lambda: Text(string=self.string,
height=self.height * norm_value, height=self.height * norm_value,
font_path=self.font_path, font_path=self.font_path,
rotation=rotation, rotation=rotation,
mirrored=(mirror_x, False), mirrored=(mirror_x, False),
layer=self.layer) layer=self.layer))
def get_bounds(self) -> numpy.ndarray: def get_bounds(self) -> numpy.ndarray:
# rotation makes this a huge pain when using slot.advance and glyph.bbox(), so # rotation makes this a huge pain when using slot.advance and glyph.bbox(), so
@ -168,7 +168,7 @@ class Text(RotatableImpl, Shape, metaclass=AutoSlots):
def get_char_as_polygons(font_path: str, def get_char_as_polygons(font_path: str,
char: str, char: str,
resolution: float = 48*64, resolution: float = 48 * 64,
) -> Tuple[List[List[List[float]]], float]: ) -> Tuple[List[List[List[float]]], float]:
from freetype import Face # type: ignore from freetype import Face # type: ignore
from matplotlib.path import Path # type: ignore from matplotlib.path import Path # type: ignore
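The Text hunk keeps get_char_as_polygons' default resolution of 48 * 64. FreeType sizes are given in 26.6 fixed-point units (64ths of a point), so this default requests a 48-point glyph. A hedged sketch of that convention using freetype-py; the function below is illustrative only, and the real get_char_as_polygons goes on to extract the outline contours (via matplotlib.path) rather than just the advance width:

    from freetype import Face  # type: ignore

    def char_advance_points(font_path: str, char: str, resolution: float = 48 * 64) -> float:
        face = Face(font_path)
        face.set_char_size(int(resolution))   # size in 26.6 fixed point (1/64 pt)
        face.load_char(char)
        # The glyph advance is also a 26.6 fixed-point quantity; divide by 64.
        return face.glyph.advance.x / 64.0

    # Usage (font path is a placeholder):
    # print(char_advance_points('/path/to/font.ttf', 'A'))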

@ -4,14 +4,14 @@
""" """
#TODO more top-level documentation #TODO more top-level documentation
from typing import Union, List, Dict, Tuple, Optional, Sequence, TYPE_CHECKING, Any from typing import Dict, Tuple, Optional, Sequence, TYPE_CHECKING, Any
import copy import copy
import numpy # type: ignore import numpy # type: ignore
from numpy import pi from numpy import pi
from .error import PatternError, PatternLockedError from .error import PatternError
from .utils import is_scalar, rotation_matrix_2d, vector2, AutoSlots, annotations_t from .utils import is_scalar, vector2, AutoSlots, annotations_t
from .repetition import Repetition from .repetition import Repetition
from .traits import (PositionableImpl, DoseableImpl, RotatableImpl, ScalableImpl, from .traits import (PositionableImpl, DoseableImpl, RotatableImpl, ScalableImpl,
Mirrorable, PivotableImpl, Copyable, LockableImpl, RepeatableImpl, Mirrorable, PivotableImpl, Copyable, LockableImpl, RepeatableImpl,
@ -82,7 +82,7 @@ class SubPattern(PositionableImpl, DoseableImpl, RotatableImpl, ScalableImpl, Mi
self.annotations = annotations if annotations is not None else {} self.annotations = annotations if annotations is not None else {}
self.set_locked(locked) self.set_locked(locked)
def __copy__(self) -> 'SubPattern': def __copy__(self) -> 'SubPattern':
new = SubPattern(pattern=self.pattern, new = SubPattern(pattern=self.pattern,
offset=self.offset.copy(), offset=self.offset.copy(),
rotation=self.rotation, rotation=self.rotation,
@ -94,7 +94,7 @@ class SubPattern(PositionableImpl, DoseableImpl, RotatableImpl, ScalableImpl, Mi
locked=self.locked) locked=self.locked)
return new return new
def __deepcopy__(self, memo: Dict = None) -> 'SubPattern': def __deepcopy__(self, memo: Dict = None) -> 'SubPattern':
memo = {} if memo is None else memo memo = {} if memo is None else memo
new = copy.copy(self).unlock() new = copy.copy(self).unlock()
new.pattern = copy.deepcopy(self.pattern, memo) new.pattern = copy.deepcopy(self.pattern, memo)
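SubPattern implements both __copy__ (shallow: the new instance references the same pattern) and __deepcopy__, and the __deepcopy__ methods throughout these hunks forward the memo dict to copy.deepcopy. A small self-contained illustration of why that matters; the Node class is hypothetical, not masque code:

    import copy
    from typing import Dict, Optional

    class Node:
        def __init__(self, payload, shared):
            self.payload = payload
            self.shared = shared      # possibly referenced by many Nodes

        def __deepcopy__(self, memo: Optional[Dict] = None) -> 'Node':
            memo = {} if memo is None else memo
            new = copy.copy(self)
            # Passing memo on preserves sharing: the shared object is copied once.
            new.shared = copy.deepcopy(self.shared, memo)
            return new

    shared = {'grid': [1, 2, 3]}
    a, b = Node('a', shared), Node('b', shared)
    a2, b2 = copy.deepcopy([a, b])     # one memo dict flows through both copies
    print(a2.shared is b2.shared)      # True:  sharing preserved
    print(a2.shared is shared)         # False: it is a fresh copy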

@ -1,7 +1,6 @@
from typing import TypeVar from typing import TypeVar
from types import MappingProxyType #from types import MappingProxyType
from abc import ABCMeta, abstractmethod from abc import ABCMeta, abstractmethod
import copy
from ..utils import annotations_t from ..utils import annotations_t
from ..error import PatternError from ..error import PatternError
@ -44,10 +43,10 @@ class AnnotatableImpl(Annotatable, metaclass=ABCMeta):
''' '''
@property @property
def annotations(self) -> annotations_t: def annotations(self) -> annotations_t:
return self._annotations
# # TODO: Find a way to make sure the subclass implements Lockable without dealing with diamond inheritance or this extra hasattr # # TODO: Find a way to make sure the subclass implements Lockable without dealing with diamond inheritance or this extra hasattr
# if hasattr(self, 'is_locked') and self.is_locked(): # if hasattr(self, 'is_locked') and self.is_locked():
# return MappingProxyType(self._annotations) # return MappingProxyType(self._annotations)
return self._annotations
@annotations.setter @annotations.setter
def annotations(self, annotations: annotations_t): def annotations(self, annotations: annotations_t):
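The commented-out lines kept in the annotations property describe a possible follow-up: return a read-only MappingProxyType view when the object is locked. A hypothetical sketch of that idea (not masque's AnnotatableImpl, which currently just returns the dict):

    from types import MappingProxyType

    class AnnotatableSketch:
        def __init__(self):
            self._annotations = {}
            self.locked = False

        @property
        def annotations(self):
            if self.locked:
                return MappingProxyType(self._annotations)   # read-only view
            return self._annotations

    a = AnnotatableSketch()
    a.annotations['note'] = [1.0]
    a.locked = True
    try:
        a.annotations['note'] = [2.0]
    except TypeError as err:
        print('read-only while locked:', err)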

@ -1,5 +1,5 @@
from typing import List, Tuple, Callable, TypeVar, Optional from typing import TypeVar
from abc import ABCMeta, abstractmethod from abc import ABCMeta
import copy import copy

@ -1,9 +1,7 @@
from typing import List, Tuple, Callable, TypeVar, Optional from typing import TypeVar
from abc import ABCMeta, abstractmethod from abc import ABCMeta, abstractmethod
import copy
from ..error import PatternError, PatternLockedError from ..error import PatternError
from ..utils import is_scalar
T = TypeVar('T', bound='Doseable') T = TypeVar('T', bound='Doseable')
@ -70,7 +68,6 @@ class DoseableImpl(Doseable, metaclass=ABCMeta):
raise PatternError('Dose must be non-negative') raise PatternError('Dose must be non-negative')
self._dose = val self._dose = val
''' '''
---- Non-abstract methods ---- Non-abstract methods
''' '''

@ -1,8 +1,6 @@
from typing import List, Tuple, Callable, TypeVar, Optional from typing import TypeVar
from abc import ABCMeta, abstractmethod from abc import ABCMeta, abstractmethod
import copy
from ..error import PatternError, PatternLockedError
from ..utils import layer_t from ..utils import layer_t

@ -1,8 +1,7 @@
from typing import List, Tuple, Callable, TypeVar, Optional from typing import TypeVar
from abc import ABCMeta, abstractmethod from abc import ABCMeta, abstractmethod
import copy
from ..error import PatternError, PatternLockedError from ..error import PatternLockedError
T = TypeVar('T', bound='Lockable') T = TypeVar('T', bound='Lockable')

@ -1,8 +1,5 @@
from typing import List, Tuple, Callable, TypeVar, Optional from typing import TypeVar
from abc import ABCMeta, abstractmethod from abc import ABCMeta, abstractmethod
import copy
from ..error import PatternError, PatternLockedError
T = TypeVar('T', bound='Mirrorable') T = TypeVar('T', bound='Mirrorable')

@ -1,12 +1,11 @@
# TODO top-level comment about how traits should set __slots__ = (), and how to use AutoSlots # TODO top-level comment about how traits should set __slots__ = (), and how to use AutoSlots
from typing import List, Tuple, Callable, TypeVar, Optional from typing import TypeVar
from abc import ABCMeta, abstractmethod from abc import ABCMeta, abstractmethod
import copy
import numpy # type: ignore import numpy # type: ignore
from ..error import PatternError, PatternLockedError from ..error import PatternError
from ..utils import is_scalar, rotation_matrix_2d, vector2 from ..utils import vector2
T = TypeVar('T', bound='Positionable') T = TypeVar('T', bound='Positionable')
@ -101,7 +100,6 @@ class PositionableImpl(Positionable, metaclass=ABCMeta):
raise PatternError('Offset must be convertible to size-2 ndarray') raise PatternError('Offset must be convertible to size-2 ndarray')
self._offset = val.flatten() self._offset = val.flatten()
''' '''
---- Methods ---- Methods
''' '''
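The PositionableImpl hunk shows the offset setter rejecting anything that cannot be viewed as a size-2 ndarray and storing the flattened result. The exact validity check is not visible in the hunk, so the size test below is an assumption; a plain ValueError stands in for PatternError to keep the snippet self-contained:

    import numpy

    def coerce_offset(val) -> numpy.ndarray:
        val = numpy.array(val, dtype=float)
        if val.size != 2:
            raise ValueError('Offset must be convertible to size-2 ndarray')
        return val.flatten()

    print(coerce_offset([[1.0], [2.5]]))   # [1.  2.5]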

@ -1,8 +1,7 @@
from typing import List, Tuple, Callable, TypeVar, Optional, TYPE_CHECKING from typing import TypeVar, Optional, TYPE_CHECKING
from abc import ABCMeta, abstractmethod from abc import ABCMeta, abstractmethod
import copy
from ..error import PatternError, PatternLockedError from ..error import PatternError
if TYPE_CHECKING: if TYPE_CHECKING:

@ -1,12 +1,11 @@
from typing import List, Tuple, Callable, TypeVar, Optional from typing import TypeVar
from abc import ABCMeta, abstractmethod from abc import ABCMeta, abstractmethod
import copy
import numpy # type: ignore import numpy # type: ignore
from numpy import pi from numpy import pi
from .positionable import Positionable #from .positionable import Positionable
from ..error import PatternError, PatternLockedError from ..error import PatternError
from ..utils import is_scalar, rotation_matrix_2d, vector2 from ..utils import is_scalar, rotation_matrix_2d, vector2
T = TypeVar('T', bound='Rotatable') T = TypeVar('T', bound='Rotatable')

@ -1,8 +1,7 @@
from typing import List, Tuple, Callable, TypeVar, Optional from typing import TypeVar
from abc import ABCMeta, abstractmethod from abc import ABCMeta, abstractmethod
import copy
from ..error import PatternError, PatternLockedError from ..error import PatternError
from ..utils import is_scalar from ..utils import is_scalar

@ -84,7 +84,7 @@ def normalize_mirror(mirrored: Sequence[bool]) -> Tuple[bool, float]:
""" """
mirrored_x, mirrored_y = mirrored mirrored_x, mirrored_y = mirrored
mirror_x = (mirrored_x != mirrored_y) #XOR mirror_x = (mirrored_x != mirrored_y) # XOR
angle = numpy.pi if mirrored_y else 0 angle = numpy.pi if mirrored_y else 0
return mirror_x, angle return mirror_x, angle
@ -124,8 +124,8 @@ def remove_colinear_vertices(vertices: numpy.ndarray, closed_path: bool = True)
# Check for dx0/dy0 == dx1/dy1 # Check for dx0/dy0 == dx1/dy1
dv = numpy.roll(vertices, -1, axis=0) - vertices # [y1-y0, y2-y1, ...] dv = numpy.roll(vertices, -1, axis=0) - vertices # [y1-y0, y2-y1, ...]
dxdy = dv * numpy.roll(dv, 1, axis=0)[:, ::-1] #[[dx0*(dy_-1), (dx_-1)*dy0], dx1*dy0, dy1*dy0]] dxdy = dv * numpy.roll(dv, 1, axis=0)[:, ::-1] # [[dx0*(dy_-1), (dx_-1)*dy0], dx1*dy0, dy1*dy0]]
dxdy_diff = numpy.abs(numpy.diff(dxdy, axis=1))[:, 0] dxdy_diff = numpy.abs(numpy.diff(dxdy, axis=1))[:, 0]
err_mult = 2 * numpy.abs(dxdy).sum(axis=1) + 1e-40 err_mult = 2 * numpy.abs(dxdy).sum(axis=1) + 1e-40
