style and type fixes (per flake8)
could potentially fix some bugs in `Library` class and dxf reader
This commit is contained in:
parent f6ad272c2c
commit f364970403

.flake8 (new file, 29 lines)
@@ -0,0 +1,29 @@
+[flake8]
+ignore =
+# E501 line too long
+E501,
+# W391 newlines at EOF
+W391,
+# E241 multiple spaces after comma
+E241,
+# E302 expected 2 newlines
+E302,
+# W503 line break before binary operator (to be deprecated)
+W503,
+# E265 block comment should start with '# '
+E265,
+# E123 closing bracket does not match indentation of opening bracket's line
+E123,
+# E124 closing bracket does not match visual indentation
+E124,
+# E221 multiple spaces before operator
+E221,
+# E201 whitespace after '['
+E201,
+# E741 ambiguous variable name 'I'
+E741,
+
+
+per-file-ignores =
+# F401 import without use
+*/__init__.py: F401,
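Note: the `per-file-ignores` entry exists for re-exporting package `__init__.py` files, which would otherwise trip F401 (imported but unused) on every line. A minimal illustration of the kind of file it is meant for; the module and function names below are made up, not taken from this repository:

    # hypothetical package __init__.py that only re-exports names
    from .reader import read     # F401 would normally flag this...
    from .writer import write    # ...but */__init__.py: F401 keeps flake8 quiet here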
@@ -1,3 +1,4 @@
 """
 Functions for reading from and writing to various file formats.
 """
+
@@ -1,10 +1,9 @@
 """
 DXF file format readers and writers
 """
-from typing import List, Any, Dict, Tuple, Callable, Union, Sequence, Iterable, Optional
+from typing import List, Any, Dict, Tuple, Callable, Union, Sequence, Iterable
 import re
 import io
-import copy
 import base64
 import struct
 import logging

@@ -12,15 +11,12 @@ import pathlib
 import gzip

 import numpy # type: ignore
-from numpy import pi
 import ezdxf # type: ignore

-from .utils import mangle_name, make_dose_table
 from .. import Pattern, SubPattern, PatternError, Label, Shape
 from ..shapes import Polygon, Path
 from ..repetition import Grid
-from ..utils import rotation_matrix_2d, get_bit, set_bit, vector2, is_scalar, layer_t
-from ..utils import remove_colinear_vertices, normalize_mirror
+from ..utils import rotation_matrix_2d, layer_t


 logger = logging.getLogger(__name__)

@@ -75,6 +71,7 @@ def write(pattern: Pattern,
 #TODO consider supporting DXF arcs?
 if disambiguate_func is None:
 disambiguate_func = disambiguate_pattern_names
+assert(disambiguate_func is not None)

 if not modify_originals:
 pattern = pattern.deepcopy().deepunlock()

@@ -125,8 +122,7 @@ def writefile(pattern: Pattern,
 open_func = open

 with open_func(path, mode='wt') as stream:
-results = write(pattern, stream, *args, **kwargs)
-return results
+write(pattern, stream, *args, **kwargs)


 def readfile(filename: Union[str, pathlib.Path],

@@ -204,25 +200,26 @@ def _read_block(block, clean_vertices: bool) -> Pattern:
 else:
 points = numpy.array(tuple(element.points()))
 attr = element.dxfattribs()
-args = {'layer': attr.get('layer', DEFAULT_LAYER),
-}
+layer = attr.get('layer', DEFAULT_LAYER)

 if points.shape[1] == 2:
-shape = Polygon(**args)
+raise PatternError('Invalid or unimplemented polygon?')
+#shape = Polygon(layer=layer)
 elif points.shape[1] > 2:
 if (points[0, 2] != points[:, 2]).any():
 raise PatternError('PolyLine has non-constant width (not yet representable in masque!)')
 elif points.shape[1] == 4 and (points[:, 3] != 0).any():
 raise PatternError('LWPolyLine has bulge (not yet representable in masque!)')
-else:
 width = points[0, 2]
 if width == 0:
 width = attr.get('const_width', 0)

+shape: Union[Path, Polygon]
 if width == 0 and numpy.array_equal(points[0], points[-1]):
-shape = Polygon(**args, vertices=points[:-1, :2])
+shape = Polygon(layer=layer, vertices=points[:-1, :2])
 else:
-shape = Path(**args, width=width, vertices=points[:, :2])
+shape = Path(layer=layer, width=width, vertices=points[:, :2])

 if clean_vertices:
 try:

@@ -237,7 +234,7 @@ def _read_block(block, clean_vertices: bool) -> Pattern:
 'layer': element.dxfattribs().get('layer', DEFAULT_LAYER),
 }
 string = element.dxfattribs().get('text', '')
-height = element.dxfattribs().get('height', 0)
+# height = element.dxfattribs().get('height', 0)
 # if height != 0:
 # logger.warning('Interpreting DXF TEXT as a label despite nonzero height. '
 # 'This could be changed in the future by setting a font path in the masque DXF code.')

@@ -252,7 +249,7 @@ def _read_block(block, clean_vertices: bool) -> Pattern:
 logger.warning('Masque does not support per-axis scaling; using x-scaling only!')
 scale = abs(xscale)
 mirrored = (yscale < 0, xscale < 0)
-rotation = attr.get('rotation', 0) * pi/180
+rotation = numpy.deg2rad(attr.get('rotation', 0))

 offset = attr.get('insert', (0, 0, 0))[:2]

@@ -266,8 +263,7 @@ def _read_block(block, clean_vertices: bool) -> Pattern:
 }

 if 'column_count' in attr:
-args['repetition'] = Grid(
-a_vector=(attr['column_spacing'], 0),
+args['repetition'] = Grid(a_vector=(attr['column_spacing'], 0),
 b_vector=(0, attr['row_spacing']),
 a_count=attr['column_count'],
 b_count=attr['row_count'])

@@ -356,11 +352,11 @@ def _mlayer2dxf(layer: layer_t) -> str:
 def disambiguate_pattern_names(patterns: Sequence[Pattern],
 max_name_length: int = 32,
 suffix_length: int = 6,
-dup_warn_filter: Callable[[str,], bool] = None, # If returns False, don't warn about this name
+dup_warn_filter: Callable[[str], bool] = None, # If returns False, don't warn about this name
 ) -> None:
 used_names = []
 for pat in patterns:
-sanitized_name = re.compile('[^A-Za-z0-9_\?\$]').sub('_', pat.name)
+sanitized_name = re.compile(r'[^A-Za-z0-9_\?\$]').sub('_', pat.name)

 i = 0
 suffixed_name = sanitized_name

@@ -374,15 +370,15 @@ def disambiguate_pattern_names(patterns: Sequence[Pattern],
 logger.warning(f'Empty pattern name saved as "{suffixed_name}"')
 elif suffixed_name != sanitized_name:
 if dup_warn_filter is None or dup_warn_filter(pat.name):
-logger.warning(f'Pattern name "{pat.name}" ({sanitized_name}) appears multiple times;\n' +
-f' renaming to "{suffixed_name}"')
+logger.warning(f'Pattern name "{pat.name}" ({sanitized_name}) appears multiple times;\n'
++ f' renaming to "{suffixed_name}"')

 if len(suffixed_name) == 0:
 # Should never happen since zero-length names are replaced
 raise PatternError(f'Zero-length name after sanitize,\n originally "{pat.name}"')
 if len(suffixed_name) > max_name_length:
-raise PatternError(f'Pattern name "{suffixed_name!r}" length > {max_name_length} after encode,\n' +
-f' originally "{pat.name}"')
+raise PatternError(f'Pattern name "{suffixed_name!r}" length > {max_name_length} after encode,\n'
++ f' originally "{pat.name}"')

 pat.name = suffixed_name
 used_names.append(suffixed_name)
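Note: two small fixes recur through the DXF hunks above and the GDSII/OASIS hunks below. Regex literals gain an `r''` prefix, so `\?` and `\$` are real characters instead of invalid string escapes (flake8 W605), and degree-to-radian conversions move from `* pi/180` to `numpy.deg2rad`. A quick, purely illustrative sanity check of both:

    import re
    import numpy
    assert re.compile(r'[^A-Za-z0-9_\?\$]').sub('_', 'top cell #1') == 'top_cell__1'
    assert numpy.isclose(numpy.deg2rad(90), numpy.pi / 2)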
@@ -17,8 +17,8 @@ Notes:
 * ELFLAGS are not supported
 * GDS does not support library- or structure-level annotations
 """
-from typing import List, Any, Dict, Tuple, Callable, Union, Sequence, Iterable, Optional
-from typing import Sequence, Mapping
+from typing import List, Any, Dict, Tuple, Callable, Union, Iterable, Optional
+from typing import Sequence
 import re
 import io
 import copy

@@ -34,13 +34,11 @@ import gdsii.library
 import gdsii.structure
 import gdsii.elements

-from .utils import mangle_name, make_dose_table, dose2dtype, dtype2dose, clean_pattern_vertices
-from .utils import is_gzipped
+from .utils import clean_pattern_vertices, is_gzipped
 from .. import Pattern, SubPattern, PatternError, Label, Shape
 from ..shapes import Polygon, Path
 from ..repetition import Grid
-from ..utils import rotation_matrix_2d, get_bit, set_bit, vector2, is_scalar, layer_t
-from ..utils import remove_colinear_vertices, normalize_mirror, annotations_t
+from ..utils import get_bit, set_bit, layer_t, normalize_mirror, annotations_t


 logger = logging.getLogger(__name__)

@@ -262,8 +260,7 @@ def read(stream: io.BufferedIOBase,
 string=element.string.decode('ASCII'))
 pat.labels.append(label)

-elif (isinstance(element, gdsii.elements.SRef) or
-isinstance(element, gdsii.elements.ARef)):
+elif isinstance(element, (gdsii.elements.SRef, gdsii.elements.ARef)):
 pat.subpatterns.append(_ref_to_subpat(element))

 if clean_vertices:

@@ -437,7 +434,7 @@ def _annotations_to_properties(annotations: annotations_t, max_len: int = 126) -
 for key, vals in annotations.items():
 try:
 i = int(key)
-except:
+except ValueError:
 raise PatternError(f'Annotation key {key} is not convertable to an integer')
 if not (0 < i < 126):
 raise PatternError(f'Annotation key {key} converts to {i} (must be in the range [1,125])')

@@ -502,7 +499,7 @@ def _labels_to_texts(labels: List[Label]) -> List[gdsii.elements.Text]:
 def disambiguate_pattern_names(patterns: Sequence[Pattern],
 max_name_length: int = 32,
 suffix_length: int = 6,
-dup_warn_filter: Optional[Callable[[str,], bool]] = None,
+dup_warn_filter: Optional[Callable[[str], bool]] = None,
 ):
 """
 Args:

@@ -519,13 +516,13 @@ def disambiguate_pattern_names(patterns: Sequence[Pattern],
 # Shorten names which already exceed max-length
 if len(pat.name) > max_name_length:
 shortened_name = pat.name[:max_name_length - suffix_length]
-logger.warning(f'Pattern name "{pat.name}" is too long ({len(pat.name)}/{max_name_length} chars),\n' +
-f' shortening to "{shortened_name}" before generating suffix')
+logger.warning(f'Pattern name "{pat.name}" is too long ({len(pat.name)}/{max_name_length} chars),\n'
++ f' shortening to "{shortened_name}" before generating suffix')
 else:
 shortened_name = pat.name

 # Remove invalid characters
-sanitized_name = re.compile('[^A-Za-z0-9_\?\$]').sub('_', shortened_name)
+sanitized_name = re.compile(r'[^A-Za-z0-9_\?\$]').sub('_', shortened_name)

 # Add a suffix that makes the name unique
 i = 0

@@ -540,8 +537,8 @@ def disambiguate_pattern_names(patterns: Sequence[Pattern],
 logger.warning(f'Empty pattern name saved as "{suffixed_name}"')
 elif suffixed_name != sanitized_name:
 if dup_warn_filter is None or dup_warn_filter(pat.name):
-logger.warning(f'Pattern name "{pat.name}" ({sanitized_name}) appears multiple times;\n' +
-f' renaming to "{suffixed_name}"')
+logger.warning(f'Pattern name "{pat.name}" ({sanitized_name}) appears multiple times;\n'
++ f' renaming to "{suffixed_name}"')

 # Encode into a byte-string and perform some final checks
 encoded_name = suffixed_name.encode('ASCII')

@@ -549,8 +546,8 @@ def disambiguate_pattern_names(patterns: Sequence[Pattern],
 # Should never happen since zero-length names are replaced
 raise PatternError(f'Zero-length name after sanitize+encode,\n originally "{pat.name}"')
 if len(encoded_name) > max_name_length:
-raise PatternError(f'Pattern name "{encoded_name!r}" length > {max_name_length} after encode,\n' +
-f' originally "{pat.name}"')
+raise PatternError(f'Pattern name "{encoded_name!r}" length > {max_name_length} after encode,\n'
++ f' originally "{pat.name}"')

 pat.name = suffixed_name
 used_names.append(suffixed_name)
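Note: the `except:` -> `except ValueError:` change above (repeated in the next file) narrows the handler to the only exception `int()` raises for a malformed string key; a bare `except:` would also have swallowed `KeyboardInterrupt` and `SystemExit`. Illustrative only:

    int('7')          # -> 7
    int('layer_a')    # raises ValueError, which the code above turns into a PatternError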
@@ -18,8 +18,8 @@ Notes:
 * GDS does not support library- or structure-level annotations
 * Creation/modification/access times are set to 1900-01-01 for reproducibility.
 """
-from typing import List, Any, Dict, Tuple, Callable, Union, Sequence, Iterable, Optional
-from typing import Sequence, Mapping, BinaryIO
+from typing import List, Any, Dict, Tuple, Callable, Union, Iterable, Optional
+from typing import Sequence, BinaryIO
 import re
 import io
 import mmap

@@ -29,18 +29,16 @@ import struct
 import logging
 import pathlib
 import gzip
-from itertools import chain

 import numpy # type: ignore
 import klamath
 from klamath import records

-from .utils import mangle_name, make_dose_table, dose2dtype, dtype2dose, is_gzipped
+from .utils import is_gzipped
 from .. import Pattern, SubPattern, PatternError, Label, Shape
 from ..shapes import Polygon, Path
 from ..repetition import Grid
-from ..utils import rotation_matrix_2d, get_bit, set_bit, vector2, is_scalar, layer_t
-from ..utils import remove_colinear_vertices, normalize_mirror, annotations_t
+from ..utils import layer_t, normalize_mirror, annotations_t
 from ..library import Library

 logger = logging.getLogger(__name__)

@@ -144,15 +142,15 @@ def writefile(patterns: Union[Sequence[Pattern], Pattern],
 **kwargs,
 ) -> None:
 """
-Wrapper for `masque.file.gdsii.write()` that takes a filename or path instead of a stream.
+Wrapper for `write()` that takes a filename or path instead of a stream.

 Will automatically compress the file if it has a .gz suffix.

 Args:
 patterns: `Pattern` or list of patterns to save
 filename: Filename to save to.
-*args: passed to `masque.file.gdsii.write`
-**kwargs: passed to `masque.file.gdsii.write`
+*args: passed to `write()`
+**kwargs: passed to `write()`
 """
 path = pathlib.Path(filename)
 if path.suffix == '.gz':

@@ -169,14 +167,14 @@ def readfile(filename: Union[str, pathlib.Path],
 **kwargs,
 ) -> Tuple[Dict[str, Pattern], Dict[str, Any]]:
 """
-Wrapper for `masque.file.gdsii.read()` that takes a filename or path instead of a stream.
+Wrapper for `read()` that takes a filename or path instead of a stream.

 Will automatically decompress gzipped files.

 Args:
 filename: Filename to save to.
-*args: passed to `masque.file.gdsii.read`
-**kwargs: passed to `masque.file.gdsii.read`
+*args: passed to `read()`
+**kwargs: passed to `read()`
 """
 path = pathlib.Path(filename)
 if is_gzipped(path):

@@ -185,7 +183,7 @@ def readfile(filename: Union[str, pathlib.Path],
 open_func = open

 with io.BufferedReader(open_func(path, mode='rb')) as stream:
-results = read(stream)#, *args, **kwargs)
+results = read(stream, *args, **kwargs)
 return results


@@ -216,7 +214,7 @@ def read(stream: BinaryIO,
 found_struct = records.BGNSTR.skip_past(stream)
 while found_struct:
 name = records.STRNAME.skip_and_read(stream)
-pat = read_elements(stream, name=name.decode('ASCII'))
+pat = read_elements(stream, name=name.decode('ASCII'), raw_mode=raw_mode)
 patterns.append(pat)
 found_struct = records.BGNSTR.skip_past(stream)

@@ -412,7 +410,7 @@ def _annotations_to_properties(annotations: annotations_t, max_len: int = 126) -
 for key, vals in annotations.items():
 try:
 i = int(key)
-except:
+except ValueError:
 raise PatternError(f'Annotation key {key} is not convertable to an integer')
 if not (0 < i < 126):
 raise PatternError(f'Annotation key {key} converts to {i} (must be in the range [1,125])')

@@ -496,7 +494,7 @@ def _labels_to_texts(labels: List[Label]) -> List[klamath.elements.Text]:
 def disambiguate_pattern_names(patterns: Sequence[Pattern],
 max_name_length: int = 32,
 suffix_length: int = 6,
-dup_warn_filter: Optional[Callable[[str,], bool]] = None,
+dup_warn_filter: Optional[Callable[[str], bool]] = None,
 ):
 """
 Args:

@@ -513,13 +511,13 @@ def disambiguate_pattern_names(patterns: Sequence[Pattern],
 # Shorten names which already exceed max-length
 if len(pat.name) > max_name_length:
 shortened_name = pat.name[:max_name_length - suffix_length]
-logger.warning(f'Pattern name "{pat.name}" is too long ({len(pat.name)}/{max_name_length} chars),\n' +
-f' shortening to "{shortened_name}" before generating suffix')
+logger.warning(f'Pattern name "{pat.name}" is too long ({len(pat.name)}/{max_name_length} chars),\n'
++ f' shortening to "{shortened_name}" before generating suffix')
 else:
 shortened_name = pat.name

 # Remove invalid characters
-sanitized_name = re.compile('[^A-Za-z0-9_\?\$]').sub('_', shortened_name)
+sanitized_name = re.compile(r'[^A-Za-z0-9_\?\$]').sub('_', shortened_name)

 # Add a suffix that makes the name unique
 i = 0

@@ -534,8 +532,8 @@ def disambiguate_pattern_names(patterns: Sequence[Pattern],
 logger.warning(f'Empty pattern name saved as "{suffixed_name}"')
 elif suffixed_name != sanitized_name:
 if dup_warn_filter is None or dup_warn_filter(pat.name):
-logger.warning(f'Pattern name "{pat.name}" ({sanitized_name}) appears multiple times;\n' +
-f' renaming to "{suffixed_name}"')
+logger.warning(f'Pattern name "{pat.name}" ({sanitized_name}) appears multiple times;\n'
++ f' renaming to "{suffixed_name}"')

 # Encode into a byte-string and perform some final checks
 encoded_name = suffixed_name.encode('ASCII')

@@ -543,8 +541,8 @@ def disambiguate_pattern_names(patterns: Sequence[Pattern],
 # Should never happen since zero-length names are replaced
 raise PatternError(f'Zero-length name after sanitize+encode,\n originally "{pat.name}"')
 if len(encoded_name) > max_name_length:
-raise PatternError(f'Pattern name "{encoded_name!r}" length > {max_name_length} after encode,\n' +
-f' originally "{pat.name}"')
+raise PatternError(f'Pattern name "{encoded_name!r}" length > {max_name_length} after encode,\n'
++ f' originally "{pat.name}"')

 pat.name = suffixed_name
 used_names.append(suffixed_name)

@@ -576,7 +574,8 @@ def load_library(stream: BinaryIO,
 Additional library info (dict, same format as from `read`).
 """
 if is_secondary is None:
-is_secondary = lambda k: False
+def is_secondary(k: str):
+return False

 stream.seek(0)
 library_info = _read_header(stream)

@@ -592,7 +591,7 @@ def load_library(stream: BinaryIO,

 lib.set_value(name, tag, mkstruct, secondary=is_secondary(name))

-return lib
+return lib, library_info


 def load_libraryfile(filename: Union[str, pathlib.Path],
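Note: the last two hunks are the reader-side part of the bug fixes mentioned in the commit message: `read_elements` now actually receives the `raw_mode` argument, and `load_library` returns the `(lib, library_info)` pair its docstring promises instead of the bare `Library`. A caller sketch; the cell name is made up and only default arguments are used:

    lib, library_info = load_library(stream)
    pat = lib['top_cell']    # deferred loading: the structure is only parsed on first access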
@@ -22,17 +22,15 @@ import pathlib
 import gzip

 import numpy # type: ignore
-from numpy import pi
 import fatamorgana
 import fatamorgana.records as fatrec
 from fatamorgana.basic import PathExtensionScheme, AString, NString, PropStringReference

-from .utils import mangle_name, make_dose_table, clean_pattern_vertices, is_gzipped
+from .utils import clean_pattern_vertices, is_gzipped
 from .. import Pattern, SubPattern, PatternError, Label, Shape
 from ..shapes import Polygon, Path, Circle
 from ..repetition import Grid, Arbitrary, Repetition
-from ..utils import rotation_matrix_2d, get_bit, set_bit, vector2, is_scalar, layer_t
-from ..utils import remove_colinear_vertices, normalize_mirror, annotations_t
+from ..utils import layer_t, normalize_mirror, annotations_t


 logger = logging.getLogger(__name__)

@@ -472,7 +470,7 @@ def _mlayer2oas(mlayer: layer_t) -> Tuple[int, int]:
 data_type = 0
 else:
 raise PatternError(f'Invalid layer for OASIS: {layer}. Note that OASIS layers cannot be '
-'strings unless a layer map is provided.')
+f'strings unless a layer map is provided.')
 return layer, data_type


@@ -490,7 +488,7 @@ def _placement_to_subpat(placement: fatrec.Placement, lib: fatamorgana.OasisLayo
 subpat = SubPattern(offset=xy,
 pattern=None,
 mirrored=(placement.flip, False),
-rotation=float(placement.angle * pi/180),
+rotation=numpy.deg2rad(placement.angle),
 scale=float(mag),
 identifier=(name,),
 repetition=repetition_fata2masq(placement.repetition),

@@ -598,11 +596,11 @@ def _labels_to_texts(labels: List[Label],


 def disambiguate_pattern_names(patterns,
-dup_warn_filter: Callable[[str,], bool] = None, # If returns False, don't warn about this name
+dup_warn_filter: Callable[[str], bool] = None, # If returns False, don't warn about this name
 ):
 used_names = []
 for pat in patterns:
-sanitized_name = re.compile('[^A-Za-z0-9_\?\$]').sub('_', pat.name)
+sanitized_name = re.compile(r'[^A-Za-z0-9_\?\$]').sub('_', pat.name)

 i = 0
 suffixed_name = sanitized_name

@@ -616,8 +614,8 @@ def disambiguate_pattern_names(patterns,
 logger.warning(f'Empty pattern name saved as "{suffixed_name}"')
 elif suffixed_name != sanitized_name:
 if dup_warn_filter is None or dup_warn_filter(pat.name):
-logger.warning(f'Pattern name "{pat.name}" ({sanitized_name}) appears multiple times;\n' +
-f' renaming to "{suffixed_name}"')
+logger.warning(f'Pattern name "{pat.name}" ({sanitized_name}) appears multiple times;\n'
++ f' renaming to "{suffixed_name}"')

 if len(suffixed_name) == 0:
 # Should never happen since zero-length names are replaced
@@ -13,7 +13,8 @@ from .. import Pattern

 def writefile(pattern: Pattern,
 filename: str,
-custom_attributes: bool=False):
+custom_attributes: bool = False,
+) -> None:
 """
 Write a Pattern to an SVG file, by first calling .polygonize() on it
 to change the shapes into polygons, and then writing patterns as SVG

@@ -4,7 +4,6 @@ Helper functions for file reading and writing
 from typing import Set, Tuple, List
 import re
 import copy
-import gzip
 import pathlib

 from .. import Pattern, PatternError

@@ -22,7 +21,7 @@ def mangle_name(pattern: Pattern, dose_multiplier: float=1.0) -> str:
 Returns:
 Mangled name.
 """
-expression = re.compile('[^A-Za-z0-9_\?\$]')
+expression = re.compile(r'[^A-Za-z0-9_\?\$]')
 full_name = '{}_{}_{}'.format(pattern.name, dose_multiplier, id(pattern))
 sanitized_name = expression.sub('_', full_name)
 return sanitized_name
@@ -1,10 +1,8 @@
-from typing import List, Tuple, Dict, Optional
+from typing import Tuple, Dict, Optional
 import copy
 import numpy # type: ignore
-from numpy import pi

 from .repetition import Repetition
-from .error import PatternError, PatternLockedError
 from .utils import vector2, rotation_matrix_2d, layer_t, AutoSlots, annotations_t
 from .traits import PositionableImpl, LayerableImpl, Copyable, Pivotable, LockableImpl, RepeatableImpl
 from .traits import AnnotatableImpl

@@ -2,12 +2,11 @@
 Library class for managing unique name->pattern mappings and
 deferred loading or creation.
 """
-from typing import Dict, Callable, TypeVar, Generic, TYPE_CHECKING
+from typing import Dict, Callable, TypeVar, TYPE_CHECKING
 from typing import Any, Tuple, Union, Iterator
 import logging
 from pprint import pformat
 from dataclasses import dataclass
-from functools import lru_cache

 from ..error import LibraryError

@@ -133,13 +132,13 @@ class Library:
 return pat

 def keys(self) -> Iterator[str]:
-return self.primary.keys()
+return iter(self.primary.keys())

 def values(self) -> Iterator['Pattern']:
-return (self[key] for key in self.keys())
+return iter(self[key] for key in self.keys())

 def items(self) -> Iterator[Tuple[str, 'Pattern']]:
-return ((key, self[key]) for key in self.keys())
+return iter((key, self[key]) for key in self.keys())

 def __repr__(self) -> str:
 return '<Library with keys ' + repr(list(self.primary.keys())) + '>'

@@ -191,7 +190,7 @@ class Library:
 for key in self.primary:
 _ = self.get_primary(key)
 for key2 in self.secondary:
-_ = self.get_secondary(key2)
+_ = self.get_secondary(*key2)
 return self

 def add(self, other: 'Library') -> 'Library':
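Note: these `Library` hunks are the other half of the "could potentially fix some bugs" remark in the commit message. `keys`/`values`/`items` are wrapped in `iter()` so they return the `Iterator` their annotations declare, and the cache-priming loop unpacks each secondary key before calling `get_secondary(*key2)`, matching the `(name, tag)` pairs stored by `set_value(name, tag, ...)` earlier in the diff. A small illustrative loop, assuming `lib` came from `load_library` as sketched above:

    for name, pat in lib.items():        # items() is now a true iterator of (str, Pattern)
        print(name, pat.get_bounds())    # get_bounds() appears in the Pattern hunks below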
@@ -103,7 +103,8 @@ class Pattern(LockableImpl, AnnotatableImpl, metaclass=AutoSlots):

 def __deepcopy__(self, memo: Dict = None) -> 'Pattern':
 memo = {} if memo is None else memo
-new = Pattern(name=self.name,
+new = Pattern(
+name=self.name,
 shapes=copy.deepcopy(self.shapes, memo),
 labels=copy.deepcopy(self.labels, memo),
 subpatterns=copy.deepcopy(self.subpatterns, memo),

@@ -518,7 +519,6 @@ class Pattern(LockableImpl, AnnotatableImpl, metaclass=AutoSlots):
 ids.update(pat.subpatterns_by_id(include_none=include_none))
 return dict(ids)

-
 def get_bounds(self) -> Union[numpy.ndarray, None]:
 """
 Return a `numpy.ndarray` containing `[[x_min, y_min], [x_max, y_max]]`, corresponding to the

@@ -625,7 +625,6 @@ class Pattern(LockableImpl, AnnotatableImpl, metaclass=AutoSlots):

 return self

-
 def translate_elements(self, offset: vector2) -> 'Pattern':
 """
 Translates all shapes, label, and subpatterns by the given offset.

@@ -805,9 +804,9 @@ class Pattern(LockableImpl, AnnotatableImpl, metaclass=AutoSlots):
 Returns:
 True if the pattern is contains no shapes, labels, or subpatterns.
 """
-return (len(self.subpatterns) == 0 and
-len(self.shapes) == 0 and
-len(self.labels) == 0)
+return (len(self.subpatterns) == 0
+and len(self.shapes) == 0
+and len(self.labels) == 0)

 def lock(self) -> 'Pattern':
 """
@@ -3,13 +3,13 @@
 instances of an object .
 """

-from typing import Union, List, Dict, Tuple, Optional, Sequence, TYPE_CHECKING, Any
+from typing import Union, Dict, Optional, Sequence, Any
 import copy
 from abc import ABCMeta, abstractmethod

 import numpy # type: ignore

-from .error import PatternError, PatternLockedError
+from .error import PatternError
 from .utils import rotation_matrix_2d, vector2, AutoSlots
 from .traits import LockableImpl, Copyable, Scalable, Rotatable, Mirrorable

@@ -170,8 +170,8 @@ class Grid(LockableImpl, Repetition, metaclass=AutoSlots):
 @property
 def displacements(self) -> numpy.ndarray:
 aa, bb = numpy.meshgrid(numpy.arange(self.a_count), numpy.arange(self.b_count), indexing='ij')
-return (aa.flatten()[:, None] * self.a_vector[None, :] +
-bb.flatten()[:, None] * self.b_vector[None, :])
+return (aa.flatten()[:, None] * self.a_vector[None, :]
++ bb.flatten()[:, None] * self.b_vector[None, :]) # noqa

 def rotate(self, rotation: float) -> 'Grid':
 """
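Note: `Grid.displacements` above builds every lattice offset i * a_vector + j * b_vector for i < a_count, j < b_count; only the line wrapping of the expression changes here. A standalone check of the same expression with made-up vectors:

    import numpy
    a_vector, b_vector = numpy.array([10, 0]), numpy.array([0, 5])
    aa, bb = numpy.meshgrid(numpy.arange(2), numpy.arange(3), indexing='ij')
    disp = (aa.flatten()[:, None] * a_vector[None, :]
            + bb.flatten()[:, None] * b_vector[None, :])
    # disp has shape (6, 2): [0,0], [0,5], [0,10], [10,0], [10,5], [10,10]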
@@ -1,4 +1,4 @@
-from typing import List, Tuple, Dict, Optional, Sequence
+from typing import List, Dict, Optional, Sequence
 import copy
 import math

@@ -81,7 +81,7 @@ class Arc(Shape, metaclass=AutoSlots):

 # arc start/stop angle properties
 @property
-def angles(self) -> numpy.ndarray: #ndarray[float]
+def angles(self) -> numpy.ndarray:
 """
 Return the start and stop angles `[a_start, a_stop]`.
 Angles are measured from x-axis after rotation

@@ -214,8 +214,8 @@ class Arc(Shape, metaclass=AutoSlots):
 poly_max_arclen = self.poly_max_arclen

 if (poly_num_points is None) and (poly_max_arclen is None):
-raise PatternError('Max number of points and arclength left unspecified' +
-' (default was also overridden)')
+raise PatternError('Max number of points and arclength left unspecified'
++ ' (default was also overridden)')

 r0, r1 = self.radii

@@ -356,9 +356,9 @@ class Arc(Shape, metaclass=AutoSlots):
 rotation %= 2 * pi
 width = self.width

-return (type(self), radii, angles, width/norm_value, self.layer), \
-(self.offset, scale/norm_value, rotation, False, self.dose), \
-lambda: Arc(radii=radii*norm_value, angles=angles, width=width*norm_value, layer=self.layer)
+return ((type(self), radii, angles, width / norm_value, self.layer),
+(self.offset, scale / norm_value, rotation, False, self.dose),
+lambda: Arc(radii=radii * norm_value, angles=angles, width=width * norm_value, layer=self.layer))

 def get_cap_edges(self) -> numpy.ndarray:
 '''

@@ -424,8 +424,8 @@ class Arc(Shape, metaclass=AutoSlots):
 return self

 def __repr__(self) -> str:
-angles = f' a°{self.angles*180/pi}'
-rotation = f' r°{self.rotation*180/pi:g}' if self.rotation != 0 else ''
+angles = f' a°{numpy.rad2deg(self.angles)}'
+rotation = f' r°{numpy.rad2deg(self.rotation):g}' if self.rotation != 0 else ''
 dose = f' d{self.dose:g}' if self.dose != 1 else ''
 locked = ' L' if self.locked else ''
 return f'<Arc l{self.layer} o{self.offset} r{self.radii}{angles} w{self.width:g}{rotation}{dose}{locked}>'

@@ -127,9 +127,9 @@ class Circle(Shape, metaclass=AutoSlots):
 def normalized_form(self, norm_value) -> normalized_shape_tuple:
 rotation = 0.0
 magnitude = self.radius / norm_value
-return (type(self), self.layer), \
-(self.offset, magnitude, rotation, False, self.dose), \
-lambda: Circle(radius=norm_value, layer=self.layer)
+return ((type(self), self.layer),
+(self.offset, magnitude, rotation, False, self.dose),
+lambda: Circle(radius=norm_value, layer=self.layer))

 def __repr__(self) -> str:
 dose = f' d{self.dose:g}' if self.dose != 1 else ''

@@ -1,4 +1,4 @@
-from typing import List, Tuple, Dict, Sequence, Optional
+from typing import List, Dict, Sequence, Optional
 import copy
 import math

@@ -198,9 +198,9 @@ class Ellipse(Shape, metaclass=AutoSlots):
 radii = self.radii[::-1] / self.radius_y
 scale = self.radius_y
 angle = (self.rotation + pi / 2) % pi
-return (type(self), radii, self.layer), \
-(self.offset, scale/norm_value, angle, False, self.dose), \
-lambda: Ellipse(radii=radii*norm_value, layer=self.layer)
+return ((type(self), radii, self.layer),
+(self.offset, scale / norm_value, angle, False, self.dose),
+lambda: Ellipse(radii=radii * norm_value, layer=self.layer))

 def lock(self) -> 'Ellipse':
 self.radii.flags.writeable = False

@@ -18,7 +18,7 @@ class PathCap(Enum):
 Circle = 1 # Path extends past final vertices with a semicircle of radius width/2
 Square = 2 # Path extends past final vertices with a width-by-width/2 rectangle
 SquareCustom = 4 # Path extends past final vertices with a rectangle of length
-# defined by path.cap_extensions
+# # defined by path.cap_extensions


 class Path(Shape, metaclass=AutoSlots):

@@ -199,7 +199,7 @@ class Path(Shape, metaclass=AutoSlots):
 def travel(travel_pairs: Tuple[Tuple[float, float]],
 width: float = 0.0,
 cap: PathCap = PathCap.Flush,
-cap_extensions = None,
+cap_extensions: Optional[Tuple[float, float]] = None,
 offset: vector2 = (0.0, 0.0),
 rotation: float = 0,
 mirrored: Sequence[bool] = (False, False),

@@ -370,10 +370,10 @@ class Path(Shape, metaclass=AutoSlots):

 width0 = self.width / norm_value

-return (type(self), reordered_vertices.data.tobytes(), width0, self.cap, self.layer), \
-(offset, scale/norm_value, rotation, False, self.dose), \
+return ((type(self), reordered_vertices.data.tobytes(), width0, self.cap, self.layer),
+(offset, scale / norm_value, rotation, False, self.dose),
 lambda: Path(reordered_vertices * norm_value, width=self.width * norm_value,
-cap=self.cap, layer=self.layer)
+cap=self.cap, layer=self.layer))

 def clean_vertices(self) -> 'Path':
 """

@@ -1,4 +1,4 @@
-from typing import List, Tuple, Dict, Optional, Sequence
+from typing import List, Dict, Optional, Sequence
 import copy

 import numpy # type: ignore

@@ -269,7 +269,6 @@ class Polygon(Shape, metaclass=AutoSlots):
 layer=layer, dose=dose)
 return poly

-
 def to_polygons(self,
 poly_num_points: int = None, # unused
 poly_max_arclen: float = None, # unused

@@ -316,9 +315,9 @@ class Polygon(Shape, metaclass=AutoSlots):

 # TODO: normalize mirroring?

-return (type(self), reordered_vertices.data.tobytes(), self.layer), \
-(offset, scale/norm_value, rotation, False, self.dose), \
-lambda: Polygon(reordered_vertices*norm_value, layer=self.layer)
+return ((type(self), reordered_vertices.data.tobytes(), self.layer),
+(offset, scale / norm_value, rotation, False, self.dose),
+lambda: Polygon(reordered_vertices * norm_value, layer=self.layer))

 def clean_vertices(self) -> 'Polygon':
 """

@@ -1,11 +1,8 @@
 from typing import List, Tuple, Callable, TypeVar, Optional, TYPE_CHECKING
 from abc import ABCMeta, abstractmethod
-import copy

 import numpy # type: ignore

-from ..error import PatternError, PatternLockedError
-from ..utils import rotation_matrix_2d, vector2, layer_t
 from ..traits import (PositionableImpl, LayerableImpl, DoseableImpl,
 Rotatable, Mirrorable, Copyable, Scalable,
 PivotableImpl, LockableImpl, RepeatableImpl,

@@ -142,7 +139,6 @@ class Shape(PositionableImpl, LayerableImpl, DoseableImpl, Rotatable, Mirrorable
 if err_xmax >= 0.5:
 gxi_max += 1

-
 if abs(dv[0]) < 1e-20:
 # Vertical line, don't calculate slope
 xi = [gxi_min, gxi_max - 1]

@@ -156,7 +152,8 @@ class Shape(PositionableImpl, LayerableImpl, DoseableImpl, Rotatable, Mirrorable
 continue

 m = dv[1] / dv[0]
-def get_grid_inds(xes):
+def get_grid_inds(xes: numpy.ndarray) -> numpy.ndarray:
 ys = m * (xes - v[0]) + v[1]

 # (inds - 1) is the index of the y-grid line below the edge's intersection with the x-grid

@@ -202,7 +199,6 @@ class Shape(PositionableImpl, LayerableImpl, DoseableImpl, Rotatable, Mirrorable

 return manhattan_polygons

-
 def manhattanize(self,
 grid_x: numpy.ndarray,
 grid_y: numpy.ndarray

@@ -1,4 +1,4 @@
-from typing import List, Tuple, Dict, Sequence, Optional, MutableSequence
+from typing import List, Tuple, Dict, Sequence, Optional
 import copy

 import numpy # type: ignore

@@ -144,14 +144,14 @@ class Text(RotatableImpl, Shape, metaclass=AutoSlots):
 mirror_x, rotation = normalize_mirror(self.mirrored)
 rotation += self.rotation
 rotation %= 2 * pi
-return (type(self), self.string, self.font_path, self.layer), \
-(self.offset, self.height / norm_value, rotation, mirror_x, self.dose), \
+return ((type(self), self.string, self.font_path, self.layer),
+(self.offset, self.height / norm_value, rotation, mirror_x, self.dose),
 lambda: Text(string=self.string,
 height=self.height * norm_value,
 font_path=self.font_path,
 rotation=rotation,
 mirrored=(mirror_x, False),
-layer=self.layer)
+layer=self.layer))

 def get_bounds(self) -> numpy.ndarray:
 # rotation makes this a huge pain when using slot.advance and glyph.bbox(), so
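Note: most of the `normalized_form` hunks above only replace backslash line continuations with a single parenthesized tuple; the returned value is unchanged. A tiny demonstration with toy values that the two spellings build the same object:

    a = (1, 2), \
        (3, 4)
    b = ((1, 2),
         (3, 4))
    assert a == b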
@@ -4,14 +4,14 @@
 """
 #TODO more top-level documentation

-from typing import Union, List, Dict, Tuple, Optional, Sequence, TYPE_CHECKING, Any
+from typing import Dict, Tuple, Optional, Sequence, TYPE_CHECKING, Any
 import copy

 import numpy # type: ignore
 from numpy import pi

-from .error import PatternError, PatternLockedError
-from .utils import is_scalar, rotation_matrix_2d, vector2, AutoSlots, annotations_t
+from .error import PatternError
+from .utils import is_scalar, vector2, AutoSlots, annotations_t
 from .repetition import Repetition
 from .traits import (PositionableImpl, DoseableImpl, RotatableImpl, ScalableImpl,
 Mirrorable, PivotableImpl, Copyable, LockableImpl, RepeatableImpl,

@@ -1,7 +1,6 @@
 from typing import TypeVar
-from types import MappingProxyType
+#from types import MappingProxyType
 from abc import ABCMeta, abstractmethod
-import copy

 from ..utils import annotations_t
 from ..error import PatternError

@@ -44,10 +43,10 @@ class AnnotatableImpl(Annotatable, metaclass=ABCMeta):
 '''
 @property
 def annotations(self) -> annotations_t:
+return self._annotations
 # # TODO: Find a way to make sure the subclass implements Lockable without dealing with diamond inheritance or this extra hasattr
 # if hasattr(self, 'is_locked') and self.is_locked():
 # return MappingProxyType(self._annotations)
-return self._annotations

 @annotations.setter
 def annotations(self, annotations: annotations_t):

@@ -1,5 +1,5 @@
-from typing import List, Tuple, Callable, TypeVar, Optional
-from abc import ABCMeta, abstractmethod
+from typing import TypeVar
+from abc import ABCMeta
 import copy


@@ -1,9 +1,7 @@
-from typing import List, Tuple, Callable, TypeVar, Optional
+from typing import TypeVar
 from abc import ABCMeta, abstractmethod
-import copy

-from ..error import PatternError, PatternLockedError
-from ..utils import is_scalar
+from ..error import PatternError


 T = TypeVar('T', bound='Doseable')

@@ -70,7 +68,6 @@ class DoseableImpl(Doseable, metaclass=ABCMeta):
 raise PatternError('Dose must be non-negative')
 self._dose = val

-
 '''
 ---- Non-abstract methods
 '''

@@ -1,8 +1,6 @@
-from typing import List, Tuple, Callable, TypeVar, Optional
+from typing import TypeVar
 from abc import ABCMeta, abstractmethod
-import copy

-from ..error import PatternError, PatternLockedError
 from ..utils import layer_t


@@ -1,8 +1,7 @@
-from typing import List, Tuple, Callable, TypeVar, Optional
+from typing import TypeVar
 from abc import ABCMeta, abstractmethod
-import copy

-from ..error import PatternError, PatternLockedError
+from ..error import PatternLockedError


 T = TypeVar('T', bound='Lockable')

@@ -1,8 +1,5 @@
-from typing import List, Tuple, Callable, TypeVar, Optional
+from typing import TypeVar
 from abc import ABCMeta, abstractmethod
-import copy
-
-from ..error import PatternError, PatternLockedError


 T = TypeVar('T', bound='Mirrorable')

@@ -1,12 +1,11 @@
 # TODO top-level comment about how traits should set __slots__ = (), and how to use AutoSlots

-from typing import List, Tuple, Callable, TypeVar, Optional
+from typing import TypeVar
 from abc import ABCMeta, abstractmethod
-import copy
 import numpy # type: ignore

-from ..error import PatternError, PatternLockedError
-from ..utils import is_scalar, rotation_matrix_2d, vector2
+from ..error import PatternError
+from ..utils import vector2


 T = TypeVar('T', bound='Positionable')

@@ -101,7 +100,6 @@ class PositionableImpl(Positionable, metaclass=ABCMeta):
 raise PatternError('Offset must be convertible to size-2 ndarray')
 self._offset = val.flatten()

-
 '''
 ---- Methods
 '''

@@ -1,8 +1,7 @@
-from typing import List, Tuple, Callable, TypeVar, Optional, TYPE_CHECKING
+from typing import TypeVar, Optional, TYPE_CHECKING
 from abc import ABCMeta, abstractmethod
-import copy

-from ..error import PatternError, PatternLockedError
+from ..error import PatternError


 if TYPE_CHECKING:

@@ -1,12 +1,11 @@
-from typing import List, Tuple, Callable, TypeVar, Optional
+from typing import TypeVar
 from abc import ABCMeta, abstractmethod
-import copy

 import numpy # type: ignore
 from numpy import pi

-from .positionable import Positionable
-from ..error import PatternError, PatternLockedError
+#from .positionable import Positionable
+from ..error import PatternError
 from ..utils import is_scalar, rotation_matrix_2d, vector2

 T = TypeVar('T', bound='Rotatable')

@@ -1,8 +1,7 @@
-from typing import List, Tuple, Callable, TypeVar, Optional
+from typing import TypeVar
 from abc import ABCMeta, abstractmethod
-import copy

-from ..error import PatternError, PatternLockedError
+from ..error import PatternError
 from ..utils import is_scalar
