wip again

This commit is contained in:
parent 58894fa596
commit 1741cfb755
@@ -33,8 +33,8 @@ from .label import Label
from .subpattern import SubPattern
from .pattern import Pattern
from .utils import layer_t, annotations_t
from .library import Library, PatternGenerator
from .builder import DeviceLibrary, LibDeviceLibrary, Device, Port
from .library import Library, MutableLibrary, WrapROLibrary, WrapLibrary, LazyLibrary
from .builder import LazyDeviceLibrary, LibDeviceLibrary, Device, DeviceRef, Port, PortList

__author__ = 'Jan Petykiewicz'

@@ -1,4 +1,4 @@
from .devices import Port, Device
from .devices import Port, PortList, Device, DeviceRef
from .utils import ell
from .tools import Tool
from .device_library import DeviceLibrary, LibDeviceLibrary
from .device_library import LazyDeviceLibrary, LibDeviceLibrary

@@ -10,110 +10,15 @@ from abc import ABCMeta, abstractmethod

from ..error import DeviceLibraryError
from ..library import Library
from ..builder import Device
from ..builder import Device, DeviceRef
from .. import Pattern

logger = logging.getLogger(__name__)

D = TypeVar('D', bound='DeviceLibrary')
L = TypeVar('L', bound='LibDeviceLibrary')

class DeviceLibrary(Mapping[str, Device], metaclass=ABCMeta):
# inherited abstract functions
#def __getitem__(self, key: str) -> Device:
#def __iter__(self) -> Iterator[str]:
#def __len__(self) -> int:

#__contains__, keys, items, values, get, __eq__, __ne__ supplied by Mapping

def __repr__(self) -> str:
return '<DeviceLibrary with keys ' + repr(list(self.keys())) + '>'

@abstractmethod
def get_name(
self,
name: str = '__',
sanitize: bool = True,
max_length: int = 32,
quiet: bool = False,
) -> str:
"""
Find a unique name for the device.

This function may be overridden in a subclass or monkey-patched to fit the caller's requirements.

Args:
name: Preferred name for the pattern. Default '__'.
sanitize: Allows only alphanumeric charaters and _?$. Replaces invalid characters with underscores.
max_length: Names longer than this will be truncated.
quiet: If `True`, suppress log messages.

Returns:
Name, unique within this library.
"""
pass

class MutableDeviceLibrary(DeviceLibrary, metaclass=ABCMeta):
# inherited abstract functions
#def __getitem__(self, key: str) -> 'Device':
#def __iter__(self) -> Iterator[str]:
#def __len__(self) -> int:

@abstractmethod
def __setitem__(self, key: str, value: VVV) -> None: #TODO
pass

@abstractmethod
def __delitem__(self, key: str) -> None:
pass

@abstractmethod
def _set(self, key: str, value: Device) -> None:
pass

@abstractmethod
def _merge(self: MDL, other: DL, key: str) -> None:
pass

def add(
self: MDL,
other: DL,
use_ours: Callable[[str], bool] = lambda name: False,
use_theirs: Callable[[str], bool] = lambda name: False,
) -> MDL:
"""
Add keys from another library into this one.

There must be no conflicting keys.

Args:
other: The library to insert keys from
use_ours: Decision function for name conflicts. Will be called with duplicate cell names.
Should return `True` if the value from `self` should be used.
use_theirs: Decision function for name conflicts. Same format as `use_ours`.
Should return `True` if the value from `other` should be used.
`use_ours` takes priority over `use_theirs`.

Returns:
self
"""
duplicates = set(self.keys()) & set(other.keys())
keep_ours = set(name for name in duplicates if use_ours(name))
keep_theirs = set(name for name in duplicates - keep_ours if use_theirs(name))
conflicts = duplicates - keep_ours - keep_theirs
if conflicts:
raise DeviceLibraryError('Duplicate keys encountered in DeviceLibrary merge: '
+ pformat(conflicts))

for name in set(other.keys()) - keep_ours:
self._merge(other, name)
return self

DL = TypeVar('DL', bound='LazyDeviceLibrary')
LDL = TypeVar('LDL', bound='LibDeviceLibrary')

class LazyDeviceLibrary:
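For reference, a minimal standalone sketch of the `use_ours`/`use_theirs` conflict-resolution logic used by `add()` above; plain dicts stand in for the libraries, so nothing here depends on masque itself:

ours = {'pad': 1, 'via': 2}
theirs = {'pad': 10, 'wg': 3}
use_ours = lambda name: name == 'pad'   # keep our copy of any duplicate 'pad'
use_theirs = lambda name: False

duplicates = set(ours) & set(theirs)
keep_ours = {name for name in duplicates if use_ours(name)}
keep_theirs = {name for name in duplicates - keep_ours if use_theirs(name)}
conflicts = duplicates - keep_ours - keep_theirs
assert not conflicts    # any unresolved duplicate would raise DeviceLibraryError in add()
merged = {**ours, **{k: v for k, v in theirs.items() if k not in keep_ours}}
assert merged == {'pad': 1, 'via': 2, 'wg': 3}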
@@ -147,15 +52,9 @@ class LazyDeviceLibrary:
if key in self.cache:
del self.cache[key]

def __getitem__(self, key: str) -> Device:
if self.enable_cache and key in self.cache:
logger.debug(f'found {key} in cache')
return self.cache[key]

logger.debug(f'loading {key}')
dev = self.generators[key]()
self.cache[key] = dev
return dev
def __getitem__(self, key: str) -> DeviceRef:
dev = self.get_device(key)
return DeviceRef(name=key, ports=dev.ports)

def __iter__(self) -> Iterator[str]:
return iter(self.keys())

@@ -163,16 +62,16 @@ class LazyDeviceLibrary:
def __repr__(self) -> str:
return '<LazyDeviceLibrary with keys ' + repr(list(self.generators.keys())) + '>'

def _set(self, key: str, value: Device) -> None:
self[key] = lambda: value
def get_device(self, key: str) -> Device:
if self.enable_cache and key in self.cache:
logger.debug(f'found {key} in cache')
dev = self.cache[key]
return dev

def _merge(self: MDL, other: DL, key: str) -> None:
if type(self) is type(other):
self.generators[name] = other.generators[name]
if name in other.cache:
self.cache[name] = other.cache[name]
else:
self._set(key, other[name])
logger.debug(f'loading {key}')
dev = self.generators[key]()
self.cache[key] = dev
return dev

def clear_cache(self: LDL) -> LDL:
"""
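A rough usage sketch of the lookup split introduced here, assuming a populated `LazyDeviceLibrary` named `devlib` that contains a cell 'pad' (both names are placeholders):

ref = devlib['pad']               # __getitem__ now returns a lightweight DeviceRef (name + ports)
dev = devlib.get_device('pad')    # get_device() runs the generator and returns the full Device
dev_again = devlib.get_device('pad')
assert dev_again is dev           # second call is served from devlib.cache while enable_cache is True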
@@ -254,10 +153,46 @@ class LazyDeviceLibrary:

self[name] = build_wrapped_dev

class LibDeviceLibrary(DeviceLibrary):
def add(
self: DL,
other: DL2,
use_ours: Callable[[str], bool] = lambda name: False,
use_theirs: Callable[[str], bool] = lambda name: False,
) -> DL:
"""
Extends `DeviceLibrary`, enabling it to ingest `Library` objects
Add keys from another library into this one.

There must be no conflicting keys.

Args:
other: The library to insert keys from
use_ours: Decision function for name conflicts. Will be called with duplicate cell names.
Should return `True` if the value from `self` should be used.
use_theirs: Decision function for name conflicts. Same format as `use_ours`.
Should return `True` if the value from `other` should be used.
`use_ours` takes priority over `use_theirs`.

Returns:
self
"""
duplicates = set(self.keys()) & set(other.keys())
keep_ours = set(name for name in duplicates if use_ours(name))
keep_theirs = set(name for name in duplicates - keep_ours if use_theirs(name))
conflicts = duplicates - keep_ours - keep_theirs
if conflicts:
raise DeviceLibraryError('Duplicate keys encountered in DeviceLibrary merge: '
+ pformat(conflicts))

for name in set(other.keys()) - keep_ours:
self.generators[name] = other.generators[name]
if name in other.cache:
self.cache[name] = other.cache[name]
return self

class LibDeviceLibrary(LazyDeviceLibrary):
"""
Extends `LazyDeviceLibrary`, enabling it to ingest `Library` objects
(e.g. obtained by loading a GDS file).

Each `Library` object must be accompanied by a `pat2dev` function,

@@ -269,11 +204,11 @@ class LibDeviceLibrary(DeviceLibrary):
`Library` which is kept in sync with the `DeviceLibrary` when
devices are removed (or new libraries added via `add_library()`).
"""
underlying: Library
underlying: LazyLibrary

def __init__(self) -> None:
DeviceLibrary.__init__(self)
self.underlying = Library()
LazyDeviceLibrary.__init__(self)
self.underlying = LazyLibrary()

def __setitem__(self, key: str, value: Callable[[], Device]) -> None:
self.generators[key] = value

@@ -286,25 +221,24 @@ class LibDeviceLibrary(DeviceLibrary):
# wrapped device. To avoid that, we need to set ourselves as the "true" source of
# the `Pattern` named `key`.
if key in self.underlying:
raise DeviceLibraryError(f'Device name {key} already exists in underlying Library!'
' Demote or delete it first.')
raise DeviceLibraryError(f'Device name {key} already exists in underlying Library!')

# NOTE that this means the `Device` may be cached without the `Pattern` being in
# the `underlying` cache yet!
self.underlying.set_value(key, '__DeviceLibrary', lambda: self[key].pattern)
self.underlying[key] = lambda: self.get_device(key).pattern

def __delitem__(self, key: str) -> None:
DeviceLibrary.__delitem__(self, key)
LazyDeviceLibrary.__delitem__(self, key)
if key in self.underlying:
del self.underlying[key]

def add_library(
self: L,
lib: Library,
self: LDL,
lib: Mapping[str, Pattern],
pat2dev: Callable[[Pattern], Device],
use_ours: Callable[[Union[str, Tuple[str, str]]], bool] = lambda name: False,
use_theirs: Callable[[Union[str, Tuple[str, str]]], bool] = lambda name: False,
) -> L:
) -> LDL:
"""
Add a pattern `Library` into this `LibDeviceLibrary`.

@@ -335,12 +269,11 @@ class LibDeviceLibrary(DeviceLibrary):
if bad_duplicates:
raise DeviceLibraryError('Duplicate devices (no action specified): ' + pformat(bad_duplicates))

# No 'bad' duplicates, so all duplicates should be overwritten
for name in keep_theirs:
self.underlying.demote(name)

self.underlying.add(lib, use_ours, use_theirs)

for name in lib:
self.generators[name] = lambda name=name: pat2dev(self.underlying[name])
def gen(name=name):
return pat2dev(self.underlying[name])

self.generators[name] = gen
return self
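A hedged sketch of how `add_library()` might be fed, assuming `loaded` is a {name: Pattern} mapping (e.g. from a GDS reader) and reusing the `pat2dev()` port-recovery helper from elsewhere in this commit; the port-marker layer (3, 0) and the cell name are purely illustrative:

def to_device(pat):
    # recover ports from geometry on the assumed port-marker layer
    return pat2dev(pat, layers=[(3, 0)], library=loaded)

devlib = LibDeviceLibrary()
devlib.add_library(loaded, pat2dev=to_device)
ref = devlib['some_cell']    # built on demand from the underlying LazyLibrary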
@@ -5,6 +5,7 @@ import warnings
import traceback
import logging
from collections import Counter
from abc import ABCMeta

import numpy
from numpy import pi

@@ -23,10 +24,10 @@ logger = logging.getLogger(__name__)

P = TypeVar('P', bound='Port')
D = TypeVar('D', bound='Device')
DR = TypeVar('DR', bound='DeviceRef')
PL = TypeVar('PL', bound='PortList')
PL2 = TypeVar('PL2', bound='PortList')
D = TypeVar('D', bound='Device')
DR = TypeVar('DR', bound='DeviceRef')

class Port(PositionableImpl, Rotatable, PivotableImpl, Copyable, Mirrorable, metaclass=AutoSlots):

@@ -399,7 +400,7 @@ class PortList(Copyable, Mirrorable, metaclass=ABCMeta):

class DeviceRef(PortList):
__slots__ = ('name', 'ports')
__slots__ = ('name',)

name: str
""" Name of the pattern this device references """

@@ -425,7 +426,7 @@ class DeviceRef(PortList):
"""
pat = Pattern()
pat.addsp(self.name)
new = Device(pat, ports=self.ports, tools=self.tools)
new = Device(pat, ports=self.ports, tools=self.tools) # TODO should DeviceRef have tools?
return new

# TODO do we want to store a SubPattern instead of just a name? then we can translate/rotate/mirror...

@@ -491,7 +492,7 @@ class Device(PortList):
renamed to 'gnd' so that further routing can use this signal or net name
rather than the port name on the original `pad` device.
"""
__slots__ = ('pattern', 'ports', 'tools', '_dead')
__slots__ = ('pattern', 'tools', '_dead')

pattern: Pattern
""" Layout of this device """

@@ -5,7 +5,7 @@ Functions for writing port data into a Pattern (`dev2pat`) and retrieving it (`p
the port locations. This particular approach is just a sensible default; feel free to
to write equivalent functions for your own format or alternate storage methods.
"""
from typing import Sequence
from typing import Sequence, Optional, Mapping
import logging

import numpy

@@ -50,6 +50,7 @@ def dev2pat(device: Device, layer: layer_t) -> Pattern:
def pat2dev(
pattern: Pattern,
layers: Sequence[layer_t],
library: Optional[Mapping[str, Pattern]] = None,
max_depth: int = 999_999,
skip_subcells: bool = True,
) -> Device:

@@ -102,11 +103,11 @@ def pat2dev(
angle = numpy.deg2rad(angle_deg) * mirr_factor[0] * mirr_factor[1] + transform[2]

if name in ports:
logger.info(f'Duplicate port {name} in pattern {pattern.name}')
logger.info(f'Duplicate port {name} in pattern {pattern.name}') # TODO DFS should include name?

ports[name] = Port(offset=xy_global, rotation=angle, ptype=ptype)

return pat

pattern.dfs(visit_before=find_ports_each, transform=True)
pattern.dfs(visit_before=find_ports_each, transform=True) #TODO: don't check Library if there are ports in top level
return Device(pattern, ports)
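A rough round-trip sketch using the two helpers above; `dev` is an existing Device and the port-marker layer (3, 0) is an assumption, not a masque default:

marked = dev2pat(dev, layer=(3, 0))           # write port locations into the Device's Pattern
recovered = pat2dev(marked, layers=[(3, 0)])  # scan those markers back out into a Device
# recovered.ports should describe the same offsets/rotations/ptypes as dev.ports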
@@ -35,7 +35,6 @@ def write(
*,
modify_originals: bool = False,
dxf_version='AC1024',
disambiguate_func: Callable[[Iterable[str]], List[str]] = None,
) -> None:
"""
Write a `Pattern` to a DXF file, by first calling `.polygonize()` to change the shapes

@@ -73,20 +72,15 @@ def write(
WARNING: No additional error checking is performed on the results.
"""
#TODO consider supporting DXF arcs?
if disambiguate_func is None:
disambiguate_func = lambda pats: disambiguate_pattern_names(pats)
assert(disambiguate_func is not None)

#TODO name checking
bad_keys = check_valid_names(library.keys())

if not modify_originals:
library = library.deepcopy()

pattern = library[top_name]

old_names = list(library.keys())
new_names = disambiguate_func(old_names)
renamed_lib = {new_name: library[old_name]
for old_name, new_name in zip(old_names, new_names)}

# Create library
lib = ezdxf.new(dxf_version, setup=True)
msp = lib.modelspace()

@@ -95,7 +89,7 @@ def write(
_subpatterns_to_refs(msp, pattern.subpatterns)

# Now create a block for each referenced pattern, and add in any shapes
for name, pat in renamed_lib.items():
for name, pat in library.items():
assert(pat is not None)
block = lib.blocks.new(name=name)

@@ -388,10 +382,6 @@ def disambiguate_pattern_names(

if sanitized_name == '':
logger.warning(f'Empty pattern name saved as "{suffixed_name}"')
elif suffixed_name != sanitized_name:
if dup_warn_filter is None or dup_warn_filter(name):
logger.warning(f'Pattern name "{name}" ({sanitized_name}) appears multiple times;\n'
+ f' renaming to "{suffixed_name}"')

if len(suffixed_name) == 0:
# Should never happen since zero-length names are replaced

@@ -19,7 +19,7 @@ Notes:
* Creation/modification/access times are set to 1900-01-01 for reproducibility.
"""
from typing import List, Any, Dict, Tuple, Callable, Union, Iterable, Optional
from typing import Sequence, BinaryIO
from typing import Sequence, BinaryIO, Mapping
import re
import io
import mmap

@@ -40,7 +40,8 @@ from .. import Pattern, SubPattern, PatternError, Label, Shape
from ..shapes import Polygon, Path
from ..repetition import Grid
from ..utils import layer_t, normalize_mirror, annotations_t
from ..library import Library
from ..library import LazyLibrary, WrapLibrary, MutableLibrary

logger = logging.getLogger(__name__)

@@ -65,7 +66,6 @@ def write(
library_name: str = 'masque-klamath',
*,
modify_originals: bool = False,
disambiguate_func: Callable[[Iterable[str]], List[str]] = None,
) -> None:
"""
Convert a library to a GDSII stream, mapping data as follows:

@@ -100,24 +100,22 @@ def write(
modify_originals: If `True`, the original pattern is modified as part of the writing
process. Otherwise, a copy is made.
Default `False`.
disambiguate_func: Function which takes a list of pattern names and returns a list of names
altered to be valid and unique. Default is `disambiguate_pattern_names`, which
attempts to adhere to the GDSII standard reasonably well.
WARNING: No additional error checking is performed on the results.
"""
if disambiguate_func is None:
disambiguate_func = disambiguate_pattern_names
# TODO check name errors
bad_keys = check_valid_names(library.keys())

# TODO check all hierarchy present

if not modify_originals:
library = copy.deepcopy(library)
library = library.deepcopy() #TODO figure out best approach e.g. if lazy

for p in library.values():
library.add(p.wrap_repeated_shapes())
if not isinstance(library, MutableLibrary):
if isinstance(library, dict):
library = WrapLibrary(library)
else:
library = WrapLibrary(dict(library))

old_names = list(library.keys())
new_names = disambiguate_func(old_names)
renamed_lib = {new_name: library[old_name]
for old_name, new_name in zip(old_names, new_names)}
library.wrap_repeated_shapes()

# Create library
header = klamath.library.FileHeader(

@@ -128,7 +126,7 @@ def write(
header.write(stream)

# Now create a structure for each pattern, and add in any Boundary and SREF elements
for name, pat in renamed_lib.items():
for name, pat in library.items():
elements: List[klamath.elements.Element] = []
elements += _shapes_to_elements(pat.shapes)
elements += _labels_to_texts(pat.labels)

@@ -162,7 +160,7 @@ def writefile(
open_func = open

with io.BufferedWriter(open_func(path, mode='wb')) as stream:
write(patterns, stream, *args, **kwargs)
write(library, stream, *args, **kwargs)

def readfile(

@@ -310,7 +308,7 @@ def _ref_to_subpat(ref: klamath.library.Reference) -> SubPattern:
a_count=a_count, b_count=b_count)

subpat = SubPattern(
pattern=ref.struct_name.decode('ASCII'),
target=ref.struct_name.decode('ASCII'),
offset=offset,
rotation=numpy.deg2rad(ref.angle_deg),
scale=ref.mag,

@@ -547,10 +545,6 @@ def disambiguate_pattern_names(

if sanitized_name == '':
logger.warning(f'Empty pattern name saved as "{suffixed_name}"')
elif suffixed_name != sanitized_name:
if dup_warn_filter is None or dup_warn_filter(name):
logger.warning(f'Pattern name "{name}" ({sanitized_name}) appears multiple times;\n'
+ f' renaming to "{suffixed_name}"')

# Encode into a byte-string and perform some final checks
encoded_name = suffixed_name.encode('ASCII')

@@ -569,7 +563,7 @@ def load_library(
stream: BinaryIO,
*,
full_load: bool = False,
) -> Tuple[Library, Dict[str, Any]]:
) -> Tuple[LazyLibrary, Dict[str, Any]]:
"""
Scan a GDSII stream to determine what structures are present, and create
a library from them. This enables deferred reading of structures

@@ -586,11 +580,11 @@ def load_library(
will be faster than using the resulting library's `precache` method.

Returns:
Library object, allowing for deferred load of structures.
LazyLibrary object, allowing for deferred load of structures.
Additional library info (dict, same format as from `read`).
"""
stream.seek(0)
lib = Library()
lib = LazyLibrary()

if full_load:
# Full load approach (immediately load everything)
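A usage sketch of the deferred-load path after this change ('chip.gds' and 'TOP' are placeholders):

lib, info = load_libraryfile('chip.gds', full_load=False)
pat = lib['TOP']    # the structure is parsed on first access and then cached by the LazyLibrary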
@@ -620,7 +614,7 @@ def load_libraryfile(
*,
use_mmap: bool = True,
full_load: bool = False,
) -> Tuple[Library, Dict[str, Any]]:
) -> Tuple[LazyLibrary, Dict[str, Any]]:
"""
Wrapper for `load_library()` that takes a filename or path instead of a stream.

@@ -638,7 +632,7 @@ def load_libraryfile(
full_load: If `True`, immediately loads all data. See `load_library`.

Returns:
Library object, allowing for deferred load of structures.
LazyLibrary object, allowing for deferred load of structures.
Additional library info (dict, same format as from `read`).
"""
path = pathlib.Path(filename)

@@ -29,6 +29,7 @@ from fatamorgana.basic import PathExtensionScheme, AString, NString, PropStringR

from .utils import is_gzipped
from .. import Pattern, SubPattern, PatternError, Label, Shape
from ..library import WrapLibrary, MutableLibrary
from ..shapes import Polygon, Path, Circle
from ..repetition import Grid, Arbitrary, Repetition
from ..utils import layer_t, normalize_mirror, annotations_t

@@ -57,7 +58,6 @@ def build(
units_per_micron: int,
layer_map: Optional[Dict[str, Union[int, Tuple[int, int]]]] = None,
*,
disambiguate_func: Optional[Callable[[Iterable[str]], List[str]]] = None,
annotations: Optional[annotations_t] = None,
) -> fatamorgana.OasisLayout:
"""

@@ -90,15 +90,22 @@ def build(
into numbers, omit this argument, and manually generate the required
`fatamorgana.records.LayerName` entries.
Default is an empty dict (no names provided).
disambiguate_func: Function which takes a list of pattern names and returns a list of names
altered to be valid and unique. Default is `disambiguate_pattern_names`.
annotations: dictionary of key-value pairs which are saved as library-level properties

Returns:
`fatamorgana.OasisLayout`
"""
if isinstance(patterns, Pattern):
patterns = [patterns]

# TODO check names
bad_keys = check_valid_names(library.keys())

# TODO check all hierarchy present

if not isinstance(library, MutableLibrary):
if isinstance(library, dict):
library = WrapLibrary(library)
else:
library = WrapLibrary(dict(library))

if layer_map is None:
layer_map = {}

@@ -132,13 +139,8 @@ def build(
else:
layer2oas = _mlayer2oas

old_names = list(library.keys())
new_names = disambiguate_func(old_names)
renamed_lib = {new_name: library[old_name]
for old_name, new_name in zip(old_names, new_names)}

# Now create a structure for each pattern
for name, pat in renamed_lib.items():
for name, pat in library.items():
structure = fatamorgana.Cell(name=name)
lib.cells.append(structure)

@@ -152,7 +154,7 @@ def build(

def write(
patterns: Union[Sequence[Pattern], Pattern],
library: Mapping[str, Pattern], # NOTE: Pattern here should be treated as immutable!
stream: io.BufferedIOBase,
*args,
**kwargs,

@@ -162,17 +164,17 @@ def write(
for details.

Args:
patterns: A Pattern or list of patterns to write to file.
library: A {name: Pattern} mapping of patterns to write.
stream: Stream to write to.
*args: passed to `oasis.build()`
**kwargs: passed to `oasis.build()`
"""
lib = build(patterns, *args, **kwargs)
lib = build(library, *args, **kwargs)
lib.write(stream)

def writefile(
patterns: Union[Sequence[Pattern], Pattern],
library: Mapping[str, Pattern], # NOTE: Pattern here should be treated as immutable!
filename: Union[str, pathlib.Path],
*args,
**kwargs,

@@ -183,7 +185,7 @@ def writefile(
Will automatically compress the file if it has a .gz suffix.

Args:
patterns: `Pattern` or list of patterns to save
library: A {name: Pattern} mapping of patterns to write.
filename: Filename to save to.
*args: passed to `oasis.write`
**kwargs: passed to `oasis.write`

@@ -195,7 +197,7 @@ def writefile(
open_func = open

with io.BufferedWriter(open_func(path, mode='wb')) as stream:
write(patterns, stream, *args, **kwargs)
write(library, stream, *args, **kwargs)

def readfile(

@@ -278,11 +280,13 @@ def read(
if isinstance(element, fatrec.Polygon):
vertices = numpy.cumsum(numpy.vstack(((0, 0), element.get_point_list())), axis=0)
annotations = properties_to_annotations(element.properties, lib.propnames, lib.propstrings)
poly = Polygon(vertices=vertices,
poly = Polygon(
vertices=vertices,
layer=element.get_layer_tuple(),
offset=element.get_xy(),
annotations=annotations,
repetition=repetition)
repetition=repetition,
)

pat.shapes.append(poly)

@@ -301,14 +305,16 @@ def read(
element.get_extension_end()[1]))

annotations = properties_to_annotations(element.properties, lib.propnames, lib.propstrings)
path = Path(vertices=vertices,
path = Path(
vertices=vertices,
layer=element.get_layer_tuple(),
offset=element.get_xy(),
repetition=repetition,
annotations=annotations,
width=element.get_half_width() * 2,
cap=cap,
**path_args)
**path_args,
)

pat.shapes.append(path)

@@ -316,7 +322,8 @@ def read(
width = element.get_width()
height = element.get_height()
annotations = properties_to_annotations(element.properties, lib.propnames, lib.propstrings)
rect = Polygon(layer=element.get_layer_tuple(),
rect = Polygon(
layer=element.get_layer_tuple(),
offset=element.get_xy(),
repetition=repetition,
vertices=numpy.array(((0, 0), (1, 0), (1, 1), (0, 1))) * (width, height),

@@ -405,7 +412,8 @@ def read(
vertices[0, 1] += width

annotations = properties_to_annotations(element.properties, lib.propnames, lib.propstrings)
ctrapz = Polygon(layer=element.get_layer_tuple(),
ctrapz = Polygon(
layer=element.get_layer_tuple(),
offset=element.get_xy(),
repetition=repetition,
vertices=vertices,

@@ -415,11 +423,13 @@ def read(

elif isinstance(element, fatrec.Circle):
annotations = properties_to_annotations(element.properties, lib.propnames, lib.propstrings)
circle = Circle(layer=element.get_layer_tuple(),
circle = Circle(
layer=element.get_layer_tuple(),
offset=element.get_xy(),
repetition=repetition,
annotations=annotations,
radius=float(element.get_radius()))
radius=float(element.get_radius()),
)
pat.shapes.append(circle)

elif isinstance(element, fatrec.Text):

@@ -429,11 +439,13 @@ def read(
string = lib.textstrings[str_or_ref].string
else:
string = str_or_ref.string
label = Label(layer=element.get_layer_tuple(),
label = Label(
layer=element.get_layer_tuple(),
offset=element.get_xy(),
repetition=repetition,
annotations=annotations,
string=string)
string=string,
)
pat.labels.append(label)

else:

@@ -443,7 +455,7 @@ def read(
for placement in cell.placements:
pat.subpatterns.append(_placement_to_subpat(placement, lib))

patterns_dict[name] = pat
patterns_dict[cell_name] = pat

return patterns_dict, library_info

@@ -513,7 +525,8 @@ def _subpatterns_to_placements(
properties=annotations_to_properties(subpat.annotations),
x=offset[0],
y=offset[1],
repetition=frep)
repetition=frep,
)

refs.append(ref)
return refs

@@ -602,7 +615,6 @@ def _labels_to_texts(

def disambiguate_pattern_names(
names: Iterable[str],
dup_warn_filter: Callable[[str], bool] = None, # If returns False, don't warn about this name
) -> List[str]:
new_names = []
for name in names:

@@ -618,10 +630,6 @@ def disambiguate_pattern_names(

if sanitized_name == '':
logger.warning(f'Empty pattern name saved as "{suffixed_name}"')
elif suffixed_name != sanitized_name:
if dup_warn_filter is None or dup_warn_filter(name):
logger.warning(f'Pattern name "{name}" ({sanitized_name}) appears multiple times;\n'
+ f' renaming to "{suffixed_name}"')

if len(suffixed_name) == 0:
# Should never happen since zero-length names are replaced
@@ -65,7 +65,6 @@ def build(
library_name: str = 'masque-gdsii-write',
*,
modify_originals: bool = False,
disambiguate_func: Callable[[Iterable[str]], List[str]] = None,
) -> gdsii.library.Library:
"""
Convert a `Pattern` or list of patterns to a GDSII stream, by first calling

@@ -97,22 +96,20 @@ def build(
modify_originals: If `True`, the original pattern is modified as part of the writing
process. Otherwise, a copy is made.
Default `False`.
disambiguate_func: Function which takes a list of pattern names and returns a list of names
altered to be valid and unique. Default is `disambiguate_pattern_names`, which
attempts to adhere to the GDSII standard reasonably well.
WARNING: No additional error checking is performed on the results.

Returns:
`gdsii.library.Library`
"""
if disambiguate_func is None:
disambiguate_func = disambiguate_pattern_names
# TODO check name errors
bad_keys = check_valid_names(library.keys())

# TODO check all hierarchy present

if not modify_originals:
library = copy.deepcopy(library)
library = library.deepcopy() #TODO figure out best approach e.g. if lazy

for p in library.values():
library.add(p.wrap_repeated_shapes())
library.wrap_repeated_shapes()

old_names = list(library.keys())
new_names = disambiguate_func(old_names)

@@ -181,7 +178,7 @@ def writefile(
open_func = open

with io.BufferedWriter(open_func(path, mode='wb')) as stream:
write(patterns, stream, *args, **kwargs)
write(library, stream, *args, **kwargs)

def readfile(

@@ -248,7 +245,7 @@ def read(
patterns_dict = {}
for structure in lib:
pat = Pattern()
name=structure.name.decode('ASCII')
name = structure.name.decode('ASCII')
for element in structure:
# Switch based on element type:
if isinstance(element, gdsii.elements.Boundary):

@@ -260,9 +257,11 @@ def read(
pat.shapes.append(path)

elif isinstance(element, gdsii.elements.Text):
label = Label(offset=element.xy.astype(float),
label = Label(
offset=element.xy.astype(float),
layer=(element.layer, element.text_type),
string=element.string.decode('ASCII'))
string=element.string.decode('ASCII'),
)
pat.labels.append(label)

elif isinstance(element, (gdsii.elements.SRef, gdsii.elements.ARef)):

@@ -296,7 +295,7 @@ def _ref_to_subpat(
gdsii.elements.ARef]
) -> SubPattern:
"""
Helper function to create a SubPattern from an SREF or AREF. Sets subpat.target to struct_name.
Helper function to create a SubPattern from an SREF or AREF. Sets `subpat.target` to `element.struct_name`.

NOTE: "Absolute" means not affected by parent elements.
That's not currently supported by masque at all (and not planned).

@@ -330,13 +329,15 @@ def _ref_to_subpat(
repetition = Grid(a_vector=a_vector, b_vector=b_vector,
a_count=a_count, b_count=b_count)

subpat = SubPattern(pattern=None,
subpat = SubPattern(
target=element.struct_name,
offset=offset,
rotation=rotation,
scale=scale,
mirrored=(mirror_across_x, False),
annotations=_properties_to_annotations(element.properties),
repetition=repetition)
repetition=repetition,
)
return subpat

@@ -346,7 +347,8 @@ def _gpath_to_mpath(element: gdsii.elements.Path, raw_mode: bool) -> Path:
else:
raise PatternError(f'Unrecognized path type: {element.path_type}')

args = {'vertices': element.xy.astype(float),
args = {
'vertices': element.xy.astype(float),
'layer': (element.layer, element.data_type),
'width': element.width if element.width is not None else 0.0,
'cap': cap,

@@ -511,7 +513,6 @@ def disambiguate_pattern_names(
names: Iterable[str],
max_name_length: int = 32,
suffix_length: int = 6,
dup_warn_filter: Optional[Callable[[str], bool]] = None,
) -> List[str]:
"""
Args:

@@ -519,9 +520,6 @@ def disambiguate_pattern_names(
max_name_length: Names longer than this will be truncated
suffix_length: Names which get truncated are truncated by this many extra characters. This is to
leave room for a suffix if one is necessary.
dup_warn_filter: (optional) Function for suppressing warnings about cell names changing. Receives
the cell name and returns `False` if the warning should be suppressed and `True` if it should
be displayed. Default displays all warnings.
"""
new_names = []
for name in names:

@@ -547,10 +545,6 @@ def disambiguate_pattern_names(

if sanitized_name == '':
logger.warning(f'Empty pattern name saved as "{suffixed_name}"')
elif suffixed_name != sanitized_name:
if dup_warn_filter is None or dup_warn_filter(name):
logger.warning(f'Pattern name "{name}" ({sanitized_name}) appears multiple times;\n'
+ f' renaming to "{suffixed_name}"')

# Encode into a byte-string and perform some final checks
encoded_name = suffixed_name.encode('ASCII')
@@ -114,7 +114,7 @@ def writefile_inverted(
pattern = library[top]

# Polygonize and flatten pattern
pattern.polygonize().flatten()
pattern.polygonize().flatten(library)

bounds = pattern.get_bounds(library=library)
if bounds is None:

@@ -5,11 +5,16 @@ from typing import Set, Tuple, List, Iterable, Mapping
import re
import copy
import pathlib
import logging

from .. import Pattern, PatternError
from ..library import Library, WrapROLibrary
from ..shapes import Polygon, Path

logger = logging.getLogger(__name__)

def mangle_name(name: str, dose_multiplier: float = 1.0) -> str:
"""
Create a new name using `name` and the `dose_multiplier`.

@@ -58,7 +63,7 @@ def make_dose_table(
top_names: Iterable[str],
library: Mapping[str, Pattern],
dose_multiplier: float = 1.0,
) -> Set[Tuple[int, float]]:
) -> Set[Tuple[str, float]]:
"""
Create a set containing `(name, written_dose)` for each pattern (including subpatterns)

@@ -104,7 +109,7 @@ def dtype2dose(pattern: Pattern) -> Pattern:

def dose2dtype(
library: List[Pattern],
library: Mapping[str, Pattern],
) -> Tuple[List[Pattern], List[float]]:
"""
For each shape in each pattern, set shape.layer to the tuple

@@ -128,6 +133,10 @@ def dose2dtype(
and dose (float, list entry).
"""
logger.warning('TODO: dose2dtype() needs to be tested!')

if not isinstance(library, Library):
library = WrapROLibrary(library)

# Get a table of (id(pat), written_dose) for each pattern and subpattern
sd_table = make_dose_table(library.find_topcells(), library)

@@ -161,8 +170,8 @@ def dose2dtype(

pat = old_pat.deepcopy()

if len(encoded_name) == 0:
raise PatternError('Zero-length name after mangle+encode, originally "{name}"'.format(pat.name))
if len(mangled_name) == 0:
raise PatternError(f'Zero-length name after mangle, originally "{name}"')

for shape in pat.shapes:
data_type = dose_vals_list.index(shape.dose * pat_dose)
@@ -67,7 +67,7 @@ class Label(PositionableImpl, LayerableImpl, RepeatableImpl, AnnotatableImpl,
identifier=self.identifier,
)

def __deepcopy__(self: L, memo: Dict = None) -> L:
def __deepcopy__(self: L, memo: Optional[Dict] = None) -> L:
memo = {} if memo is None else memo
new = copy.copy(self)
new._offset = self._offset.copy()

@@ -31,7 +31,7 @@ logger = logging.getLogger(__name__)
visitor_function_t = Callable[['Pattern', Tuple['Pattern'], Dict, NDArray[numpy.float64]], 'Pattern']
L = TypeVar('L', bound='Library')
ML = TypeVar('ML', bound='MutableLibrary')
LL = TypeVar('LL', bound='LazyLibrary')
#LL = TypeVar('LL', bound='LazyLibrary')

class Library(Mapping[str, Pattern], metaclass=ABCMeta):

@@ -81,9 +81,9 @@ class Library(Mapping[str, Pattern], metaclass=ABCMeta):

# TODO maybe not for immutable?
def subtree(
self: L,
self,
tops: Union[str, Sequence[str]],
) -> ML:
) -> WrapLibrary:
"""
Return a new `Library`, containing only the specified patterns and the patterns they
reference (recursively).

@@ -143,9 +143,9 @@ class Library(Mapping[str, Pattern], metaclass=ABCMeta):
return self

def flatten(
self: L,
self,
tops: Union[str, Sequence[str]],
) -> Dict[str, Pattern]:
) -> Dict[str, 'Pattern']:
"""
Removes all subpatterns and adds equivalent shapes.
Also flattens all subpatterns.

@@ -159,7 +159,7 @@ class Library(Mapping[str, Pattern], metaclass=ABCMeta):
if isinstance(tops, str):
tops = (tops,)

flattened: Dict[str, Optional[Pattern]] = {}
flattened: Dict[str, Optional['Pattern']] = {}

def flatten_single(name) -> None:
flattened[name] = None

@@ -266,16 +266,16 @@ class MutableLibrary(Library, metaclass=ABCMeta):
pass

@abstractmethod
def _set(self, key: str, value: Pattern) -> None:
def _set(self, key: str, value: 'Pattern') -> None:
pass

@abstractmethod
def _merge(self: ML, other: ML, key: str) -> None:
def _merge(self, other: Mapping[str, 'Pattern'], key: str) -> None:
pass

def add(
self: ML,
other: L,
other: Mapping[str, 'Pattern'],
use_ours: Callable[[str], bool] = lambda name: False,
use_theirs: Callable[[str], bool] = lambda name: False,
) -> ML:

@@ -309,8 +309,8 @@ class MutableLibrary(Library, metaclass=ABCMeta):
def dfs(
self: ML,
top: str,
visit_before: visitor_function_t = None,
visit_after: visitor_function_t = None,
visit_before: Optional[visitor_function_t] = None,
visit_after: Optional[visitor_function_t] = None,
transform: Union[ArrayLike, bool, None] = False,
memo: Optional[Dict] = None,
hierarchy: Tuple[str, ...] = (),

@@ -431,7 +431,9 @@ class MutableLibrary(Library, metaclass=ABCMeta):
self
"""
# This currently simplifies globally (same shape in different patterns is
# merged into the same subpattern target.
# merged into the same subpattern target).

from .pattern import Pattern

if exclude_types is None:
exclude_types = ()

@@ -517,6 +519,8 @@ class MutableLibrary(Library, metaclass=ABCMeta):
Returns:
self
"""
from .pattern import Pattern

if name_func is None:
name_func = lambda _pat, _shape: self.get_name('_rep')

@@ -569,11 +573,11 @@ class MutableLibrary(Library, metaclass=ABCMeta):

class WrapROLibrary(Library):
mapping: Mapping[str, Pattern]
mapping: Mapping[str, 'Pattern']

def __init__(
self,
mapping: Mapping[str, Pattern],
mapping: Mapping[str, 'Pattern'],
) -> None:
self.mapping = mapping

@@ -591,11 +595,11 @@ class WrapROLibrary(Library):

class WrapLibrary(MutableLibrary):
mapping: MutableMapping[str, Pattern]
mapping: MutableMapping[str, 'Pattern']

def __init__(
self,
mapping: MutableMapping[str, Pattern],
mapping: MutableMapping[str, 'Pattern'],
) -> None:
self.mapping = mapping

@@ -608,16 +612,16 @@ class WrapLibrary(MutableLibrary):
def __len__(self) -> int:
return len(self.mapping)

def __setitem__(self, key: str, value: Pattern) -> None:
def __setitem__(self, key: str, value: 'Pattern') -> None:
self.mapping[key] = value

def __delitem__(self, key: str) -> None:
del self.mapping[key]

def _set(self, key: str, value: Pattern) -> None:
def _set(self, key: str, value: 'Pattern') -> None:
self[key] = value

def _merge(self: ML, other: L, key: str) -> None:
def _merge(self, other: Mapping[str, 'Pattern'], key: str) -> None:
self[key] = other[key]

def __repr__(self) -> str:
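A short sketch of the wrapper classes above, which give a plain dict the `Library` interface:

patterns = {'TOP': Pattern(), 'sub': Pattern()}
lib = WrapLibrary(patterns)      # mutable view; writes go straight through to the dict
ro = WrapROLibrary(patterns)     # read-only Mapping view
lib['sub2'] = Pattern()
assert 'sub2' in patterns        # the wrapped dict is modified in place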
@@ -631,7 +635,7 @@ class LazyLibrary(MutableLibrary):

The cache can be disabled by setting the `enable_cache` attribute to `False`.
"""
dict: Dict[str, Callable[[], Pattern]]
dict: Dict[str, Callable[[], 'Pattern']]
cache: Dict[str, 'Pattern']
enable_cache: bool = True

@@ -639,7 +643,7 @@ class LazyLibrary(MutableLibrary):
self.dict = {}
self.cache = {}

def __setitem__(self, key: str, value: Callable[[], Pattern]) -> None:
def __setitem__(self, key: str, value: Callable[[], 'Pattern']) -> None:
self.dict[key] = value
if key in self.cache:
del self.cache[key]

@@ -666,11 +670,11 @@ class LazyLibrary(MutableLibrary):
def __len__(self) -> int:
return len(self.dict)

def _set(self, key: str, value: Pattern) -> None:
def _set(self, key: str, value: 'Pattern') -> None:
self[key] = lambda: value

def _merge(self: LL, other: L, key: str) -> None:
if type(self) is type(other):
def _merge(self, other: Mapping[str, 'Pattern'], key: str) -> None:
if isinstance(other, LazyLibrary):
self.dict[key] = other.dict[key]
if key in other.cache:
self.cache[key] = other.cache[key]
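A usage sketch of `LazyLibrary`'s deferred construction and caching (`build_top()` is a hypothetical generator function):

lib = LazyLibrary()
lib['TOP'] = lambda: build_top()    # stored as a callable; nothing is built yet
pat = lib['TOP']                    # first access invokes the callable and caches the result
pat_again = lib['TOP']              # served from lib.cache unless enable_cache is False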
@@ -703,5 +707,5 @@ class LazyLibrary(MutableLibrary):
self.cache.clear()
return self

def __deepcopy__(self, memo: Dict = None) -> 'LazyLibrary':
def __deepcopy__(self, memo: Optional[Dict] = None) -> 'LazyLibrary':
raise LibraryError('LazyLibrary cannot be deepcopied (deepcopy doesn\'t descend into closures)')

@@ -80,7 +80,7 @@ class Pattern(AnnotatableImpl, Mirrorable, metaclass=AutoSlots):

self.annotations = annotations if annotations is not None else {}

def __copy__(self, memo: Dict = None) -> 'Pattern':
def __copy__(self) -> 'Pattern':
return Pattern(
shapes=copy.deepcopy(self.shapes),
labels=copy.deepcopy(self.labels),

@@ -88,7 +88,7 @@ class Pattern(AnnotatableImpl, Mirrorable, metaclass=AutoSlots):
annotations=copy.deepcopy(self.annotations),
)

def __deepcopy__(self, memo: Dict = None) -> 'Pattern':
def __deepcopy__(self, memo: Optional[Dict] = None) -> 'Pattern':
memo = {} if memo is None else memo
new = Pattern(
shapes=copy.deepcopy(self.shapes, memo),

@@ -116,9 +116,9 @@ class Pattern(AnnotatableImpl, Mirrorable, metaclass=AutoSlots):

def subset(
self,
shapes: Callable[[Shape], bool] = None,
labels: Callable[[Label], bool] = None,
subpatterns: Callable[[SubPattern], bool] = None,
shapes: Optional[Callable[[Shape], bool]] = None,
labels: Optional[Callable[[Label], bool]] = None,
subpatterns: Optional[Callable[[SubPattern], bool]] = None,
) -> 'Pattern':
"""
Returns a Pattern containing only the entities (e.g. shapes) for which the

@@ -295,6 +295,7 @@ class Pattern(AnnotatableImpl, Mirrorable, metaclass=AutoSlots):
Returns:
self
"""
entry: Scalable
for entry in chain(self.shapes, self.subpatterns):
entry.scale_by(c)
return self

@@ -133,7 +133,7 @@ class Grid(Repetition, metaclass=AutoSlots):
)
return new

def __deepcopy__(self, memo: Dict = None) -> 'Grid':
def __deepcopy__(self, memo: Optional[Dict] = None) -> 'Grid':
memo = {} if memo is None else memo
new = copy.copy(self)
return new

@@ -239,7 +239,7 @@ class Grid(Repetition, metaclass=AutoSlots):
a_extent = self.a_vector * self.a_count
b_extent = self.b_vector * self.b_count if (self.b_vector is not None) else 0 # type: Union[NDArray[numpy.float64], float]

corners = ((0, 0), a_extent, b_extent, a_extent + b_extent)
corners = numpy.stack(((0, 0), a_extent, b_extent, a_extent + b_extent))
xy_min = numpy.min(corners, axis=0)
xy_max = numpy.max(corners, axis=0)
return numpy.array((xy_min, xy_max))

@@ -195,7 +195,7 @@ class Arc(Shape, metaclass=AutoSlots):
self.poly_max_arclen = poly_max_arclen
[self.mirror(a) for a, do in enumerate(mirrored) if do]

def __deepcopy__(self, memo: Dict = None) -> 'Arc':
def __deepcopy__(self, memo: Optional[Dict] = None) -> 'Arc':
memo = {} if memo is None else memo
new = copy.copy(self)
new._offset = self._offset.copy()

@@ -74,7 +74,7 @@ class Circle(Shape, metaclass=AutoSlots):
self.poly_num_points = poly_num_points
self.poly_max_arclen = poly_max_arclen

def __deepcopy__(self, memo: Dict = None) -> 'Circle':
def __deepcopy__(self, memo: Optional[Dict] = None) -> 'Circle':
memo = {} if memo is None else memo
new = copy.copy(self)
new._offset = self._offset.copy()

@@ -125,7 +125,7 @@ class Ellipse(Shape, metaclass=AutoSlots):
self.poly_num_points = poly_num_points
self.poly_max_arclen = poly_max_arclen

def __deepcopy__(self, memo: Dict = None) -> 'Ellipse':
def __deepcopy__(self, memo: Optional[Dict] = None) -> 'Ellipse':
memo = {} if memo is None else memo
new = copy.copy(self)
new._offset = self._offset.copy()

@@ -185,7 +185,7 @@ class Path(Shape, metaclass=AutoSlots):
self.rotate(rotation)
[self.mirror(a) for a, do in enumerate(mirrored) if do]

def __deepcopy__(self, memo: Dict = None) -> 'Path':
def __deepcopy__(self, memo: Optional[Dict] = None) -> 'Path':
memo = {} if memo is None else memo
new = copy.copy(self)
new._offset = self._offset.copy()

@@ -245,8 +245,8 @@ class Path(Shape, metaclass=AutoSlots):

def to_polygons(
self,
poly_num_points: int = None,
poly_max_arclen: float = None,
poly_num_points: Optional[int] = None,
poly_max_arclen: Optional[float] = None,
) -> List['Polygon']:
extensions = self._calculate_cap_extensions()

@@ -345,8 +345,8 @@ class Polygon(Shape, metaclass=AutoSlots):

def to_polygons(
self,
poly_num_points: int = None, # unused
poly_max_arclen: float = None, # unused
poly_num_points: Optional[int] = None, # unused
poly_max_arclen: Optional[float] = None, # unused
) -> List['Polygon']:
return [copy.deepcopy(self)]

@@ -176,7 +176,7 @@ class Shape(PositionableImpl, LayerableImpl, DoseableImpl, Rotatable, Mirrorable
return inds

# Find the y indices on all x gridlines
xs = gx[gxi_min:gxi_max]
xs = gx[int(gxi_min):int(gxi_max)]
inds = get_grid_inds(xs)

# Find y-intersections for x-midpoints

@@ -100,7 +100,7 @@ class Text(RotatableImpl, Shape, metaclass=AutoSlots):
self.annotations = annotations if annotations is not None else {}
self.font_path = font_path

def __deepcopy__(self, memo: Dict = None) -> 'Text':
def __deepcopy__(self, memo: Optional[Dict] = None) -> 'Text':
memo = {} if memo is None else memo
new = copy.copy(self)
new._offset = self._offset.copy()

@@ -94,7 +94,7 @@ class SubPattern(PositionableImpl, DoseableImpl, RotatableImpl, ScalableImpl, Mi
)
return new

def __deepcopy__(self, memo: Dict = None) -> 'SubPattern':
def __deepcopy__(self, memo: Optional[Dict] = None) -> 'SubPattern':
memo = {} if memo is None else memo
new = copy.copy(self)
new.repetition = copy.deepcopy(self.repetition, memo)

@@ -33,6 +33,7 @@ class Doseable(metaclass=ABCMeta):
'''
---- Methods
'''
@abstractmethod
def set_dose(self: T, dose: float) -> T:
"""
Set the dose

@@ -63,7 +64,7 @@ class DoseableImpl(Doseable, metaclass=ABCMeta):
return self._dose

@dose.setter
def dose(self, val: float):
def dose(self, val: float) -> None:
if not val >= 0:
raise MasqueError('Dose must be non-negative')
self._dose = val

@@ -32,6 +32,7 @@ class Layerable(metaclass=ABCMeta):
'''
---- Methods
'''
@abstractmethod
def set_layer(self: T, layer: layer_t) -> T:
"""
Set the layer

@@ -1,7 +1,7 @@
from typing import TypeVar, Dict, Tuple, Any
from abc import ABCMeta, abstractmethod

from ..error import PatternLockedError
#from ..error import PatternLockedError

T = TypeVar('T', bound='Lockable')