wip again

parent 58894fa596
commit 1741cfb755
@@ -33,8 +33,8 @@ from .label import Label
 from .subpattern import SubPattern
 from .pattern import Pattern
 from .utils import layer_t, annotations_t
-from .library import Library, PatternGenerator
-from .builder import DeviceLibrary, LibDeviceLibrary, Device, Port
+from .library import Library, MutableLibrary, WrapROLibrary, WrapLibrary, LazyLibrary
+from .builder import LazyDeviceLibrary, LibDeviceLibrary, Device, DeviceRef, Port, PortList


 __author__ = 'Jan Petykiewicz'

@@ -1,4 +1,4 @@
-from .devices import Port, Device
+from .devices import Port, PortList, Device, DeviceRef
 from .utils import ell
 from .tools import Tool
-from .device_library import DeviceLibrary, LibDeviceLibrary
+from .device_library import LazyDeviceLibrary, LibDeviceLibrary
@@ -10,110 +10,15 @@ from abc import ABCMeta, abstractmethod

 from ..error import DeviceLibraryError
 from ..library import Library
-from ..builder import Device
+from ..builder import Device, DeviceRef
 from .. import Pattern


 logger = logging.getLogger(__name__)


-D = TypeVar('D', bound='DeviceLibrary')
-L = TypeVar('L', bound='LibDeviceLibrary')
+DL = TypeVar('DL', bound='LazyDeviceLibrary')
+LDL = TypeVar('LDL', bound='LibDeviceLibrary')


-class DeviceLibrary(Mapping[str, Device], metaclass=ABCMeta):
-    # inherited abstract functions
-    #def __getitem__(self, key: str) -> Device:
-    #def __iter__(self) -> Iterator[str]:
-    #def __len__(self) -> int:
-
-    #__contains__, keys, items, values, get, __eq__, __ne__ supplied by Mapping
-
-    def __repr__(self) -> str:
-        return '<DeviceLibrary with keys ' + repr(list(self.keys())) + '>'
-
-    @abstractmethod
-    def get_name(
-            self,
-            name: str = '__',
-            sanitize: bool = True,
-            max_length: int = 32,
-            quiet: bool = False,
-            ) -> str:
-        """
-        Find a unique name for the device.
-
-        This function may be overridden in a subclass or monkey-patched to fit the caller's requirements.
-
-        Args:
-            name: Preferred name for the pattern. Default '__'.
-            sanitize: Allows only alphanumeric charaters and _?$. Replaces invalid characters with underscores.
-            max_length: Names longer than this will be truncated.
-            quiet: If `True`, suppress log messages.
-
-        Returns:
-            Name, unique within this library.
-        """
-        pass
-
-
-class MutableDeviceLibrary(DeviceLibrary, metaclass=ABCMeta):
-    # inherited abstract functions
-    #def __getitem__(self, key: str) -> 'Device':
-    #def __iter__(self) -> Iterator[str]:
-    #def __len__(self) -> int:
-
-    @abstractmethod
-    def __setitem__(self, key: str, value: VVV) -> None: #TODO
-        pass
-
-    @abstractmethod
-    def __delitem__(self, key: str) -> None:
-        pass
-
-    @abstractmethod
-    def _set(self, key: str, value: Device) -> None:
-        pass
-
-    @abstractmethod
-    def _merge(self: MDL, other: DL, key: str) -> None:
-        pass
-
-    def add(
-            self: MDL,
-            other: DL,
-            use_ours: Callable[[str], bool] = lambda name: False,
-            use_theirs: Callable[[str], bool] = lambda name: False,
-            ) -> MDL:
-        """
-        Add keys from another library into this one.
-
-        There must be no conflicting keys.
-
-        Args:
-            other: The library to insert keys from
-            use_ours: Decision function for name conflicts. Will be called with duplicate cell names.
-                Should return `True` if the value from `self` should be used.
-            use_theirs: Decision function for name conflicts. Same format as `use_ours`.
-                Should return `True` if the value from `other` should be used.
-                `use_ours` takes priority over `use_theirs`.
-
-        Returns:
-            self
-        """
-        duplicates = set(self.keys()) & set(other.keys())
-        keep_ours = set(name for name in duplicates if use_ours(name))
-        keep_theirs = set(name for name in duplicates - keep_ours if use_theirs(name))
-        conflicts = duplicates - keep_ours - keep_theirs
-        if conflicts:
-            raise DeviceLibraryError('Duplicate keys encountered in DeviceLibrary merge: '
-                                     + pformat(conflicts))
-
-        for name in set(other.keys()) - keep_ours:
-            self._merge(other, name)
-        return self
-
-
 class LazyDeviceLibrary:
@@ -147,15 +52,9 @@ class LazyDeviceLibrary:
         if key in self.cache:
             del self.cache[key]

-    def __getitem__(self, key: str) -> Device:
-        if self.enable_cache and key in self.cache:
-            logger.debug(f'found {key} in cache')
-            return self.cache[key]
-
-        logger.debug(f'loading {key}')
-        dev = self.generators[key]()
-        self.cache[key] = dev
-        return dev
+    def __getitem__(self, key: str) -> DeviceRef:
+        dev = self.get_device(key)
+        return DeviceRef(name=key, ports=dev.ports)

     def __iter__(self) -> Iterator[str]:
         return iter(self.keys())

@@ -163,16 +62,16 @@ class LazyDeviceLibrary:
     def __repr__(self) -> str:
         return '<LazyDeviceLibrary with keys ' + repr(list(self.generators.keys())) + '>'

-    def _set(self, key: str, value: Device) -> None:
-        self[key] = lambda: value
-
-    def _merge(self: MDL, other: DL, key: str) -> None:
-        if type(self) is type(other):
-            self.generators[name] = other.generators[name]
-            if name in other.cache:
-                self.cache[name] = other.cache[name]
-        else:
-            self._set(key, other[name])
+    def get_device(self, key: str) -> Device:
+        if self.enable_cache and key in self.cache:
+            logger.debug(f'found {key} in cache')
+            dev = self.cache[key]
+            return dev
+
+        logger.debug(f'loading {key}')
+        dev = self.generators[key]()
+        self.cache[key] = dev
+        return dev

     def clear_cache(self: LDL) -> LDL:
         """

@@ -254,10 +153,46 @@ class LazyDeviceLibrary:

         self[name] = build_wrapped_dev

+    def add(
+            self: DL,
+            other: DL2,
+            use_ours: Callable[[str], bool] = lambda name: False,
+            use_theirs: Callable[[str], bool] = lambda name: False,
+            ) -> DL:
+        """
+        Add keys from another library into this one.
+
+        There must be no conflicting keys.
+
+        Args:
+            other: The library to insert keys from
+            use_ours: Decision function for name conflicts. Will be called with duplicate cell names.
+                Should return `True` if the value from `self` should be used.
+            use_theirs: Decision function for name conflicts. Same format as `use_ours`.
+                Should return `True` if the value from `other` should be used.
+                `use_ours` takes priority over `use_theirs`.
+
+        Returns:
+            self
+        """
+        duplicates = set(self.keys()) & set(other.keys())
+        keep_ours = set(name for name in duplicates if use_ours(name))
+        keep_theirs = set(name for name in duplicates - keep_ours if use_theirs(name))
+        conflicts = duplicates - keep_ours - keep_theirs
+        if conflicts:
+            raise DeviceLibraryError('Duplicate keys encountered in DeviceLibrary merge: '
+                                     + pformat(conflicts))
+
+        for name in set(other.keys()) - keep_ours:
+            self.generators[name] = other.generators[name]
+            if name in other.cache:
+                self.cache[name] = other.cache[name]
+        return self
+
+
-class LibDeviceLibrary(DeviceLibrary):
+class LibDeviceLibrary(LazyDeviceLibrary):
     """
-    Extends `DeviceLibrary`, enabling it to ingest `Library` objects
+    Extends `LazyDeviceLibrary`, enabling it to ingest `Library` objects
     (e.g. obtained by loading a GDS file).

     Each `Library` object must be accompanied by a `pat2dev` function,
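A minimal usage sketch of the reworked `LazyDeviceLibrary` API introduced above (the generator `make_mmi`, the cell names, and `lib_b` are illustrative assumptions, not part of this commit):

    lib_a = LazyDeviceLibrary()
    lib_a['mmi'] = lambda: make_mmi()      # generator is only invoked on first access

    ref = lib_a['mmi']                     # indexing now returns a lightweight DeviceRef (name + ports)
    dev = lib_a.get_device('mmi')          # full Device (pattern + ports), cached when enable_cache is set

    # Merge another LazyDeviceLibrary, preferring its entries for pdk_* names:
    lib_a.add(lib_b, use_theirs=lambda name: name.startswith('pdk_'))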
@@ -269,11 +204,11 @@ class LibDeviceLibrary(DeviceLibrary):
    `Library` which is kept in sync with the `DeviceLibrary` when
    devices are removed (or new libraries added via `add_library()`).
    """
-    underlying: Library
+    underlying: LazyLibrary

     def __init__(self) -> None:
-        DeviceLibrary.__init__(self)
-        self.underlying = Library()
+        LazyDeviceLibrary.__init__(self)
+        self.underlying = LazyLibrary()

     def __setitem__(self, key: str, value: Callable[[], Device]) -> None:
         self.generators[key] = value

@@ -286,25 +221,24 @@ class LibDeviceLibrary(DeviceLibrary):
        # wrapped device. To avoid that, we need to set ourselves as the "true" source of
        # the `Pattern` named `key`.
        if key in self.underlying:
-            raise DeviceLibraryError(f'Device name {key} already exists in underlying Library!'
-                                     ' Demote or delete it first.')
+            raise DeviceLibraryError(f'Device name {key} already exists in underlying Library!')

        # NOTE that this means the `Device` may be cached without the `Pattern` being in
        # the `underlying` cache yet!
-        self.underlying.set_value(key, '__DeviceLibrary', lambda: self[key].pattern)
+        self.underlying[key] = lambda: self.get_device(key).pattern

    def __delitem__(self, key: str) -> None:
-        DeviceLibrary.__delitem__(self, key)
+        LazyDeviceLibrary.__delitem__(self, key)
        if key in self.underlying:
            del self.underlying[key]

    def add_library(
-            self: L,
-            lib: Library,
+            self: LDL,
+            lib: Mapping[str, Pattern],
            pat2dev: Callable[[Pattern], Device],
            use_ours: Callable[[Union[str, Tuple[str, str]]], bool] = lambda name: False,
            use_theirs: Callable[[Union[str, Tuple[str, str]]], bool] = lambda name: False,
-            ) -> L:
+            ) -> LDL:
        """
        Add a pattern `Library` into this `LibDeviceLibrary`.

@@ -335,12 +269,11 @@ class LibDeviceLibrary(DeviceLibrary):
        if bad_duplicates:
            raise DeviceLibraryError('Duplicate devices (no action specified): ' + pformat(bad_duplicates))

-        # No 'bad' duplicates, so all duplicates should be overwritten
-        for name in keep_theirs:
-            self.underlying.demote(name)
-
        self.underlying.add(lib, use_ours, use_theirs)

        for name in lib:
-            self.generators[name] = lambda name=name: pat2dev(self.underlying[name])
+            def gen(name=name):
+                return pat2dev(self.underlying[name])
+
+            self.generators[name] = gen
        return self
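A hedged sketch of the new `add_library()` signature, which now accepts any `{name: Pattern}` mapping. The GDS file name and ports layer are made-up values, and the exact loader module path is an assumption based on the hunks further below:

    gds_lib, _info = load_libraryfile('devices.gds')    # LazyLibrary from the klamath-based loader
    dev_lib = LibDeviceLibrary()
    dev_lib.add_library(
        gds_lib,
        pat2dev=lambda pat: pat2dev(pat, layers=[(3, 0)], library=gds_lib),
        )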
@@ -5,6 +5,7 @@ import warnings
 import traceback
 import logging
 from collections import Counter
+from abc import ABCMeta

 import numpy
 from numpy import pi

@@ -23,10 +24,10 @@ logger = logging.getLogger(__name__)


 P = TypeVar('P', bound='Port')
-D = TypeVar('D', bound='Device')
-DR = TypeVar('DR', bound='DeviceRef')
 PL = TypeVar('PL', bound='PortList')
 PL2 = TypeVar('PL2', bound='PortList')
+D = TypeVar('D', bound='Device')
+DR = TypeVar('DR', bound='DeviceRef')


 class Port(PositionableImpl, Rotatable, PivotableImpl, Copyable, Mirrorable, metaclass=AutoSlots):

@@ -399,7 +400,7 @@ class PortList(Copyable, Mirrorable, metaclass=ABCMeta):


 class DeviceRef(PortList):
-    __slots__ = ('name', 'ports')
+    __slots__ = ('name',)

     name: str
     """ Name of the pattern this device references """

@@ -425,7 +426,7 @@ class DeviceRef(PortList):
        """
        pat = Pattern()
        pat.addsp(self.name)
-        new = Device(pat, ports=self.ports, tools=self.tools)
+        new = Device(pat, ports=self.ports, tools=self.tools)    # TODO should DeviceRef have tools?
        return new

    # TODO do we want to store a SubPattern instead of just a name? then we can translate/rotate/mirror...

@@ -491,7 +492,7 @@ class Device(PortList):
        renamed to 'gnd' so that further routing can use this signal or net name
        rather than the port name on the original `pad` device.
    """
-    __slots__ = ('pattern', 'ports', 'tools', '_dead')
+    __slots__ = ('pattern', 'tools', '_dead')

    pattern: Pattern
    """ Layout of this device """

@@ -5,7 +5,7 @@ Functions for writing port data into a Pattern (`dev2pat`) and retrieving it (`p
 the port locations. This particular approach is just a sensible default; feel free to
 to write equivalent functions for your own format or alternate storage methods.
 """
-from typing import Sequence
+from typing import Sequence, Optional, Mapping
 import logging

 import numpy

@@ -50,6 +50,7 @@ def dev2pat(device: Device, layer: layer_t) -> Pattern:
 def pat2dev(
         pattern: Pattern,
         layers: Sequence[layer_t],
+        library: Optional[Mapping[str, Pattern]] = None,
         max_depth: int = 999_999,
         skip_subcells: bool = True,
         ) -> Device:

@@ -102,11 +103,11 @@ def pat2dev(
            angle = numpy.deg2rad(angle_deg) * mirr_factor[0] * mirr_factor[1] + transform[2]

            if name in ports:
-                logger.info(f'Duplicate port {name} in pattern {pattern.name}')
+                logger.info(f'Duplicate port {name} in pattern {pattern.name}')    # TODO DFS should include name?

            ports[name] = Port(offset=xy_global, rotation=angle, ptype=ptype)

        return pat

-    pattern.dfs(visit_before=find_ports_each, transform=True)
+    pattern.dfs(visit_before=find_ports_each, transform=True)    #TODO: don't check Library if there are ports in top level
    return Device(pattern, ports)
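A short usage sketch of the extended `pat2dev()` signature above; `my_pattern`, `my_library`, and the port-label layer `(3, 0)` are assumed placeholders:

    dev = pat2dev(my_pattern, layers=[(3, 0)], library=my_library)
    print(dev.ports)    # ports recovered from labels in the pattern (and, now, its referenced cells)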
@@ -35,7 +35,6 @@ def write(
        *,
        modify_originals: bool = False,
        dxf_version='AC1024',
-        disambiguate_func: Callable[[Iterable[str]], List[str]] = None,
        ) -> None:
    """
    Write a `Pattern` to a DXF file, by first calling `.polygonize()` to change the shapes

@@ -73,20 +72,15 @@ def write(
        WARNING: No additional error checking is performed on the results.
    """
    #TODO consider supporting DXF arcs?
-    if disambiguate_func is None:
-        disambiguate_func = lambda pats: disambiguate_pattern_names(pats)
-    assert(disambiguate_func is not None)
+    #TODO name checking
+    bad_keys = check_valid_names(library.keys())

    if not modify_originals:
        library = library.deepcopy()

    pattern = library[top_name]

-    old_names = list(library.keys())
-    new_names = disambiguate_func(old_names)
-    renamed_lib = {new_name: library[old_name]
-                   for old_name, new_name in zip(old_names, new_names)}
-
    # Create library
    lib = ezdxf.new(dxf_version, setup=True)
    msp = lib.modelspace()

@@ -95,7 +89,7 @@ def write(
    _subpatterns_to_refs(msp, pattern.subpatterns)

    # Now create a block for each referenced pattern, and add in any shapes
-    for name, pat in renamed_lib.items():
+    for name, pat in library.items():
        assert(pat is not None)
        block = lib.blocks.new(name=name)

@@ -388,10 +382,6 @@ def disambiguate_pattern_names(

        if sanitized_name == '':
            logger.warning(f'Empty pattern name saved as "{suffixed_name}"')
-        elif suffixed_name != sanitized_name:
-            if dup_warn_filter is None or dup_warn_filter(name):
-                logger.warning(f'Pattern name "{name}" ({sanitized_name}) appears multiple times;\n'
-                               + f' renaming to "{suffixed_name}"')

        if len(suffixed_name) == 0:
            # Should never happen since zero-length names are replaced
@@ -19,7 +19,7 @@ Notes:
 * Creation/modification/access times are set to 1900-01-01 for reproducibility.
 """
 from typing import List, Any, Dict, Tuple, Callable, Union, Iterable, Optional
-from typing import Sequence, BinaryIO
+from typing import Sequence, BinaryIO, Mapping
 import re
 import io
 import mmap

@@ -40,7 +40,8 @@ from .. import Pattern, SubPattern, PatternError, Label, Shape
 from ..shapes import Polygon, Path
 from ..repetition import Grid
 from ..utils import layer_t, normalize_mirror, annotations_t
-from ..library import Library
+from ..library import LazyLibrary, WrapLibrary, MutableLibrary


 logger = logging.getLogger(__name__)

@@ -65,7 +66,6 @@ def write(
        library_name: str = 'masque-klamath',
        *,
        modify_originals: bool = False,
-        disambiguate_func: Callable[[Iterable[str]], List[str]] = None,
        ) -> None:
    """
    Convert a library to a GDSII stream, mapping data as follows:

@@ -100,24 +100,22 @@ def write(
        modify_originals: If `True`, the original pattern is modified as part of the writing
            process. Otherwise, a copy is made.
            Default `False`.
-        disambiguate_func: Function which takes a list of pattern names and returns a list of names
-            altered to be valid and unique. Default is `disambiguate_pattern_names`, which
-            attempts to adhere to the GDSII standard reasonably well.
-            WARNING: No additional error checking is performed on the results.
    """
-    if disambiguate_func is None:
-        disambiguate_func = disambiguate_pattern_names
+    # TODO check name errors
+    bad_keys = check_valid_names(library.keys())
+
+    # TODO check all hierarchy present

    if not modify_originals:
-        library = copy.deepcopy(library)
+        library = library.deepcopy()    #TODO figure out best approach e.g. if lazy

-    for p in library.values():
-        library.add(p.wrap_repeated_shapes())
+    if not isinstance(library, MutableLibrary):
+        if isinstance(library, dict):
+            library = WrapLibrary(library)
+        else:
+            library = WrapLibrary(dict(library))

-    old_names = list(library.keys())
-    new_names = disambiguate_func(old_names)
-    renamed_lib = {new_name: library[old_name]
-                   for old_name, new_name in zip(old_names, new_names)}
+    library.wrap_repeated_shapes()

    # Create library
    header = klamath.library.FileHeader(
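A hedged sketch of the new mapping-based write path shown above: a plain dict is accepted and wrapped in `WrapLibrary` internally. The file name, cell name, and keyword arguments are illustrative assumptions:

    # 'out.gds', 'top', and meters_per_unit are made-up example values.
    write({'top': pat}, stream, meters_per_unit=1e-9)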
@@ -128,7 +126,7 @@ def write(
    header.write(stream)

    # Now create a structure for each pattern, and add in any Boundary and SREF elements
-    for name, pat in renamed_lib.items():
+    for name, pat in library.items():
        elements: List[klamath.elements.Element] = []
        elements += _shapes_to_elements(pat.shapes)
        elements += _labels_to_texts(pat.labels)

@@ -162,7 +160,7 @@ def writefile(
        open_func = open

    with io.BufferedWriter(open_func(path, mode='wb')) as stream:
-        write(patterns, stream, *args, **kwargs)
+        write(library, stream, *args, **kwargs)


 def readfile(

@@ -310,7 +308,7 @@ def _ref_to_subpat(ref: klamath.library.Reference) -> SubPattern:
                          a_count=a_count, b_count=b_count)

    subpat = SubPattern(
-        pattern=ref.struct_name.decode('ASCII'),
+        target=ref.struct_name.decode('ASCII'),
        offset=offset,
        rotation=numpy.deg2rad(ref.angle_deg),
        scale=ref.mag,

@@ -547,10 +545,6 @@ def disambiguate_pattern_names(

        if sanitized_name == '':
            logger.warning(f'Empty pattern name saved as "{suffixed_name}"')
-        elif suffixed_name != sanitized_name:
-            if dup_warn_filter is None or dup_warn_filter(name):
-                logger.warning(f'Pattern name "{name}" ({sanitized_name}) appears multiple times;\n'
-                               + f' renaming to "{suffixed_name}"')

        # Encode into a byte-string and perform some final checks
        encoded_name = suffixed_name.encode('ASCII')
@@ -569,7 +563,7 @@ def load_library(
        stream: BinaryIO,
        *,
        full_load: bool = False,
-        ) -> Tuple[Library, Dict[str, Any]]:
+        ) -> Tuple[LazyLibrary, Dict[str, Any]]:
    """
    Scan a GDSII stream to determine what structures are present, and create
    a library from them. This enables deferred reading of structures

@@ -586,11 +580,11 @@ def load_library(
        will be faster than using the resulting library's `precache` method.

    Returns:
-        Library object, allowing for deferred load of structures.
+        LazyLibrary object, allowing for deferred load of structures.
        Additional library info (dict, same format as from `read`).
    """
    stream.seek(0)
-    lib = Library()
+    lib = LazyLibrary()

    if full_load:
        # Full load approach (immediately load everything)

@@ -620,7 +614,7 @@ def load_libraryfile(
        *,
        use_mmap: bool = True,
        full_load: bool = False,
-        ) -> Tuple[Library, Dict[str, Any]]:
+        ) -> Tuple[LazyLibrary, Dict[str, Any]]:
    """
    Wrapper for `load_library()` that takes a filename or path instead of a stream.

@@ -638,7 +632,7 @@ def load_libraryfile(
        full_load: If `True`, immediately loads all data. See `load_library`.

    Returns:
-        Library object, allowing for deferred load of structures.
+        LazyLibrary object, allowing for deferred load of structures.
        Additional library info (dict, same format as from `read`).
    """
    path = pathlib.Path(filename)
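A minimal sketch of deferred loading with the `LazyLibrary` return type introduced here (the file and cell names are placeholders):

    lib, info = load_libraryfile('big_design.gds', full_load=False)
    pat = lib['SOME_CELL']    # this structure is only parsed on first access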
@@ -29,6 +29,7 @@ from fatamorgana.basic import PathExtensionScheme, AString, NString, PropStringR

 from .utils import is_gzipped
 from .. import Pattern, SubPattern, PatternError, Label, Shape
+from ..library import WrapLibrary, MutableLibrary
 from ..shapes import Polygon, Path, Circle
 from ..repetition import Grid, Arbitrary, Repetition
 from ..utils import layer_t, normalize_mirror, annotations_t

@@ -57,7 +58,6 @@ def build(
        units_per_micron: int,
        layer_map: Optional[Dict[str, Union[int, Tuple[int, int]]]] = None,
        *,
-        disambiguate_func: Optional[Callable[[Iterable[str]], List[str]]] = None,
        annotations: Optional[annotations_t] = None,
        ) -> fatamorgana.OasisLayout:
    """

@@ -90,15 +90,22 @@ def build(
            into numbers, omit this argument, and manually generate the required
            `fatamorgana.records.LayerName` entries.
            Default is an empty dict (no names provided).
-        disambiguate_func: Function which takes a list of pattern names and returns a list of names
-            altered to be valid and unique. Default is `disambiguate_pattern_names`.
        annotations: dictionary of key-value pairs which are saved as library-level properties

    Returns:
        `fatamorgana.OasisLayout`
    """
-    if isinstance(patterns, Pattern):
-        patterns = [patterns]
+    # TODO check names
+    bad_keys = check_valid_names(library.keys())
+
+    # TODO check all hierarchy present
+
+    if not isinstance(library, MutableLibrary):
+        if isinstance(library, dict):
+            library = WrapLibrary(library)
+        else:
+            library = WrapLibrary(dict(library))

    if layer_map is None:
        layer_map = {}

@@ -132,13 +139,8 @@ def build(
    else:
        layer2oas = _mlayer2oas

-    old_names = list(library.keys())
-    new_names = disambiguate_func(old_names)
-    renamed_lib = {new_name: library[old_name]
-                   for old_name, new_name in zip(old_names, new_names)}
-
    # Now create a structure for each pattern
-    for name, pat in renamed_lib.items():
+    for name, pat in library.items():
        structure = fatamorgana.Cell(name=name)
        lib.cells.append(structure)

@@ -152,7 +154,7 @@ def build(


 def write(
-        patterns: Union[Sequence[Pattern], Pattern],
+        library: Mapping[str, Pattern],    # NOTE: Pattern here should be treated as immutable!
        stream: io.BufferedIOBase,
        *args,
        **kwargs,

@@ -162,17 +164,17 @@ def write(
    for details.

    Args:
-        patterns: A Pattern or list of patterns to write to file.
+        library: A {name: Pattern} mapping of patterns to write.
        stream: Stream to write to.
        *args: passed to `oasis.build()`
        **kwargs: passed to `oasis.build()`
    """
-    lib = build(patterns, *args, **kwargs)
+    lib = build(library, *args, **kwargs)
    lib.write(stream)


 def writefile(
-        patterns: Union[Sequence[Pattern], Pattern],
+        library: Mapping[str, Pattern],    # NOTE: Pattern here should be treated as immutable!
        filename: Union[str, pathlib.Path],
        *args,
        **kwargs,

@@ -183,7 +185,7 @@ def writefile(
    Will automatically compress the file if it has a .gz suffix.

    Args:
-        patterns: `Pattern` or list of patterns to save
+        library: A {name: Pattern} mapping of patterns to write.
        filename: Filename to save to.
        *args: passed to `oasis.write`
        **kwargs: passed to `oasis.write`

@@ -195,7 +197,7 @@ def writefile(
        open_func = open

    with io.BufferedWriter(open_func(path, mode='wb')) as stream:
-        write(patterns, stream, *args, **kwargs)
+        write(library, stream, *args, **kwargs)


 def readfile(
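An illustrative call with the new mapping-based OASIS API (the file name, cell name, and unit scale are made-up values; `units_per_micron` is the positional argument shown in `build()` above):

    writefile({'top': pat}, 'out.oas', units_per_micron=1000)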
@@ -278,11 +280,13 @@ def read(
            if isinstance(element, fatrec.Polygon):
                vertices = numpy.cumsum(numpy.vstack(((0, 0), element.get_point_list())), axis=0)
                annotations = properties_to_annotations(element.properties, lib.propnames, lib.propstrings)
-                poly = Polygon(vertices=vertices,
-                               layer=element.get_layer_tuple(),
-                               offset=element.get_xy(),
-                               annotations=annotations,
-                               repetition=repetition)
+                poly = Polygon(
+                    vertices=vertices,
+                    layer=element.get_layer_tuple(),
+                    offset=element.get_xy(),
+                    annotations=annotations,
+                    repetition=repetition,
+                    )

                pat.shapes.append(poly)

@@ -301,14 +305,16 @@ def read(
                                               element.get_extension_end()[1]))

                annotations = properties_to_annotations(element.properties, lib.propnames, lib.propstrings)
-                path = Path(vertices=vertices,
-                            layer=element.get_layer_tuple(),
-                            offset=element.get_xy(),
-                            repetition=repetition,
-                            annotations=annotations,
-                            width=element.get_half_width() * 2,
-                            cap=cap,
-                            **path_args)
+                path = Path(
+                    vertices=vertices,
+                    layer=element.get_layer_tuple(),
+                    offset=element.get_xy(),
+                    repetition=repetition,
+                    annotations=annotations,
+                    width=element.get_half_width() * 2,
+                    cap=cap,
+                    **path_args,
+                    )

                pat.shapes.append(path)

@@ -316,12 +322,13 @@ def read(
                width = element.get_width()
                height = element.get_height()
                annotations = properties_to_annotations(element.properties, lib.propnames, lib.propstrings)
-                rect = Polygon(layer=element.get_layer_tuple(),
-                               offset=element.get_xy(),
-                               repetition=repetition,
-                               vertices=numpy.array(((0, 0), (1, 0), (1, 1), (0, 1))) * (width, height),
-                               annotations=annotations,
-                               )
+                rect = Polygon(
+                    layer=element.get_layer_tuple(),
+                    offset=element.get_xy(),
+                    repetition=repetition,
+                    vertices=numpy.array(((0, 0), (1, 0), (1, 1), (0, 1))) * (width, height),
+                    annotations=annotations,
+                    )
                pat.shapes.append(rect)

            elif isinstance(element, fatrec.Trapezoid):

@@ -405,21 +412,24 @@ def read(
                    vertices[0, 1] += width

                annotations = properties_to_annotations(element.properties, lib.propnames, lib.propstrings)
-                ctrapz = Polygon(layer=element.get_layer_tuple(),
-                                 offset=element.get_xy(),
-                                 repetition=repetition,
-                                 vertices=vertices,
-                                 annotations=annotations,
-                                 )
+                ctrapz = Polygon(
+                    layer=element.get_layer_tuple(),
+                    offset=element.get_xy(),
+                    repetition=repetition,
+                    vertices=vertices,
+                    annotations=annotations,
+                    )
                pat.shapes.append(ctrapz)

            elif isinstance(element, fatrec.Circle):
                annotations = properties_to_annotations(element.properties, lib.propnames, lib.propstrings)
-                circle = Circle(layer=element.get_layer_tuple(),
-                                offset=element.get_xy(),
-                                repetition=repetition,
-                                annotations=annotations,
-                                radius=float(element.get_radius()))
+                circle = Circle(
+                    layer=element.get_layer_tuple(),
+                    offset=element.get_xy(),
+                    repetition=repetition,
+                    annotations=annotations,
+                    radius=float(element.get_radius()),
+                    )
                pat.shapes.append(circle)

            elif isinstance(element, fatrec.Text):

@@ -429,11 +439,13 @@ def read(
                    string = lib.textstrings[str_or_ref].string
                else:
                    string = str_or_ref.string
-                label = Label(layer=element.get_layer_tuple(),
-                              offset=element.get_xy(),
-                              repetition=repetition,
-                              annotations=annotations,
-                              string=string)
+                label = Label(
+                    layer=element.get_layer_tuple(),
+                    offset=element.get_xy(),
+                    repetition=repetition,
+                    annotations=annotations,
+                    string=string,
+                    )
                pat.labels.append(label)

            else:

@@ -443,7 +455,7 @@ def read(
        for placement in cell.placements:
            pat.subpatterns.append(_placement_to_subpat(placement, lib))

-        patterns_dict[name] = pat
+        patterns_dict[cell_name] = pat

    return patterns_dict, library_info

@@ -513,7 +525,8 @@ def _subpatterns_to_placements(
            properties=annotations_to_properties(subpat.annotations),
            x=offset[0],
            y=offset[1],
-            repetition=frep)
+            repetition=frep,
+            )

        refs.append(ref)
    return refs

@@ -602,7 +615,6 @@ def _labels_to_texts(

 def disambiguate_pattern_names(
        names: Iterable[str],
-        dup_warn_filter: Callable[[str], bool] = None,      # If returns False, don't warn about this name
        ) -> List[str]:
    new_names = []
    for name in names:

@@ -618,10 +630,6 @@ def disambiguate_pattern_names(

        if sanitized_name == '':
            logger.warning(f'Empty pattern name saved as "{suffixed_name}"')
-        elif suffixed_name != sanitized_name:
-            if dup_warn_filter is None or dup_warn_filter(name):
-                logger.warning(f'Pattern name "{name}" ({sanitized_name}) appears multiple times;\n'
-                               + f' renaming to "{suffixed_name}"')

        if len(suffixed_name) == 0:
            # Should never happen since zero-length names are replaced
@@ -65,7 +65,6 @@ def build(
        library_name: str = 'masque-gdsii-write',
        *,
        modify_originals: bool = False,
-        disambiguate_func: Callable[[Iterable[str]], List[str]] = None,
        ) -> gdsii.library.Library:
    """
    Convert a `Pattern` or list of patterns to a GDSII stream, by first calling

@@ -97,22 +96,20 @@ def build(
        modify_originals: If `True`, the original pattern is modified as part of the writing
            process. Otherwise, a copy is made.
            Default `False`.
-        disambiguate_func: Function which takes a list of pattern names and returns a list of names
-            altered to be valid and unique. Default is `disambiguate_pattern_names`, which
-            attempts to adhere to the GDSII standard reasonably well.
-            WARNING: No additional error checking is performed on the results.

    Returns:
        `gdsii.library.Library`
    """
-    if disambiguate_func is None:
-        disambiguate_func = disambiguate_pattern_names
+    # TODO check name errors
+    bad_keys = check_valid_names(library.keys())
+
+    # TODO check all hierarchy present

    if not modify_originals:
-        library = copy.deepcopy(library)
+        library = library.deepcopy()    #TODO figure out best approach e.g. if lazy

-    for p in library.values():
-        library.add(p.wrap_repeated_shapes())
+    library.wrap_repeated_shapes()

    old_names = list(library.keys())
    new_names = disambiguate_func(old_names)

@@ -181,7 +178,7 @@ def writefile(
        open_func = open

    with io.BufferedWriter(open_func(path, mode='wb')) as stream:
-        write(patterns, stream, *args, **kwargs)
+        write(library, stream, *args, **kwargs)


 def readfile(

@@ -248,7 +245,7 @@ def read(
    patterns_dict = {}
    for structure in lib:
        pat = Pattern()
-        name=structure.name.decode('ASCII')
+        name = structure.name.decode('ASCII')
        for element in structure:
            # Switch based on element type:
            if isinstance(element, gdsii.elements.Boundary):

@@ -260,9 +257,11 @@ def read(
                pat.shapes.append(path)

            elif isinstance(element, gdsii.elements.Text):
-                label = Label(offset=element.xy.astype(float),
-                              layer=(element.layer, element.text_type),
-                              string=element.string.decode('ASCII'))
+                label = Label(
+                    offset=element.xy.astype(float),
+                    layer=(element.layer, element.text_type),
+                    string=element.string.decode('ASCII'),
+                    )
                pat.labels.append(label)

            elif isinstance(element, (gdsii.elements.SRef, gdsii.elements.ARef)):

@@ -296,7 +295,7 @@ def _ref_to_subpat(
                   gdsii.elements.ARef]
        ) -> SubPattern:
    """
-    Helper function to create a SubPattern from an SREF or AREF. Sets subpat.target to struct_name.
+    Helper function to create a SubPattern from an SREF or AREF. Sets `subpat.target` to `element.struct_name`.

    NOTE: "Absolute" means not affected by parent elements.
          That's not currently supported by masque at all (and not planned).

@@ -330,13 +329,15 @@ def _ref_to_subpat(
        repetition = Grid(a_vector=a_vector, b_vector=b_vector,
                          a_count=a_count, b_count=b_count)

-    subpat = SubPattern(pattern=None,
-                        offset=offset,
-                        rotation=rotation,
-                        scale=scale,
-                        mirrored=(mirror_across_x, False),
-                        annotations=_properties_to_annotations(element.properties),
-                        repetition=repetition)
+    subpat = SubPattern(
+        target=element.struct_name,
+        offset=offset,
+        rotation=rotation,
+        scale=scale,
+        mirrored=(mirror_across_x, False),
+        annotations=_properties_to_annotations(element.properties),
+        repetition=repetition,
+        )
    return subpat


@@ -346,14 +347,15 @@ def _gpath_to_mpath(element: gdsii.elements.Path, raw_mode: bool) -> Path:
    else:
        raise PatternError(f'Unrecognized path type: {element.path_type}')

-    args = {'vertices': element.xy.astype(float),
-            'layer': (element.layer, element.data_type),
-            'width': element.width if element.width is not None else 0.0,
-            'cap': cap,
-            'offset': numpy.zeros(2),
-            'annotations': _properties_to_annotations(element.properties),
-            'raw': raw_mode,
-            }
+    args = {
+        'vertices': element.xy.astype(float),
+        'layer': (element.layer, element.data_type),
+        'width': element.width if element.width is not None else 0.0,
+        'cap': cap,
+        'offset': numpy.zeros(2),
+        'annotations': _properties_to_annotations(element.properties),
+        'raw': raw_mode,
+        }

    if cap == Path.Cap.SquareCustom:
        args['cap_extensions'] = numpy.zeros(2)

@@ -511,7 +513,6 @@ def disambiguate_pattern_names(
        names: Iterable[str],
        max_name_length: int = 32,
        suffix_length: int = 6,
-        dup_warn_filter: Optional[Callable[[str], bool]] = None,
        ) -> List[str]:
    """
    Args:

@@ -519,9 +520,6 @@ def disambiguate_pattern_names(
        max_name_length: Names longer than this will be truncated
        suffix_length: Names which get truncated are truncated by this many extra characters. This is to
            leave room for a suffix if one is necessary.
-        dup_warn_filter: (optional) Function for suppressing warnings about cell names changing. Receives
-            the cell name and returns `False` if the warning should be suppressed and `True` if it should
-            be displayed. Default displays all warnings.
    """
    new_names = []
    for name in names:

@@ -547,10 +545,6 @@ def disambiguate_pattern_names(

        if sanitized_name == '':
            logger.warning(f'Empty pattern name saved as "{suffixed_name}"')
-        elif suffixed_name != sanitized_name:
-            if dup_warn_filter is None or dup_warn_filter(name):
-                logger.warning(f'Pattern name "{name}" ({sanitized_name}) appears multiple times;\n'
-                               + f' renaming to "{suffixed_name}"')

        # Encode into a byte-string and perform some final checks
        encoded_name = suffixed_name.encode('ASCII')
@@ -114,7 +114,7 @@ def writefile_inverted(
    pattern = library[top]

    # Polygonize and flatten pattern
-    pattern.polygonize().flatten()
+    pattern.polygonize().flatten(library)

    bounds = pattern.get_bounds(library=library)
    if bounds is None:

@@ -5,11 +5,16 @@ from typing import Set, Tuple, List, Iterable, Mapping
 import re
 import copy
 import pathlib
+import logging

 from .. import Pattern, PatternError
+from ..library import Library, WrapROLibrary
 from ..shapes import Polygon, Path


+logger = logging.getLogger(__name__)
+
+
 def mangle_name(name: str, dose_multiplier: float = 1.0) -> str:
    """
    Create a new name using `name` and the `dose_multiplier`.

@@ -58,7 +63,7 @@ def make_dose_table(
        top_names: Iterable[str],
        library: Mapping[str, Pattern],
        dose_multiplier: float = 1.0,
-        ) -> Set[Tuple[int, float]]:
+        ) -> Set[Tuple[str, float]]:
    """
    Create a set containing `(name, written_dose)` for each pattern (including subpatterns)

@@ -104,7 +109,7 @@ def dtype2dose(pattern: Pattern) -> Pattern:


 def dose2dtype(
-        library: List[Pattern],
+        library: Mapping[str, Pattern],
        ) -> Tuple[List[Pattern], List[float]]:
    """
    For each shape in each pattern, set shape.layer to the tuple

@@ -128,6 +133,10 @@ def dose2dtype(
        and dose (float, list entry).
    """
    logger.warning('TODO: dose2dtype() needs to be tested!')
+
+    if not isinstance(library, Library):
+        library = WrapROLibrary(library)
+
    # Get a table of (id(pat), written_dose) for each pattern and subpattern
    sd_table = make_dose_table(library.find_topcells(), library)

@@ -161,8 +170,8 @@ def dose2dtype(

        pat = old_pat.deepcopy()

-        if len(encoded_name) == 0:
-            raise PatternError('Zero-length name after mangle+encode, originally "{name}"'.format(pat.name))
+        if len(mangled_name) == 0:
+            raise PatternError(f'Zero-length name after mangle, originally "{name}"')

        for shape in pat.shapes:
            data_type = dose_vals_list.index(shape.dose * pat_dose)

@@ -67,7 +67,7 @@ class Label(PositionableImpl, LayerableImpl, RepeatableImpl, AnnotatableImpl,
            identifier=self.identifier,
            )

-    def __deepcopy__(self: L, memo: Dict = None) -> L:
+    def __deepcopy__(self: L, memo: Optional[Dict] = None) -> L:
        memo = {} if memo is None else memo
        new = copy.copy(self)
        new._offset = self._offset.copy()
@@ -31,7 +31,7 @@ logger = logging.getLogger(__name__)
 visitor_function_t = Callable[['Pattern', Tuple['Pattern'], Dict, NDArray[numpy.float64]], 'Pattern']
 L = TypeVar('L', bound='Library')
 ML = TypeVar('ML', bound='MutableLibrary')
-LL = TypeVar('LL', bound='LazyLibrary')
+#LL = TypeVar('LL', bound='LazyLibrary')


 class Library(Mapping[str, Pattern], metaclass=ABCMeta):

@@ -81,9 +81,9 @@ class Library(Mapping[str, Pattern], metaclass=ABCMeta):

    # TODO maybe not for immutable?
    def subtree(
-            self: L,
+            self,
            tops: Union[str, Sequence[str]],
-            ) -> ML:
+            ) -> WrapLibrary:
        """
        Return a new `Library`, containing only the specified patterns and the patterns they
        reference (recursively).

@@ -143,9 +143,9 @@ class Library(Mapping[str, Pattern], metaclass=ABCMeta):
        return self

    def flatten(
-            self: L,
+            self,
            tops: Union[str, Sequence[str]],
-            ) -> Dict[str, Pattern]:
+            ) -> Dict[str, 'Pattern']:
        """
        Removes all subpatterns and adds equivalent shapes.
        Also flattens all subpatterns.

@@ -159,7 +159,7 @@ class Library(Mapping[str, Pattern], metaclass=ABCMeta):
        if isinstance(tops, str):
            tops = (tops,)

-        flattened: Dict[str, Optional[Pattern]] = {}
+        flattened: Dict[str, Optional['Pattern']] = {}

        def flatten_single(name) -> None:
            flattened[name] = None
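A hedged sketch of the two `Library` methods whose signatures change above (`lib` and the cell name are placeholders):

    sub = lib.subtree('top_cell')      # WrapLibrary holding 'top_cell' plus everything it references
    flat = lib.flatten('top_cell')     # plain {name: Pattern} dict with references replaced by shapes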
@ -266,16 +266,16 @@ class MutableLibrary(Library, metaclass=ABCMeta):
pass

@abstractmethod
- def _set(self, key: str, value: Pattern) -> None:
+ def _set(self, key: str, value: 'Pattern') -> None:
pass

@abstractmethod
- def _merge(self: ML, other: ML, key: str) -> None:
+ def _merge(self, other: Mapping[str, 'Pattern'], key: str) -> None:
pass

def add(
self: ML,
- other: L,
+ other: Mapping[str, 'Pattern'],
use_ours: Callable[[str], bool] = lambda name: False,
use_theirs: Callable[[str], bool] = lambda name: False,
) -> ML:
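With other widened to Mapping[str, 'Pattern'], a plain dict can be merged in directly. A hedged sketch of the conflict callbacks (names are made up, and it assumes Pattern() can be constructed empty):

    pdk = {'pad': Pattern(), 'via_stack': Pattern()}
    lib.add(
        pdk,
        use_ours=lambda name: name == 'pad',              # on collision, keep our 'pad'
        use_theirs=lambda name: name.startswith('via_'),  # take theirs for via cells
    )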
@ -309,8 +309,8 @@ class MutableLibrary(Library, metaclass=ABCMeta):
def dfs(
self: ML,
top: str,
- visit_before: visitor_function_t = None,
+ visit_before: Optional[visitor_function_t] = None,
- visit_after: visitor_function_t = None,
+ visit_after: Optional[visitor_function_t] = None,
transform: Union[ArrayLike, bool, None] = False,
memo: Optional[Dict] = None,
hierarchy: Tuple[str, ...] = (),
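A minimal visitor sketch matching visitor_function_t above; the exact argument meanings are inferred here, not specified by this diff:

    def count_shapes(pattern, hierarchy, memo, transform):
        memo.setdefault('shape_count', 0)
        memo['shape_count'] += len(pattern.shapes)
        return pattern                      # visitors return the (possibly modified) pattern

    totals: Dict = {}
    lib.dfs('top_A', visit_before=count_shapes, memo=totals)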
@ -431,7 +431,9 @@ class MutableLibrary(Library, metaclass=ABCMeta):
self
"""
# This currently simplifies globally (same shape in different patterns is
- # merged into the same subpattern target.
+ # merged into the same subpattern target).

+ from .pattern import Pattern

if exclude_types is None:
exclude_types = ()
@ -517,6 +519,8 @@ class MutableLibrary(Library, metaclass=ABCMeta):
Returns:
self
"""
+ from .pattern import Pattern

if name_func is None:
name_func = lambda _pat, _shape: self.get_name('_rep')

@ -569,11 +573,11 @@ class MutableLibrary(Library, metaclass=ABCMeta):


class WrapROLibrary(Library):
- mapping: Mapping[str, Pattern]
+ mapping: Mapping[str, 'Pattern']

def __init__(
self,
- mapping: Mapping[str, Pattern],
+ mapping: Mapping[str, 'Pattern'],
) -> None:
self.mapping = mapping

@ -591,11 +595,11 @@ class WrapROLibrary(Library):


class WrapLibrary(MutableLibrary):
- mapping: MutableMapping[str, Pattern]
+ mapping: MutableMapping[str, 'Pattern']

def __init__(
self,
- mapping: MutableMapping[str, Pattern],
+ mapping: MutableMapping[str, 'Pattern'],
) -> None:
self.mapping = mapping

@ -608,16 +612,16 @@ class WrapLibrary(MutableLibrary):
def __len__(self) -> int:
return len(self.mapping)

- def __setitem__(self, key: str, value: Pattern) -> None:
+ def __setitem__(self, key: str, value: 'Pattern') -> None:
self.mapping[key] = value

def __delitem__(self, key: str) -> None:
del self.mapping[key]

- def _set(self, key: str, value: Pattern) -> None:
+ def _set(self, key: str, value: 'Pattern') -> None:
self[key] = value

- def _merge(self: ML, other: L, key: str) -> None:
+ def _merge(self, other: Mapping[str, 'Pattern'], key: str) -> None:
self[key] = other[key]

def __repr__(self) -> str:
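The point of the wrapper, sketched with a plain dict as the backing store (and assuming Pattern() can be constructed empty): it adapts any MutableMapping to the MutableLibrary interface without copying.

    backing: Dict[str, Pattern] = {}
    lib = WrapLibrary(backing)
    lib['unit_cell'] = Pattern()     # writes through to the backing dict
    assert 'unit_cell' in backing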
@ -631,7 +635,7 @@ class LazyLibrary(MutableLibrary):

The cache can be disabled by setting the `enable_cache` attribute to `False`.
"""
- dict: Dict[str, Callable[[], Pattern]]
+ dict: Dict[str, Callable[[], 'Pattern']]
cache: Dict[str, 'Pattern']
enable_cache: bool = True

@ -639,7 +643,7 @@ class LazyLibrary(MutableLibrary):
self.dict = {}
self.cache = {}

- def __setitem__(self, key: str, value: Callable[[], Pattern]) -> None:
+ def __setitem__(self, key: str, value: Callable[[], 'Pattern']) -> None:
self.dict[key] = value
if key in self.cache:
del self.cache[key]
@ -666,11 +670,11 @@ class LazyLibrary(MutableLibrary):
def __len__(self) -> int:
return len(self.dict)

- def _set(self, key: str, value: Pattern) -> None:
+ def _set(self, key: str, value: 'Pattern') -> None:
self[key] = lambda: value

- def _merge(self: LL, other: L, key: str) -> None:
+ def _merge(self, other: Mapping[str, 'Pattern'], key: str) -> None:
- if type(self) is type(other):
+ if isinstance(other, LazyLibrary):
self.dict[key] = other.dict[key]
if key in other.cache:
self.cache[key] = other.cache[key]
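A short sketch of the deferred behaviour implied by these methods (build_big_array is a made-up generator): values are zero-argument callables, the result is cached, reassigning a key drops its cache entry, and merging from another LazyLibrary copies the stored closure (plus any cached result) rather than forcing it to run.

    lazy = LazyLibrary()
    lazy['big_array'] = lambda: build_big_array()   # nothing is built yet
    pat = lazy['big_array']                         # closure runs; result lands in lazy.cache
    pat_again = lazy['big_array']                   # served from the cache while enable_cache is True
    lazy['big_array'] = lambda: build_big_array()   # __setitem__ invalidates the cached copy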
@ -703,5 +707,5 @@ class LazyLibrary(MutableLibrary):
self.cache.clear()
return self

- def __deepcopy__(self, memo: Dict = None) -> 'LazyLibrary':
+ def __deepcopy__(self, memo: Optional[Dict] = None) -> 'LazyLibrary':
raise LibraryError('LazyLibrary cannot be deepcopied (deepcopy doesn\'t descend into closures)')
@ -80,7 +80,7 @@ class Pattern(AnnotatableImpl, Mirrorable, metaclass=AutoSlots):

self.annotations = annotations if annotations is not None else {}

- def __copy__(self, memo: Dict = None) -> 'Pattern':
+ def __copy__(self) -> 'Pattern':
return Pattern(
shapes=copy.deepcopy(self.shapes),
labels=copy.deepcopy(self.labels),
@ -88,7 +88,7 @@ class Pattern(AnnotatableImpl, Mirrorable, metaclass=AutoSlots):
annotations=copy.deepcopy(self.annotations),
)

- def __deepcopy__(self, memo: Dict = None) -> 'Pattern':
+ def __deepcopy__(self, memo: Optional[Dict] = None) -> 'Pattern':
memo = {} if memo is None else memo
new = Pattern(
shapes=copy.deepcopy(self.shapes, memo),
@ -116,9 +116,9 @@ class Pattern(AnnotatableImpl, Mirrorable, metaclass=AutoSlots):

def subset(
self,
- shapes: Callable[[Shape], bool] = None,
+ shapes: Optional[Callable[[Shape], bool]] = None,
- labels: Callable[[Label], bool] = None,
+ labels: Optional[Callable[[Label], bool]] = None,
- subpatterns: Callable[[SubPattern], bool] = None,
+ subpatterns: Optional[Callable[[SubPattern], bool]] = None,
) -> 'Pattern':
"""
Returns a Pattern containing only the entities (e.g. shapes) for which the
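A small usage sketch, with the filter semantics taken from the docstring above (the layer values are hypothetical):

    metal_only = pat.subset(
        shapes=lambda s: s.layer == (1, 0),   # keep only shapes on layer (1, 0)
        labels=lambda l: l.layer == (1, 0),
    )                                          # subpatterns filter left at its default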
@ -295,6 +295,7 @@ class Pattern(AnnotatableImpl, Mirrorable, metaclass=AutoSlots):
Returns:
self
"""
+ entry: Scalable
for entry in chain(self.shapes, self.subpatterns):
entry.scale_by(c)
return self
@ -133,7 +133,7 @@ class Grid(Repetition, metaclass=AutoSlots):
)
return new

- def __deepcopy__(self, memo: Dict = None) -> 'Grid':
+ def __deepcopy__(self, memo: Optional[Dict] = None) -> 'Grid':
memo = {} if memo is None else memo
new = copy.copy(self)
return new
@ -239,7 +239,7 @@ class Grid(Repetition, metaclass=AutoSlots):
a_extent = self.a_vector * self.a_count
b_extent = self.b_vector * self.b_count if (self.b_vector is not None) else 0  # type: Union[NDArray[numpy.float64], float]

- corners = ((0, 0), a_extent, b_extent, a_extent + b_extent)
+ corners = numpy.stack(((0, 0), a_extent, b_extent, a_extent + b_extent))
xy_min = numpy.min(corners, axis=0)
xy_max = numpy.max(corners, axis=0)
return numpy.array((xy_min, xy_max))
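The bounds computation, sketched standalone with made-up lattice vectors (and assuming both vectors are present, so every corner is a length-2 vector): stack the four corner displacements into a (4, 2) array and take the per-axis min/max.

    import numpy

    a_extent = numpy.array([10.0, 0.0]) * 3     # a_vector * a_count
    b_extent = numpy.array([0.0, 5.0]) * 2      # b_vector * b_count
    corners = numpy.stack(((0, 0), a_extent, b_extent, a_extent + b_extent))   # shape (4, 2)
    bounds = numpy.array((corners.min(axis=0), corners.max(axis=0)))           # [[0, 0], [30, 10]]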
@ -195,7 +195,7 @@ class Arc(Shape, metaclass=AutoSlots):
self.poly_max_arclen = poly_max_arclen
[self.mirror(a) for a, do in enumerate(mirrored) if do]

- def __deepcopy__(self, memo: Dict = None) -> 'Arc':
+ def __deepcopy__(self, memo: Optional[Dict] = None) -> 'Arc':
memo = {} if memo is None else memo
new = copy.copy(self)
new._offset = self._offset.copy()
@ -74,7 +74,7 @@ class Circle(Shape, metaclass=AutoSlots):
self.poly_num_points = poly_num_points
self.poly_max_arclen = poly_max_arclen

- def __deepcopy__(self, memo: Dict = None) -> 'Circle':
+ def __deepcopy__(self, memo: Optional[Dict] = None) -> 'Circle':
memo = {} if memo is None else memo
new = copy.copy(self)
new._offset = self._offset.copy()
@ -125,7 +125,7 @@ class Ellipse(Shape, metaclass=AutoSlots):
self.poly_num_points = poly_num_points
self.poly_max_arclen = poly_max_arclen

- def __deepcopy__(self, memo: Dict = None) -> 'Ellipse':
+ def __deepcopy__(self, memo: Optional[Dict] = None) -> 'Ellipse':
memo = {} if memo is None else memo
new = copy.copy(self)
new._offset = self._offset.copy()
@ -185,7 +185,7 @@ class Path(Shape, metaclass=AutoSlots):
self.rotate(rotation)
[self.mirror(a) for a, do in enumerate(mirrored) if do]

- def __deepcopy__(self, memo: Dict = None) -> 'Path':
+ def __deepcopy__(self, memo: Optional[Dict] = None) -> 'Path':
memo = {} if memo is None else memo
new = copy.copy(self)
new._offset = self._offset.copy()
@ -245,8 +245,8 @@ class Path(Shape, metaclass=AutoSlots):

def to_polygons(
self,
- poly_num_points: int = None,
+ poly_num_points: Optional[int] = None,
- poly_max_arclen: float = None,
+ poly_max_arclen: Optional[float] = None,
) -> List['Polygon']:
extensions = self._calculate_cap_extensions()

@ -345,8 +345,8 @@ class Polygon(Shape, metaclass=AutoSlots):

def to_polygons(
self,
- poly_num_points: int = None,  # unused
+ poly_num_points: Optional[int] = None,  # unused
- poly_max_arclen: float = None,  # unused
+ poly_max_arclen: Optional[float] = None,  # unused
) -> List['Polygon']:
return [copy.deepcopy(self)]

@ -176,7 +176,7 @@ class Shape(PositionableImpl, LayerableImpl, DoseableImpl, Rotatable, Mirrorable
return inds

# Find the y indices on all x gridlines
- xs = gx[gxi_min:gxi_max]
+ xs = gx[int(gxi_min):int(gxi_max)]
inds = get_grid_inds(xs)

# Find y-intersections for x-midpoints
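The int() casts matter because the slice bounds can arrive as numpy floating-point values, and recent numpy rejects non-integer slice indices. A standalone illustration (the values are made up):

    import numpy

    gx = numpy.linspace(0, 1, 11)
    gxi_min, gxi_max = numpy.float64(2), numpy.float64(7)
    # gx[gxi_min:gxi_max] fails on recent numpy: non-integer slice indices are rejected
    xs = gx[int(gxi_min):int(gxi_max)]     # explicit cast keeps the slice valid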
@ -100,7 +100,7 @@ class Text(RotatableImpl, Shape, metaclass=AutoSlots):
self.annotations = annotations if annotations is not None else {}
self.font_path = font_path

- def __deepcopy__(self, memo: Dict = None) -> 'Text':
+ def __deepcopy__(self, memo: Optional[Dict] = None) -> 'Text':
memo = {} if memo is None else memo
new = copy.copy(self)
new._offset = self._offset.copy()
@ -94,7 +94,7 @@ class SubPattern(PositionableImpl, DoseableImpl, RotatableImpl, ScalableImpl, Mi
)
return new

- def __deepcopy__(self, memo: Dict = None) -> 'SubPattern':
+ def __deepcopy__(self, memo: Optional[Dict] = None) -> 'SubPattern':
memo = {} if memo is None else memo
new = copy.copy(self)
new.repetition = copy.deepcopy(self.repetition, memo)
@ -33,6 +33,7 @@ class Doseable(metaclass=ABCMeta):
'''
---- Methods
'''
+ @abstractmethod
def set_dose(self: T, dose: float) -> T:
"""
Set the dose
@ -63,7 +64,7 @@ class DoseableImpl(Doseable, metaclass=ABCMeta):
return self._dose

@dose.setter
- def dose(self, val: float):
+ def dose(self, val: float) -> None:
if not val >= 0:
raise MasqueError('Dose must be non-negative')
self._dose = val
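The setter is a small validated-property pattern; a self-contained sketch of the same idea (not the library's class, and using ValueError in place of MasqueError):

    class HasDose:
        _dose: float

        @property
        def dose(self) -> float:
            return self._dose

        @dose.setter
        def dose(self, val: float) -> None:
            if not val >= 0:                 # also rejects NaN, since NaN >= 0 is False
                raise ValueError('Dose must be non-negative')
            self._dose = val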
@ -32,6 +32,7 @@ class Layerable(metaclass=ABCMeta):
'''
---- Methods
'''
+ @abstractmethod
def set_layer(self: T, layer: layer_t) -> T:
"""
Set the layer
|
@ -1,7 +1,7 @@
|
|||||||
from typing import TypeVar, Dict, Tuple, Any
|
from typing import TypeVar, Dict, Tuple, Any
|
||||||
from abc import ABCMeta, abstractmethod
|
from abc import ABCMeta, abstractmethod
|
||||||
|
|
||||||
from ..error import PatternLockedError
|
#from ..error import PatternLockedError
|
||||||
|
|
||||||
|
|
||||||
T = TypeVar('T', bound='Lockable')
|
T = TypeVar('T', bound='Lockable')
|
||||||
|