Add Library management functionality

This commit is contained in:
Jan Petykiewicz 2020-09-26 17:37:23 -07:00
parent 7cad46fa46
commit aa5696d884
6 changed files with 371 additions and 0 deletions


@@ -25,6 +25,7 @@ from .label import Label
from .subpattern import SubPattern
from .pattern import Pattern
from .utils import layer_t, annotations_t
from .library import Library
__author__ = 'Jan Petykiewicz'


@@ -15,3 +15,12 @@ class PatternLockedError(PatternError):
    """
    def __init__(self):
        PatternError.__init__(self, 'Tried to modify a locked Pattern, subpattern, or shape')


class LibraryError(Exception):
    """
    Exception raised by Library classes
    """
    pass


@@ -539,3 +539,48 @@ def disambiguate_pattern_names(patterns: Sequence[Pattern],
        pat.name = suffixed_name
        used_names.append(suffixed_name)


def load_library(stream: BinaryIO,
                 tag: str,
                 is_secondary: Optional[Callable[[str], bool]] = None,
                 ) -> Tuple[Library, Dict[str, Any]]:
    """
    Scan a GDSII file to determine what structures are present, and create
    a library from them. This enables deferred reading of structures
    on an as-needed basis.

    Structures are loaded as "secondary" only when `is_secondary` returns
    True for their name.

    Args:
        stream: Seekable stream. Position 0 should be the start of the file.
            The caller should leave the stream open while the library
            is still in use, since the library will need to access it
            in order to read the structure contents.
        tag: Unique identifier that will be used to identify this data source
        is_secondary: Function which takes a structure name and returns
            True if the structure should only be used as a subcell
            and not appear in the main Library interface.
            Default always returns False.

    Returns:
        Library object, allowing for deferred load of structures.
        Additional library info (dict, same format as from `read`).
    """
    if is_secondary is None:
        is_secondary = lambda k: False

    stream.seek(0)
    library_info = _read_header(stream)
    structs = klamath.library.scan_structs(stream)

    lib = Library()
    for name_bytes, pos in structs.items():
        name = name_bytes.decode('ASCII')

        def mkstruct(pos: int = pos, name: str = name) -> Pattern:
            stream.seek(pos)
            return read_elements(stream, name, raw_mode=True)

        lib.set_value(name, tag, mkstruct, secondary=is_secondary(name))

    return lib, library_info
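
A minimal usage sketch of the deferred-load flow implemented by `load_library` above; the file name `chip.gds` and structure name `top_cell` are hypothetical placeholders:

```
# Sketch only: assumes `load_library` (defined above) is importable, and that
# "chip.gds" contains a structure named "top_cell". The stream must stay open
# for as long as the returned library is in use.
with open('chip.gds', 'rb') as stream:
    lib, info = load_library(stream, tag='chip.gds')
    top = lib['top_cell']      # structure is read from the stream only at this point
    again = lib['top_cell']    # second access is served from the Library cache
```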


@@ -0,0 +1 @@
from .library import Library, PatternGenerator

masque/library/library.py (new file, 267 lines)

@@ -0,0 +1,267 @@
"""
Library class for managing unique name->pattern mappings and
 deferred loading or creation.
"""
from typing import Dict, Callable, TypeVar, Generic, TYPE_CHECKING, Any, Tuple, Union
import logging
from pprint import pformat
from dataclasses import dataclass
from functools import lru_cache

from ..error import LibraryError

if TYPE_CHECKING:
    from ..pattern import Pattern


logger = logging.getLogger(__name__)


@dataclass
class PatternGenerator:
    __slots__ = ('tag', 'gen')

    tag: str
    """ Unique identifier for the source """

    gen: Callable[[], 'Pattern']
    """ Function which generates a pattern when called """


L = TypeVar('L', bound='Library')


class Library:
    """
    This class is usually used to create a device library by mapping names to
    functions which generate or load the relevant `Pattern` object as-needed.

    Generated/loaded patterns can have "symbolic" references, where a SubPattern
    object `sp` has a `None`-valued `sp.pattern` attribute, in which case the
    Library expects `sp.identifier[0]` to contain a string which specifies the
    referenced pattern's name.

    Patterns can either be "primary" (default) or "secondary". Both get the
    same deferred-load behavior, but "secondary" patterns may have conflicting
    names and are not accessible through basic []-indexing. They are only used
    to fill symbolic references in cases where there is no "primary" pattern
    available, and only if both the referencing and referenced pattern-generators'
    `tag` values match (i.e., only if they came from the same source).

    Primary patterns can be turned into secondary patterns with the `demote`
    method; `promote` performs the reverse (secondary -> primary) operation.

    The `set_const` and `set_value` methods provide an easy way to transparently
    construct `PatternGenerator` objects and to directly create "secondary" patterns.

    The cache can be disabled by setting the `enable_cache` attribute to `False`.
    """
    primary: Dict[str, PatternGenerator]
    secondary: Dict[Tuple[str, str], PatternGenerator]
    cache: Dict[Union[str, Tuple[str, str]], 'Pattern']
    enable_cache: bool = True

    def __init__(self) -> None:
        self.primary = {}
        self.secondary = {}
        self.cache = {}
    def __setitem__(self, key: str, value: PatternGenerator) -> None:
        self.primary[key] = value
        if key in self.cache:
            del self.cache[key]

    def __delitem__(self, key: Union[str, Tuple[str, str]]) -> None:
        if isinstance(key, str):
            del self.primary[key]
        elif isinstance(key, tuple):
            del self.secondary[key]

        if key in self.cache:
            del self.cache[key]

    def __getitem__(self, key: str) -> 'Pattern':
        if self.enable_cache and key in self.cache:
            logger.debug(f'found {key} in cache')
            return self.cache[key]

        logger.debug(f'loading {key}')
        pg = self.primary[key]
        pat = pg.gen()
        self.resolve_subpatterns(pat, pg.tag)
        self.cache[key] = pat
        return pat
    def get_primary(self, key: str) -> 'Pattern':
        return self[key]

    def get_secondary(self, key: str, tag: str) -> 'Pattern':
        logger.debug(f'get_secondary({key}, {tag})')
        key2 = (key, tag)
        if self.enable_cache and key2 in self.cache:
            return self.cache[key2]

        pg = self.secondary[key2]
        pat = pg.gen()
        self.resolve_subpatterns(pat, pg.tag)
        self.cache[key2] = pat
        return pat

    def resolve_subpatterns(self, pat: 'Pattern', tag: str) -> 'Pattern':
        logger.debug(f'Resolving subpatterns in {pat.name}')
        for sp in pat.subpatterns:
            if sp.pattern is not None:
                continue

            key = sp.identifier[0]
            if key in self.primary:
                sp.pattern = self[key]
                continue

            if (key, tag) in self.secondary:
                sp.pattern = self.get_secondary(key, tag)
                continue

            raise LibraryError(f'Broken reference to {key} (tag {tag})')
        return pat

    def __repr__(self) -> str:
        return '<Library with keys ' + repr(list(self.primary.keys())) + '>'
    def set_const(self, key: str, tag: str, const: 'Pattern', secondary: bool = False) -> None:
        """
        Convenience function to avoid having to manually wrap
        constant values into callables.

        Args:
            key: Lookup key, usually the cell/pattern name
            tag: Unique tag for the source, used to disambiguate secondary patterns
            const: Pattern object to return
            secondary: If True, this pattern is not accessible for normal lookup, and is
                only used as a sub-component of other patterns if no non-secondary
                equivalent is available.
        """
        pg = PatternGenerator(tag=tag, gen=lambda: const)
        if secondary:
            self.secondary[(key, tag)] = pg
        else:
            self.primary[key] = pg
    def set_value(self, key: str, tag: str, value: Callable[[], 'Pattern'], secondary: bool = False) -> None:
        """
        Convenience function to automatically build a PatternGenerator.

        Args:
            key: Lookup key, usually the cell/pattern name
            tag: Unique tag for the source, used to disambiguate secondary patterns
            value: Callable which takes no arguments and generates the `Pattern` object
            secondary: If True, this pattern is not accessible for normal lookup, and is
                only used as a sub-component of other patterns if no non-secondary
                equivalent is available.
        """
        pg = PatternGenerator(tag=tag, gen=value)
        if secondary:
            self.secondary[(key, tag)] = pg
        else:
            self.primary[key] = pg
    def precache(self) -> 'Library':
        """
        Force all patterns into the cache.

        Returns:
            self
        """
        for key in self.primary:
            _ = self.get_primary(key)
        for key2 in self.secondary:
            _ = self.get_secondary(*key2)
        return self
    def add(self, other: 'Library') -> 'Library':
        """
        Add keys from another library into this one.

        There must be no conflicting keys.

        Args:
            other: The library to insert keys from

        Returns:
            self
        """
        conflicts = [key for key in other.primary
                     if key in self.primary]
        if conflicts:
            raise LibraryError('Duplicate keys encountered in library merge: ' + pformat(conflicts))

        conflicts2 = [key2 for key2 in other.secondary
                      if key2 in self.secondary]
        if conflicts2:
            raise LibraryError('Duplicate secondary keys encountered in library merge: ' + pformat(conflicts2))

        self.primary.update(other.primary)
        self.secondary.update(other.secondary)
        self.cache.update(other.cache)
        return self
    def demote(self, key: str) -> None:
        """
        Turn a primary pattern into a secondary one.

        It will no longer be accessible through [] indexing, and will only be used
        when referenced by other patterns from the same source, and only if no primary
        pattern with the same name exists.

        Args:
            key: Lookup key, usually the cell/pattern name
        """
        pg = self.primary[key]
        key2 = (key, pg.tag)
        self.secondary[key2] = pg
        if key in self.cache:
            self.cache[key2] = self.cache[key]
        del self[key]
    def promote(self, key: str, tag: str) -> None:
        """
        Turn a secondary pattern into a primary one.

        It will become accessible through [] indexing and will be used to satisfy any
        reference to a pattern with its key, regardless of tag.

        Args:
            key: Lookup key, usually the cell/pattern name
            tag: Unique tag for identifying the pattern's source, used to disambiguate
                secondary patterns
        """
        if key in self.primary:
            raise LibraryError(f'Promoting ({key}, {tag}), but {key} already exists in primary!')

        key2 = (key, tag)
        pg = self.secondary[key2]
        self.primary[key] = pg
        if key2 in self.cache:
            self.cache[key] = self.cache[key2]
            del self.cache[key2]
        del self.secondary[key2]
r"""
# Add a filter for names which aren't added
- Registration:
- scanned files (tag=filename, gen_fn[stream, {name: pos}])
- generator functions (tag='fn?', gen_fn[params])
- merge decision function (based on tag and cell name, can be "neither") ??? neither=keep both, load using same tag!
- Load process:
- file:
- read single cell
- check subpat identifiers, and load stuff recursively based on those. If not present, load from same file??
- function:
- generate cell
- traverse and check if we should load any subcells from elsewhere. replace if so.
* should fn generate subcells at all, or register those separately and have us control flow? maybe ask us and generate itself if not present?
- Scan all GDS files, save name -> (file, position). Keep the streams handy.
- Merge all names. This requires subcell merge because we don't know hierarchy.
- possibly include a "neither" option during merge, to deal with subcells. Means: just use parent's file.
"""

masque/library/utils.py (new file, 48 lines)

@@ -0,0 +1,48 @@
from typing import Callable, TypeVar, Generic
from functools import lru_cache


Key = TypeVar('Key')
Value = TypeVar('Value')


class DeferredDict(dict, Generic[Key, Value]):
    """
    This is a modified `dict` which is used to defer loading/generating
    values until they are accessed.

    ```
    bignum = my_slow_function()         # slow function call, would like to defer this
    numbers = DeferredDict()
    numbers['big'] = my_slow_function   # no slow function call here
    assert(bignum == numbers['big'])    # first access is slow (function called)
    assert(bignum == numbers['big'])    # second access is fast (result is cached)
    ```

    The `set_const` method is provided for convenience;
    `numbers['a'] = lambda: 10` is equivalent to `numbers.set_const('a', 10)`.
    """
    def __init__(self, *args, **kwargs) -> None:
        dict.__init__(self)
        self.update(*args, **kwargs)

    def __setitem__(self, key: Key, value: Callable[[], Value]) -> None:
        cached_fn = lru_cache(maxsize=1)(value)
        dict.__setitem__(self, key, cached_fn)

    def __getitem__(self, key: Key) -> Value:
        return dict.__getitem__(self, key)()

    def update(self, *args, **kwargs) -> None:
        for k, v in dict(*args, **kwargs).items():
            self[k] = v

    def __repr__(self) -> str:
        return '<DeferredDict with keys ' + repr(set(self.keys())) + '>'

    def set_const(self, key: Key, value: Value) -> None:
        """
        Convenience function to avoid having to manually wrap
        constant values into callables.
        """
        self[key] = lambda: value
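
A short, runnable sketch of the deferred evaluation provided by `DeferredDict`; the module path is taken from the file header above.

```
from masque.library.utils import DeferredDict

calls = {'n': 0}

def slow_value() -> int:
    # stand-in for an expensive load/generate step
    calls['n'] += 1
    return 42

d = DeferredDict()
d['answer'] = slow_value    # nothing is computed yet
d.set_const('pi', 3.14159)  # equivalent to d['pi'] = lambda: 3.14159

assert d['answer'] == 42    # slow_value runs here (first access)
assert d['answer'] == 42    # cached by lru_cache(maxsize=1); no second call
assert calls['n'] == 1
assert d['pi'] == 3.14159
```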