Remove primary/secondary pattern split

Replacement TBD -- likely some way to rename patterns when merging.
This commit is contained in:
jan 2022-07-07 16:36:43 -07:00
parent bff9940518
commit b6f1af6e09

View File

@ -6,7 +6,6 @@ from typing import Dict, Callable, TypeVar, TYPE_CHECKING
from typing import Any, Tuple, Union, Iterator from typing import Any, Tuple, Union, Iterator
import logging import logging
from pprint import pformat from pprint import pformat
from dataclasses import dataclass
import copy import copy
from ..error import LibraryError from ..error import LibraryError
@ -18,16 +17,6 @@ if TYPE_CHECKING:
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@dataclass
class PatternGenerator:
__slots__ = ('tag', 'gen')
tag: str
""" Unique identifier for the source """
gen: Callable[[], 'Pattern']
""" Function which generates a pattern when called """
L = TypeVar('L', bound='Library') L = TypeVar('L', bound='Library')
@ -41,44 +30,25 @@ class Library:
Library expects `sp.identifier[0]` to contain a string which specifies the Library expects `sp.identifier[0]` to contain a string which specifies the
referenced pattern's name. referenced pattern's name.
Patterns can either be "primary" (default) or "secondary". Both get the
same deferred-load behavior, but "secondary" patterns may have conflicting
names and are not accessible through basic []-indexing. They are only used
to fill symbolic references in cases where there is no "primary" pattern
available, and only if both the referencing and referenced pattern-generators'
`tag` values match (i.e., only if they came from the same source).
Primary patterns can be turned into secondary patterns with the `demote`
method, `promote` performs the reverse (secondary -> primary) operation.
The `set_const` and `set_value` methods provide an easy way to transparently
construct PatternGenerator objects and directly create "secondary"
patterns.
The cache can be disabled by setting the `enable_cache` attribute to `False`. The cache can be disabled by setting the `enable_cache` attribute to `False`.
""" """
primary: Dict[str, PatternGenerator] generators: Dict[str, Callable[[], 'Pattern']]
secondary: Dict[Tuple[str, str], PatternGenerator] cache: Dict[str, 'Pattern']
cache: Dict[Union[str, Tuple[str, str]], 'Pattern']
enable_cache: bool = True enable_cache: bool = True
def __init__(self) -> None: def __init__(self) -> None:
self.primary = {} self.generators = {}
self.secondary = {}
self.cache = {} self.cache = {}
def __setitem__(self, key: str, value: PatternGenerator) -> None: def __setitem__(self, key: str, value: Callable[[], 'Pattern']) -> None:
self.primary[key] = value self.generators[key] = value
if key in self.cache: if key in self.cache:
logger.warning(f'Replaced library item "{key}" & existing cache entry.' logger.warning(f'Replaced library item "{key}" & existing cache entry.'
' Previously-generated Pattern will *not* be updated!') ' Previously-generated Pattern will *not* be updated!')
del self.cache[key] del self.cache[key]
def __delitem__(self, key: str) -> None: def __delitem__(self, key: str) -> None:
if isinstance(key, str): del self.generators[key]
del self.primary[key]
elif isinstance(key, tuple):
del self.secondary[key]
if key in self.cache: if key in self.cache:
logger.warning(f'Deleting library item "{key}" & existing cache entry.' logger.warning(f'Deleting library item "{key}" & existing cache entry.'
@ -86,44 +56,21 @@ class Library:
del self.cache[key] del self.cache[key]
def __getitem__(self, key: str) -> 'Pattern': def __getitem__(self, key: str) -> 'Pattern':
return self.get_primary(key)
def __iter__(self) -> Iterator[str]:
return iter(self.keys())
def __contains__(self, key: str) -> bool:
return key in self.primary
def get_primary(self, key: str) -> 'Pattern':
if self.enable_cache and key in self.cache: if self.enable_cache and key in self.cache:
logger.debug(f'found {key} in cache') logger.debug(f'found {key} in cache')
return self.cache[key] return self.cache[key]
logger.debug(f'loading {key}') logger.debug(f'loading {key}')
pg = self.primary[key] pat = self.generators[key]()
pat = pg.gen() self.resolve_subpatterns(pat)
self.resolve_subpatterns(pat, pg.tag)
self.cache[key] = pat self.cache[key] = pat
return pat return pat
def get_secondary(self, key: str, tag: str) -> 'Pattern': def __iter__(self) -> Iterator[str]:
logger.debug(f'get_secondary({key}, {tag})') return iter(self.keys())
key2 = (key, tag)
if self.enable_cache and key2 in self.cache:
return self.cache[key2]
pg = self.secondary[key2] def __contains__(self, key: str) -> bool:
pat = pg.gen() return key in self.generators
self.resolve_subpatterns(pat, pg.tag)
self.cache[key2] = pat
return pat
def set_secondary(self, key: str, tag: str, value: PatternGenerator) -> None:
self.secondary[(key, tag)] = value
if (key, tag) in self.cache:
logger.warning(f'Replaced library item "{key}" & existing cache entry.'
' Previously-generated Pattern will *not* be updated!')
del self.cache[(key, tag)]
def resolve_subpatterns(self, pat: 'Pattern', tag: str) -> 'Pattern': def resolve_subpatterns(self, pat: 'Pattern', tag: str) -> 'Pattern':
logger.debug(f'Resolving subpatterns in {pat.name}') logger.debug(f'Resolving subpatterns in {pat.name}')
@ -132,19 +79,15 @@ class Library:
continue continue
key = sp.identifier[0] key = sp.identifier[0]
if key in self.primary: if key in self.generators:
sp.pattern = self.get_primary(key) sp.pattern = self[key]
continue
if (key, tag) in self.secondary:
sp.pattern = self.get_secondary(key, tag)
continue continue
raise LibraryError(f'Broken reference to {key} (tag {tag})') raise LibraryError(f'Broken reference to {key} (tag {tag})')
return pat return pat
def keys(self) -> Iterator[str]: def keys(self) -> Iterator[str]:
return iter(self.primary.keys()) return iter(self.generators.keys())
def values(self) -> Iterator['Pattern']: def values(self) -> Iterator['Pattern']:
return iter(self[key] for key in self.keys()) return iter(self[key] for key in self.keys())
@ -153,56 +96,7 @@ class Library:
return iter((key, self[key]) for key in self.keys()) return iter((key, self[key]) for key in self.keys())
def __repr__(self) -> str: def __repr__(self) -> str:
return '<Library with keys ' + repr(list(self.primary.keys())) + '>' return '<Library with keys ' + repr(list(self.generators.keys())) + '>'
def set_const(
self,
key: str,
tag: Any,
const: 'Pattern',
secondary: bool = False,
) -> None:
"""
Convenience function to avoid having to manually wrap
constant values into callables.
Args:
key: Lookup key, usually the cell/pattern name
tag: Unique tag for the source, used to disambiguate secondary patterns
const: Pattern object to return
secondary: If True, this pattern is not accessible for normal lookup, and is
only used as a sub-component of other patterns if no non-secondary
equivalent is available.
"""
pg = PatternGenerator(tag=tag, gen=lambda: const)
if secondary:
self.secondary[(key, tag)] = pg
else:
self.primary[key] = pg
def set_value(
self,
key: str,
tag: str,
value: Callable[[], 'Pattern'],
secondary: bool = False,
) -> None:
"""
Convenience function to automatically build a PatternGenerator.
Args:
key: Lookup key, usually the cell/pattern name
tag: Unique tag for the source, used to disambiguate secondary patterns
value: Callable which takes no arguments and generates the `Pattern` object
secondary: If True, this pattern is not accessible for normal lookup, and is
only used as a sub-component of other patterns if no non-secondary
equivalent is available.
"""
pg = PatternGenerator(tag=tag, gen=value)
if secondary:
self.secondary[(key, tag)] = pg
else:
self.primary[key] = pg
def precache(self: L) -> L: def precache(self: L) -> L:
""" """
@ -211,17 +105,15 @@ class Library:
Returns: Returns:
self self
""" """
for key in self.primary: for key in self.generators:
_ = self.get_primary(key) _ = self[key]
for key2 in self.secondary:
_ = self.get_secondary(*key2)
return self return self
def add( def add(
self: L, self: L,
other: L, other: L,
use_ours: Callable[[Union[str, Tuple[str, str]]], bool] = lambda name: False, use_ours: Callable[[str], bool] = lambda name: False,
use_theirs: Callable[[Union[str, Tuple[str, str]]], bool] = lambda name: False, use_theirs: Callable[[str], bool] = lambda name: False,
) -> L: ) -> L:
""" """
Add keys from another library into this one. Add keys from another library into this one.
@ -229,8 +121,6 @@ class Library:
Args: Args:
other: The library to insert keys from other: The library to insert keys from
use_ours: Decision function for name conflicts. use_ours: Decision function for name conflicts.
May be called with cell names and (name, tag) tuples for primary or
secondary cells, respectively.
Should return `True` if the value from `self` should be used. Should return `True` if the value from `self` should be used.
use_theirs: Decision function for name conflicts. Same format as `use_ours`. use_theirs: Decision function for name conflicts. Same format as `use_ours`.
Should return `True` if the value from `other` should be used. Should return `True` if the value from `other` should be used.
@ -238,72 +128,21 @@ class Library:
Returns: Returns:
self self
""" """
duplicates1 = set(self.primary.keys()) & set(other.primary.keys()) duplicates = set(self.keys()) & set(other.keys())
duplicates2 = set(self.secondary.keys()) & set(other.secondary.keys()) keep_ours = set(name for name in duplicates if use_ours(name))
keep_ours1 = set(name for name in duplicates1 if use_ours(name)) keep_theirs = set(name for name in duplicates - keep_ours if use_theirs(name))
keep_ours2 = set(name for name in duplicates2 if use_ours(name)) conflicts = duplicates - keep_ours - keep_theirs
keep_theirs1 = set(name for name in duplicates1 - keep_ours1 if use_theirs(name))
keep_theirs2 = set(name for name in duplicates2 - keep_ours2 if use_theirs(name))
conflicts1 = duplicates1 - keep_ours1 - keep_theirs1
conflicts2 = duplicates2 - keep_ours2 - keep_theirs2
if conflicts1: if conflicts:
raise LibraryError('Unresolved duplicate keys encountered in library merge: ' + pformat(conflicts1)) raise LibraryError('Unresolved duplicate keys encountered in library merge: '
+ pformat(conflicts))
if conflicts2:
raise LibraryError('Unresolved duplicate secondary keys encountered in library merge: ' + pformat(conflicts2))
for key1 in set(other.primary.keys()) - keep_ours1:
self[key1] = other.primary[key1]
if key1 in other.cache:
self.cache[key1] = other.cache[key1]
for key2 in set(other.secondary.keys()) - keep_ours2:
self.set_secondary(*key2, other.secondary[key2])
if key2 in other.cache:
self.cache[key2] = other.cache[key2]
for key in set(other.keys()) - keep_ours:
self[key] = other.generators[key]
if key in other.cache:
self.cache[key] = other.cache[key]
return self return self
def demote(self, key: str) -> None:
"""
Turn a primary pattern into a secondary one.
It will no longer be accessible through [] indexing and will only be used to
when referenced by other patterns from the same source, and only if no primary
pattern with the same name exists.
Args:
key: Lookup key, usually the cell/pattern name
"""
pg = self.primary[key]
key2 = (key, pg.tag)
self.secondary[key2] = pg
if key in self.cache:
self.cache[key2] = self.cache[key]
del self[key]
def promote(self, key: str, tag: str) -> None:
"""
Turn a secondary pattern into a primary one.
It will become accessible through [] indexing and will be used to satisfy any
reference to a pattern with its key, regardless of tag.
Args:
key: Lookup key, usually the cell/pattern name
tag: Unique tag for identifying the pattern's source, used to disambiguate
secondary patterns
"""
if key in self.primary:
raise LibraryError(f'Promoting ({key}, {tag}), but {key} already exists in primary!')
key2 = (key, tag)
pg = self.secondary[key2]
self.primary[key] = pg
if key2 in self.cache:
self.cache[key] = self.cache[key2]
del self.secondary[key2]
del self.cache[key2]
def copy(self, preserve_cache: bool = False) -> 'Library': def copy(self, preserve_cache: bool = False) -> 'Library':
""" """
Create a copy of this `Library`. Create a copy of this `Library`.
@ -315,8 +154,7 @@ class Library:
A copy of self A copy of self
""" """
new = Library() new = Library()
new.primary.update(self.primary) new.generators.update(self.generators)
new.secondary.update(self.secondary)
new.cache.update(self.cache) new.cache.update(self.cache)
return new return new
@ -335,23 +173,3 @@ class Library:
def __deepcopy__(self: L, memo: Optional[Dict] = None) -> L: def __deepcopy__(self: L, memo: Optional[Dict] = None) -> L:
raise LibraryError('Library cannot be deepcopied -- python copy.deepcopy() does not copy closures!') raise LibraryError('Library cannot be deepcopied -- python copy.deepcopy() does not copy closures!')
r"""
# Add a filter for names which aren't added
- Registration:
- scanned files (tag=filename, gen_fn[stream, {name: pos}])
- generator functions (tag='fn?', gen_fn[params])
- merge decision function (based on tag and cell name; may also answer "neither", meaning keep both and load using the same tag)
- Load process:
- file:
- read single cell
- check subpat identifiers, and load stuff recursively based on those. If not present, load from same file??
- function:
- generate cell
- traverse and check if we should load any subcells from elsewhere. replace if so.
* should fn generate subcells at all, or register those separately and have us control flow? maybe ask us and generate itself if not present?
- Scan all GDS files, save name -> (file, position). Keep the streams handy.
- Merge all names. This requires subcell merge because we don't know hierarchy.
- possibly include a "neither" option during merge, to deal with subcells. Means: just use parent's file.
"""