Compare commits
No commits in common. "master" and "performance_testing" have entirely different histories.
master
...
performanc
29
.flake8
29
.flake8
@ -1,29 +0,0 @@
|
||||
[flake8]
|
||||
ignore =
|
||||
# E501 line too long
|
||||
E501,
|
||||
# W391 newlines at EOF
|
||||
W391,
|
||||
# E241 multiple spaces after comma
|
||||
E241,
|
||||
# E302 expected 2 newlines
|
||||
E302,
|
||||
# W503 line break before binary operator (to be deprecated)
|
||||
W503,
|
||||
# E265 block comment should start with '# '
|
||||
E265,
|
||||
# E123 closing bracket does not match indentation of opening bracket's line
|
||||
E123,
|
||||
# E124 closing bracket does not match visual indentation
|
||||
E124,
|
||||
# E221 multiple spaces before operator
|
||||
E221,
|
||||
# E201 whitespace after '['
|
||||
E201,
|
||||
# E741 ambiguous variable name 'I'
|
||||
E741,
|
||||
|
||||
|
||||
per-file-ignores =
|
||||
# F401 import without use
|
||||
*/__init__.py: F401,
|
126
README.md
126
README.md
@ -10,105 +10,107 @@ has deprived the man of both a schematic and a better connectivity tool.
|
||||
|
||||
- [Source repository](https://mpxd.net/code/jan/snarled)
|
||||
- [PyPI](https://pypi.org/project/snarled)
|
||||
- [Github mirror](https://github.com/anewusername/snarled)
|
||||
|
||||
## Installation
|
||||
|
||||
Requirements:
|
||||
* python >= 3.10 (written and tested with 3.11)
|
||||
* python >= 3.9 (written and tested with 3.10)
|
||||
* numpy
|
||||
* klayout (python package only)
|
||||
* pyclipper
|
||||
|
||||
|
||||
Install with pip:
|
||||
```bash
|
||||
pip install snarled
|
||||
pip3 install snarled
|
||||
```
|
||||
|
||||
Alternatively, install from git
|
||||
```bash
|
||||
pip install git+https://mpxd.net/code/jan/snarled.git@release
|
||||
pip3 install git+https://mpxd.net/code/jan/snarled.git@release
|
||||
```
|
||||
|
||||
## Example
|
||||
See `examples/check.py` (python interface) or `examples/run.sh` (command-line interface).
|
||||
See `examples/check.py`. Note that the example uses `masque` to load data.
|
||||
|
||||
Command line:
|
||||
```bash
|
||||
snarled connectivity.oas connectivity.txt -m layermap.txt
|
||||
```
|
||||
|
||||
Python interface:
|
||||
```python3
|
||||
from pprint import pformat
|
||||
import logging
|
||||
|
||||
from masque.file import gdsii, oasis
|
||||
import snarled
|
||||
from snarled.types import layer_t
|
||||
import snarled.interfaces.masque
|
||||
|
||||
|
||||
logging.basicConfig()
|
||||
logging.getLogger('snarled').setLevel(logging.INFO)
|
||||
|
||||
|
||||
connectivity = [
|
||||
# Layer definitions
|
||||
connectivity = {
|
||||
((1, 0), (1, 2), (2, 0)), #M1 to M2 (via V12)
|
||||
((1, 0), (1, 3), (3, 0)), #M1 to M3 (via V13)
|
||||
((2, 0), (2, 3), (3, 0)), #M2 to M3 (via V23)
|
||||
]
|
||||
|
||||
labels_map: dict[layer_t, layer_t] = {
|
||||
(1, 0): (1, 0),
|
||||
(2, 0): (2, 0),
|
||||
(3, 0): (3, 0),
|
||||
}
|
||||
|
||||
filename = 'connectivity.oas'
|
||||
|
||||
nets = snarled.trace_layout(filename, connectivity, topcell='top', labels_map=labels_map)
|
||||
result = snarled.TraceAnalysis(nets)
|
||||
cells, props = oasis.readfile('connectivity.oas')
|
||||
topcell = cells['top']
|
||||
|
||||
print('\n')
|
||||
print(result)
|
||||
polys, labels = snarled.interfaces.masque.read_cell(topcell, connectivity)
|
||||
nets_info = snarled.trace_connectivity(polys, labels, connectivity)
|
||||
|
||||
print('\nFinal nets:')
|
||||
print([kk for kk in nets_info.nets if isinstance(kk.name, str)])
|
||||
|
||||
print('\nShorted net sets:')
|
||||
for short in nets_info.get_shorted_nets():
|
||||
print('(' + ','.join([repr(nn) for nn in sorted(list(short))]) + ')')
|
||||
|
||||
print('\nOpen nets:')
|
||||
print(pformat(dict(nets_info.get_open_nets())))
|
||||
```
|
||||
|
||||
this prints the following:
|
||||
|
||||
```
|
||||
INFO:snarled.trace:Adding layer (3, 0)
|
||||
INFO:snarled.trace:Adding layer (2, 3)
|
||||
INFO:snarled.trace:Adding layer (1, 3)
|
||||
INFO:snarled.trace:Adding layer (1, 2)
|
||||
INFO:snarled.trace:Adding layer (1, 0)
|
||||
INFO:snarled.trace:Adding layer (2, 0)
|
||||
Nets ['SignalD', 'SignalI'] are shorted on layer (1, 0) in poly:
|
||||
[[13000.0, -3000.0],
|
||||
[16000.0, -3000.0],
|
||||
[16000.0, -1000.0],
|
||||
[13000.0, -1000.0],
|
||||
[13000.0, 2000.0],
|
||||
[12000.0, 2000.0],
|
||||
[12000.0, -1000.0],
|
||||
[11000.0, -1000.0],
|
||||
[11000.0, -3000.0],
|
||||
[12000.0, -3000.0],
|
||||
[12000.0, -8000.0],
|
||||
[13000.0, -8000.0]]
|
||||
Nets ['SignalK', 'SignalK'] are shorted on layer (1, 0) in poly:
|
||||
[[18500.0, -8500.0], [28200.0, -8500.0], [28200.0, 1000.0], [18500.0, 1000.0]]
|
||||
Nets ['SignalC', 'SignalC'] are shorted on layer (1, 0) in poly:
|
||||
[[10200.0, 0.0], [-1100.0, 0.0], [-1100.0, -1000.0], [10200.0, -1000.0]]
|
||||
Nets ['SignalG', 'SignalH'] are shorted on layer (1, 0) in poly:
|
||||
[[10100.0, -2000.0], [5100.0, -2000.0], [5100.0, -3000.0], [10100.0, -3000.0]]
|
||||
|
||||
Final nets:
|
||||
[SignalA, SignalC__0, SignalE, SignalG, SignalK__0, SignalK__2, SignalL]
|
||||
|
||||
Trace analysis
|
||||
=============
|
||||
Nets
|
||||
(groups of electrically connected labels)
|
||||
SignalA,SignalB
|
||||
SignalC,SignalD,SignalI
|
||||
SignalE,SignalF
|
||||
SignalG,SignalH
|
||||
SignalK
|
||||
SignalK
|
||||
SignalL
|
||||
Shorted net sets:
|
||||
(SignalC__0,SignalC__1,SignalD,SignalI)
|
||||
(SignalK__0,SignalK__1)
|
||||
(SignalG,SignalH)
|
||||
(SignalA,SignalB)
|
||||
(SignalE,SignalF)
|
||||
|
||||
Opens
|
||||
(2+ nets containing the same name)
|
||||
SignalK : 2 nets
|
||||
|
||||
Shorts
|
||||
(2+ unique names for the same net)
|
||||
SignalA,SignalB
|
||||
SignalC,SignalD,SignalI
|
||||
SignalE,SignalF
|
||||
SignalG,SignalH
|
||||
=============
|
||||
Open nets:
|
||||
{'SignalK': [SignalK__0, SignalK__2]}
|
||||
```
|
||||
|
||||
## Code organization
|
||||
|
||||
- The primary functionality is in `trace`; specifically `trace.trace_layout()`.
|
||||
- `main` provides a command-line interface, supported by the functions in `utils`.
|
||||
- The main functionality is in `trace_connectivity`.
|
||||
- Useful classes, namely `NetsInfo` and `NetName`, are in `snarled.tracker`.
|
||||
- `snarled.interfaces` contains helper code for interfacing with other packages.
|
||||
|
||||
## Caveats
|
||||
|
||||
This package is slow, dumb, and the code is ugly. There's only a basic test case.
|
||||
|
||||
If you know what you're doing, you could probably do a much better job of it.
|
||||
|
||||
...but you *have* heard of it :)
|
||||
|
||||
|
@ -1,33 +1,40 @@
|
||||
"""
|
||||
Example code for checking connectivity in a layout by using `snarled`
|
||||
Example code for checking connectivity in a layout by using
|
||||
`snarled` and `masque`.
|
||||
"""
|
||||
from pprint import pformat
|
||||
import logging
|
||||
|
||||
from masque.file import gdsii, oasis
|
||||
|
||||
import snarled
|
||||
from snarled.types import layer_t
|
||||
import snarled.interfaces.masque
|
||||
|
||||
|
||||
logging.basicConfig()
|
||||
logging.getLogger('snarled').setLevel(logging.INFO)
|
||||
|
||||
# How are the conductors connected to each other?
|
||||
|
||||
connectivity = [
|
||||
((1, 0), (1, 2), (2, 0)), #M1 to M2 (via V12)
|
||||
((1, 0), (1, 3), (3, 0)), #M1 to M3 (via V13)
|
||||
((2, 0), (2, 3), (3, 0)), #M2 to M3 (via V23)
|
||||
]
|
||||
|
||||
# What labels should be loaded, and which geometry layers should they apply to?
|
||||
labels_map: dict[layer_t, layer_t] = {
|
||||
(1, 0): (1, 0),
|
||||
(2, 0): (2, 0),
|
||||
(3, 0): (3, 0),
|
||||
}
|
||||
|
||||
filename = 'connectivity.oas'
|
||||
#cells, props = gdsii.readfile('connectivity.gds')
|
||||
cells, props = oasis.readfile('connectivity.oas')
|
||||
topcell = cells['top']
|
||||
|
||||
nets = snarled.trace_layout(filename, connectivity, topcell='top', labels_map=labels_map)
|
||||
result = snarled.TraceAnalysis(nets)
|
||||
get_layer = snarled.interfaces.masque.prepare_cell(topcell)
|
||||
nets_info = snarled.trace_connectivity(get_layer, connectivity)
|
||||
|
||||
print('\n')
|
||||
print(result)
|
||||
print('\nFinal nets:')
|
||||
print([kk for kk in sorted(nets_info.nets.keys()) if isinstance(kk.name, str)])
|
||||
|
||||
print('\nShorted net sets:')
|
||||
for short in nets_info.get_shorted_nets():
|
||||
print('(' + ','.join([repr(nn) for nn in sorted(list(short))]) + ')')
|
||||
|
||||
print('\nOpen nets:')
|
||||
print(pformat(dict(nets_info.get_open_nets())))
|
||||
|
Binary file not shown.
@ -1,3 +0,0 @@
|
||||
M1, V12, M2
|
||||
M1, V13, M3
|
||||
M2, V23, M3
|
@ -1,6 +0,0 @@
|
||||
1/0:M1
|
||||
2/0:M2
|
||||
3/0:M3
|
||||
1/2:V12
|
||||
1/3:V13
|
||||
2/3:V23
|
@ -1,5 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
cd $(dirname -- "$0") # cd to this script's parent directory
|
||||
|
||||
snarled connectivity.oas connectivity.txt -m layermap.txt
|
@ -1,93 +0,0 @@
|
||||
[build-system]
|
||||
requires = ["hatchling"]
|
||||
build-backend = "hatchling.build"
|
||||
|
||||
[project]
|
||||
name = "snarled"
|
||||
description = "CAD layout electrical connectivity checker"
|
||||
readme = "README.md"
|
||||
license = { file = "LICENSE.md" }
|
||||
authors = [
|
||||
{ name="Jan Petykiewicz", email="jan@mpxd.net" },
|
||||
]
|
||||
homepage = "https://mpxd.net/code/jan/snarled"
|
||||
repository = "https://mpxd.net/code/jan/snarled"
|
||||
keywords = [
|
||||
"layout",
|
||||
"design",
|
||||
"CAD",
|
||||
"EDA",
|
||||
"electronics",
|
||||
"photonics",
|
||||
"IC",
|
||||
"mask",
|
||||
"pattern",
|
||||
"drawing",
|
||||
"lvs",
|
||||
"connectivity",
|
||||
"short",
|
||||
"unintentional",
|
||||
"label",
|
||||
"schematic",
|
||||
"verification",
|
||||
"checking",
|
||||
]
|
||||
classifiers = [
|
||||
"Programming Language :: Python :: 3",
|
||||
"Development Status :: 4 - Beta",
|
||||
"Intended Audience :: Developers",
|
||||
"Intended Audience :: Information Technology",
|
||||
"Intended Audience :: Manufacturing",
|
||||
"Intended Audience :: Science/Research",
|
||||
"License :: OSI Approved :: GNU General Public License v3 (GPLv3)",
|
||||
"Topic :: Scientific/Engineering :: Electronic Design Automation (EDA)",
|
||||
]
|
||||
requires-python = ">=3.11"
|
||||
dynamic = ["version"]
|
||||
dependencies = [
|
||||
"klayout~=0.29",
|
||||
]
|
||||
|
||||
|
||||
[tool.hatch.version]
|
||||
path = "snarled/__init__.py"
|
||||
|
||||
[project.scripts]
|
||||
snarled = "snarled.main:main"
|
||||
|
||||
|
||||
[tool.ruff]
|
||||
exclude = [
|
||||
".git",
|
||||
"dist",
|
||||
]
|
||||
line-length = 145
|
||||
indent-width = 4
|
||||
lint.dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"
|
||||
lint.select = [
|
||||
"NPY", "E", "F", "W", "B", "ANN", "UP", "SLOT", "SIM", "LOG",
|
||||
"C4", "ISC", "PIE", "PT", "RET", "TCH", "PTH", "INT",
|
||||
"ARG", "PL", "R", "TRY",
|
||||
"G010", "G101", "G201", "G202",
|
||||
"Q002", "Q003", "Q004",
|
||||
]
|
||||
lint.ignore = [
|
||||
#"ANN001", # No annotation
|
||||
"ANN002", # *args
|
||||
"ANN003", # **kwargs
|
||||
"ANN401", # Any
|
||||
"ANN101", # self: Self
|
||||
"SIM108", # single-line if / else assignment
|
||||
"RET504", # x=y+z; return x
|
||||
"PIE790", # unnecessary pass
|
||||
"ISC003", # non-implicit string concatenation
|
||||
"C408", # dict(x=y) instead of {'x': y}
|
||||
"PLR09", # Too many xxx
|
||||
"PLR2004", # magic number
|
||||
"PLC0414", # import x as x
|
||||
"TRY003", # Long exception message
|
||||
"PTH123", # open()
|
||||
"UP015", # open(..., 'rt')
|
||||
"PLW2901", # overwriting loop var
|
||||
]
|
||||
|
63
setup.py
Normal file
63
setup.py
Normal file
@ -0,0 +1,63 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
from setuptools import setup, find_packages
|
||||
|
||||
|
||||
with open('README.md', 'rt') as f:
|
||||
long_description = f.read()
|
||||
|
||||
with open('snarled/VERSION.py', 'rt') as f:
|
||||
version = f.readlines()[2].strip()
|
||||
|
||||
setup(name='snarled',
|
||||
version=version,
|
||||
description='CAD layout electrical connectivity checker',
|
||||
long_description=long_description,
|
||||
long_description_content_type='text/markdown',
|
||||
author='Jan Petykiewicz',
|
||||
author_email='jan@mpxd.net',
|
||||
url='https://mpxd.net/code/jan/snarled',
|
||||
packages=find_packages(),
|
||||
package_data={
|
||||
'snarled': ['py.typed',
|
||||
]
|
||||
},
|
||||
install_requires=[
|
||||
'numpy',
|
||||
'pyclipper',
|
||||
],
|
||||
extras_require={
|
||||
'masque': ['masque'],
|
||||
'oasis': ['fatamorgana>=0.7'],
|
||||
'gdsii': ['klamath>=1.0'],
|
||||
},
|
||||
classifiers=[
|
||||
'Programming Language :: Python :: 3',
|
||||
'Development Status :: 4 - Beta',
|
||||
'Intended Audience :: Developers',
|
||||
'Intended Audience :: Information Technology',
|
||||
'Intended Audience :: Manufacturing',
|
||||
'Intended Audience :: Science/Research',
|
||||
'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
|
||||
'Topic :: Scientific/Engineering :: Electronic Design Automation (EDA)',
|
||||
],
|
||||
keywords=[
|
||||
'layout',
|
||||
'design',
|
||||
'CAD',
|
||||
'EDA',
|
||||
'electronics',
|
||||
'photonics',
|
||||
'IC',
|
||||
'mask',
|
||||
'pattern',
|
||||
'drawing',
|
||||
'lvs',
|
||||
'connectivity',
|
||||
'short',
|
||||
'unintentional',
|
||||
'label',
|
||||
'schematic',
|
||||
],
|
||||
)
|
||||
|
4
snarled/VERSION.py
Normal file
4
snarled/VERSION.py
Normal file
@ -0,0 +1,4 @@
|
||||
""" VERSION defintion. THIS FILE IS MANUALLY PARSED BY setup.py and REQUIRES A SPECIFIC FORMAT """
|
||||
__version__ = '''
|
||||
0.6
|
||||
'''.strip()
|
@ -7,15 +7,17 @@ Layout connectivity checker.
|
||||
`snarled` is a python package for checking electrical connectivity in multi-layer layouts.
|
||||
|
||||
It is intended to be "poor-man's LVS" (layout-versus-schematic), for when poverty
|
||||
has deprived the man of a schematic and a better connectivity tool.
|
||||
has deprived the man of both a schematic and a better connectivity tool.
|
||||
|
||||
The main functionality is in `trace`.
|
||||
`__main__.py` details the command-line interface.
|
||||
The main functionality is in `trace_connectivity`.
|
||||
Useful classes, namely `NetsInfo` and `NetName`, are in `snarled.tracker`.
|
||||
`snarled.interfaces` contains helper code for interfacing with other packages.
|
||||
"""
|
||||
from .trace import (
|
||||
trace_layout as trace_layout,
|
||||
TraceAnalysis as TraceAnalysis,
|
||||
)
|
||||
from .main import trace_connectivity, trace_connectivity_preloaded
|
||||
from .tracker import NetsInfo, NetName
|
||||
from . import interfaces
|
||||
|
||||
|
||||
__author__ = 'Jan Petykiewicz'
|
||||
__version__ = '1.0'
|
||||
|
||||
from .VERSION import __version__
|
||||
|
@ -1,3 +0,0 @@
|
||||
|
||||
from .main import main
|
||||
main()
|
64
snarled/clipper.py
Normal file
64
snarled/clipper.py
Normal file
@ -0,0 +1,64 @@
|
||||
"""
|
||||
Wrappers to simplify some pyclipper functions
|
||||
"""
|
||||
from typing import Sequence, Optional, List
|
||||
|
||||
from numpy.typing import ArrayLike
|
||||
from pyclipper import (
|
||||
Pyclipper, PT_CLIP, PT_SUBJECT, CT_UNION, CT_INTERSECTION, PFT_NONZERO, PFT_EVENODD,
|
||||
PyPolyNode, CT_DIFFERENCE,
|
||||
)
|
||||
|
||||
from .types import contour_t
|
||||
|
||||
|
||||
def union_nonzero(shapes: Sequence[ArrayLike]) -> Optional[PyPolyNode]:
|
||||
if not shapes:
|
||||
return None
|
||||
pc = Pyclipper()
|
||||
pc.AddPaths(shapes, PT_CLIP, closed=True)
|
||||
result = pc.Execute2(CT_UNION, PFT_NONZERO, PFT_NONZERO)
|
||||
return result
|
||||
|
||||
|
||||
def union_evenodd(shapes: Sequence[ArrayLike]) -> List[contour_t]:
|
||||
if not shapes:
|
||||
return []
|
||||
pc = Pyclipper()
|
||||
pc.AddPaths(shapes, PT_CLIP, closed=True)
|
||||
return pc.Execute(CT_UNION, PFT_EVENODD, PFT_EVENODD)
|
||||
|
||||
|
||||
def intersection_evenodd(
|
||||
subject_shapes: Sequence[ArrayLike],
|
||||
clip_shapes: Sequence[ArrayLike],
|
||||
) -> List[contour_t]:
|
||||
if not subject_shapes or not clip_shapes:
|
||||
return []
|
||||
pc = Pyclipper()
|
||||
pc.AddPaths(subject_shapes, PT_SUBJECT, closed=True)
|
||||
pc.AddPaths(clip_shapes, PT_CLIP, closed=True)
|
||||
return pc.Execute(CT_INTERSECTION, PFT_EVENODD, PFT_EVENODD)
|
||||
|
||||
|
||||
def difference_evenodd(
|
||||
subject_shapes: Sequence[ArrayLike],
|
||||
clip_shapes: Sequence[ArrayLike],
|
||||
) -> List[contour_t]:
|
||||
if not subject_shapes:
|
||||
return []
|
||||
if not clip_shapes:
|
||||
return subject_shapes
|
||||
pc = Pyclipper()
|
||||
pc.AddPaths(subject_shapes, PT_SUBJECT, closed=True)
|
||||
pc.AddPaths(clip_shapes, PT_CLIP, closed=True)
|
||||
return pc.Execute(CT_DIFFERENCE, PFT_EVENODD, PFT_EVENODD)
|
||||
|
||||
|
||||
def hier2oriented(polys: Sequence[PyPolyNode]) -> List[ArrayLike]:
|
||||
contours = []
|
||||
for poly in polys:
|
||||
contours.append(poly.Contour)
|
||||
contours += [hole.Contour for hole in poly.Childs]
|
||||
|
||||
return contours
|
0
snarled/interfaces/__init__.py
Normal file
0
snarled/interfaces/__init__.py
Normal file
127
snarled/interfaces/masque.py
Normal file
127
snarled/interfaces/masque.py
Normal file
@ -0,0 +1,127 @@
|
||||
"""
|
||||
Functionality for extracting geometry and label info from `masque` patterns.
|
||||
"""
|
||||
from typing import Sequence, Dict, List, Any, Tuple, Optional, Mapping, Callable
|
||||
from collections import defaultdict
|
||||
|
||||
import numpy
|
||||
from numpy.typing import NDArray
|
||||
from masque import Pattern
|
||||
from masque.file import oasis, gdsii
|
||||
from masque.shapes import Polygon
|
||||
|
||||
from ..types import layer_t
|
||||
from ..utils import connectivity2layers
|
||||
|
||||
|
||||
def prepare_cell(
|
||||
cell: Pattern,
|
||||
label_mapping: Optional[Mapping[layer_t, layer_t]] = None,
|
||||
) -> Callable[[layer_t], Tuple[
|
||||
List[NDArray[numpy.float64]],
|
||||
List[Tuple[float, float, str]]
|
||||
]]:
|
||||
"""
|
||||
Generate a function for extracting `polys` and `labels` from a `masque.Pattern`.
|
||||
The returned function can be passed to `snarled.trace_connectivity`.
|
||||
|
||||
Args:
|
||||
cell: A `masque` `Pattern` object. Usually your topcell.
|
||||
label_mapping: A mapping of `{label_layer: metal_layer}`. This allows labels
|
||||
to refer to nets on metal layers without the labels themselves being on
|
||||
that layer.
|
||||
Default `None` reads labels from the same layer as the geometry.
|
||||
|
||||
Returns:
|
||||
`get_layer` function, to be passed to `snarled.trace_connectivity`.
|
||||
"""
|
||||
|
||||
def get_layer(
|
||||
layer: layer_t,
|
||||
) -> Tuple[
|
||||
List[NDArray[numpy.float64]],
|
||||
List[Tuple[float, float, str]]
|
||||
]:
|
||||
|
||||
if label_mapping is None:
|
||||
label_layers = {layer: layer}
|
||||
else:
|
||||
label_layers = {label_layer for label_layer, metal_layer in label_mapping.items()
|
||||
if metal_layer == layer}
|
||||
|
||||
subset = cell.deepcopy().subset( # TODO add single-op subset-and-copy, to avoid copying unwanted stuff
|
||||
shapes_func=lambda ss: ss.layer == layer,
|
||||
labels_func=lambda ll: ll.layer in label_layers,
|
||||
subpatterns_func=lambda ss: True,
|
||||
recursive=True,
|
||||
)
|
||||
|
||||
polygonized = subset.polygonize() # Polygonize Path shapes
|
||||
flat = polygonized.flatten()
|
||||
|
||||
# load polygons
|
||||
polys = []
|
||||
for ss in flat.shapes:
|
||||
assert(isinstance(ss, Polygon))
|
||||
|
||||
if ss.repetition is None:
|
||||
displacements = [(0, 0)]
|
||||
else:
|
||||
displacements = ss.repetition.displacements
|
||||
|
||||
for displacement in displacements:
|
||||
polys.append(
|
||||
ss.vertices + ss.offset + displacement
|
||||
)
|
||||
|
||||
# load metal labels
|
||||
labels = []
|
||||
for ll in flat.labels:
|
||||
if ll.repetition is None:
|
||||
displacements = [(0, 0)]
|
||||
else:
|
||||
displacements = ll.repetition.displacements
|
||||
|
||||
for displacement in displacements:
|
||||
offset = ll.offset + displacement
|
||||
labels.append((*offset, ll.string))
|
||||
|
||||
return polys, labels
|
||||
return get_layer
|
||||
|
||||
|
||||
def read_cell(
|
||||
cell: Pattern,
|
||||
connectivity: Sequence[Tuple[layer_t, Optional[layer_t], layer_t]],
|
||||
label_mapping: Optional[Mapping[layer_t, layer_t]] = None,
|
||||
) -> Tuple[
|
||||
defaultdict[layer_t, List[NDArray[numpy.float64]]],
|
||||
defaultdict[layer_t, List[Tuple[float, float, str]]]]:
|
||||
"""
|
||||
Extract `polys` and `labels` from a `masque.Pattern`.
|
||||
|
||||
This function extracts the data needed by `snarled.trace_connectivity`.
|
||||
|
||||
Args:
|
||||
cell: A `masque` `Pattern` object. Usually your topcell.
|
||||
connectivity: A sequence of 3-tuples specifying the layer connectivity.
|
||||
Same as what is provided to `snarled.trace_connectivity`.
|
||||
label_mapping: A mapping of `{label_layer: metal_layer}`. This allows labels
|
||||
to refer to nets on metal layers without the labels themselves being on
|
||||
that layer.
|
||||
|
||||
Returns:
|
||||
`polys` and `labels` data structures, to be passed to `snarled.trace_connectivity`.
|
||||
"""
|
||||
|
||||
metal_layers, via_layers = connectivity2layers(connectivity)
|
||||
poly_layers = metal_layers | via_layers
|
||||
|
||||
get_layer = prepare_cell(cell, label_mapping)
|
||||
|
||||
polys = defaultdict(list)
|
||||
labels = defaultdict(list)
|
||||
for layer in poly_layers:
|
||||
polys[layer], labels[layer] = get_layer(layer)
|
||||
|
||||
return polys, labels
|
399
snarled/main.py
399
snarled/main.py
@ -1,80 +1,369 @@
|
||||
from typing import Any
|
||||
import argparse
|
||||
"""
|
||||
Main connectivity-checking functionality for `snarled`
|
||||
"""
|
||||
from typing import Tuple, List, Dict, Set, Optional, Union, Sequence, Mapping, Callable
|
||||
from collections import defaultdict
|
||||
from pprint import pformat
|
||||
from concurrent.futures import ThreadPoolExecutor
|
||||
import logging
|
||||
|
||||
from . import utils
|
||||
from .trace import trace_layout, TraceAnalysis
|
||||
import numpy
|
||||
from numpy.typing import NDArray, ArrayLike
|
||||
from pyclipper import scale_to_clipper, scale_from_clipper, PyPolyNode
|
||||
|
||||
from .types import connectivity_t, layer_t, contour_t
|
||||
from .poly import poly_contains_points, intersects
|
||||
from .clipper import union_nonzero, union_evenodd, intersection_evenodd, difference_evenodd, hier2oriented
|
||||
from .tracker import NetsInfo, NetName
|
||||
from .utils import connectivity2layers
|
||||
|
||||
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def main() -> int:
|
||||
parser = argparse.ArgumentParser(
|
||||
prog='snarled',
|
||||
description='layout connectivity checker',
|
||||
)
|
||||
def trace_connectivity_preloaded(
|
||||
polys: Mapping[layer_t, Sequence[ArrayLike]],
|
||||
labels: Mapping[layer_t, Sequence[Tuple[float, float, str]]],
|
||||
connectivity: Sequence[Tuple[layer_t, Optional[layer_t], layer_t]],
|
||||
clipper_scale_factor: int = int(2 ** 24),
|
||||
) -> NetsInfo:
|
||||
"""
|
||||
Analyze the electrical connectivity of the provided layout.
|
||||
|
||||
parser.add_argument('file_path')
|
||||
parser.add_argument('connectivity_path')
|
||||
parser.add_argument('-m', '--layermap')
|
||||
parser.add_argument('-t', '--top')
|
||||
parser.add_argument('-p', '--labels-remap')
|
||||
The resulting `NetsInfo` will contain only disjoint `nets`, and its `net_aliases` can be used to
|
||||
understand which nets are shorted (and therefore known by more than one name).
|
||||
|
||||
parser.add_argument('-l', '--lfile-path')
|
||||
parser.add_argument('-r', '--lremap')
|
||||
parser.add_argument('-n', '--llayermap')
|
||||
parser.add_argument('-s', '--ltop')
|
||||
Args:
|
||||
polys: A full description of all conducting paths in the layout. Consists of lists of polygons
|
||||
(Nx2 arrays of vertices), indexed by layer. The structure looks roughly like
|
||||
`{layer0: [poly0, poly1, ..., [(x0, y0), (x1, y1), ...]], ...}`
|
||||
labels: A list of "named points" which are used to assign names to the nets they touch.
|
||||
A collection of lists of (x, y, name) tuples, indexed *by the layer they target*.
|
||||
`{layer0: [(x0, y0, name0), (x1, y1, name1), ...], ...}`
|
||||
connectivity: A sequence of 3-tuples specifying the electrical connectivity between layers.
|
||||
Each 3-tuple looks like `(top_layer, via_layer, bottom_layer)` and indicates that
|
||||
`top_layer` and `bottom_layer` are electrically connected at any location where
|
||||
shapes are present on all three (top, via, and bottom) layers.
|
||||
`via_layer` may be `None`, in which case any overlap between shapes on `top_layer`
|
||||
and `bottom_layer` is automatically considered a short (with no third shape necessary).
|
||||
clipper_scale_factor: `pyclipper` uses 64-bit integer math, while we accept either floats or ints.
|
||||
The coordinates from `polys` are scaled by this factor to put them roughly in the middle of
|
||||
the range `pyclipper` wants; you may need to adjust this if you are already using coordinates
|
||||
with large integer values.
|
||||
|
||||
parser.add_argument('-o', '--output')
|
||||
parser.add_argument('-u', '--raw-label-names', action='store_true')
|
||||
Returns:
|
||||
`NetsInfo` object describing the various nets and their connectivities.
|
||||
"""
|
||||
def get_layer(layer: layer_t) -> Tuple[Sequence[ArrayLike], Sequence[Tuple[float, float, str]]]:
|
||||
return polys[layer], labels[layer]
|
||||
|
||||
args = parser.parse_args()
|
||||
return trace_connectivity(get_layer, connectivity, clipper_scale_factor)
|
||||
|
||||
filepath = args.file_path
|
||||
connectivity = utils.read_connectivity(args.connectivity_path)
|
||||
|
||||
kwargs: dict[str, Any] = {}
|
||||
def trace_connectivity(
|
||||
get_layer: Callable[[layer_t], Tuple[Sequence[ArrayLike], Sequence[Tuple[float, float, str]]]],
|
||||
connectivity: Sequence[Tuple[layer_t, Optional[layer_t], layer_t]],
|
||||
clipper_scale_factor: int = int(2 ** 24),
|
||||
) -> NetsInfo:
|
||||
"""
|
||||
Analyze the electrical connectivity of a layout.
|
||||
|
||||
if args.layermap:
|
||||
kwargs['layer_map'] = utils.read_layermap(args.layermap)
|
||||
The resulting `NetsInfo` will contain only disjoint `nets`, and its `net_aliases` can be used to
|
||||
understand which nets are shorted (and therefore known by more than one name).
|
||||
|
||||
if args.top:
|
||||
kwargs['topcell'] = args.top
|
||||
This function attempts to reduce memory usage by lazy-loading layout data (layer-by-layer) and
|
||||
pruning away layers for which all interactions have already been computed.
|
||||
TODO: In the future, this will be extended to cover partial loading of spatial extents in
|
||||
addition to layers.
|
||||
|
||||
if args.labels_remap:
|
||||
kwargs['labels_remap'] = utils.read_remap(args.labels_remap)
|
||||
Args:
|
||||
get_layer: When called, `polys, labels = get_layer(layer)` should return the geometry and labels
|
||||
on that layer. Returns
|
||||
|
||||
if args.lfile_path:
|
||||
assert args.lremap
|
||||
kwargs['lfile_path'] = args.lfile_path
|
||||
kwargs['lfile_map'] = utils.read_remap(args.lremap)
|
||||
polys, A list of polygons (Nx2 arrays of vertices) on the layer. The structure looks like
|
||||
`[poly0, poly1, ..., [(x0, y0), (x1, y1), ...]]`
|
||||
|
||||
if args.llayermap:
|
||||
kwargs['lfile_layermap'] = utils.read_layermap(args.llayermap)
|
||||
labels, A list of "named points" which are used to assign names to the nets they touch.
|
||||
A list of (x, y, name) tuples targetting this layer.
|
||||
`[(x0, y0, name0), (x1, y1, name1), ...]`
|
||||
|
||||
if args.ltop:
|
||||
kwargs['lfile_topcell'] = args.ltop
|
||||
connectivity: A sequence of 3-tuples specifying the electrical connectivity between layers.
|
||||
|
||||
if args.output:
|
||||
kwargs['output_path'] = args.output
|
||||
Each 3-tuple looks like `(top_layer, via_layer, bottom_layer)` and indicates that
|
||||
`top_layer` and `bottom_layer` are electrically connected at any location where
|
||||
shapes are present on all three (top, via, and bottom) layers.
|
||||
|
||||
if not args.raw_label_names:
|
||||
from .utils import strip_underscored_label as parse_label
|
||||
`via_layer` may be `None`, in which case any overlap between shapes on `top_layer`
|
||||
and `bottom_layer` is automatically considered a short (with no third shape necessary).
|
||||
|
||||
NOTE that the order in which connectivity is specified (i.e. top-level ordering of the
|
||||
tuples) directly sets the order in which the layers are loaded and merged, and thus
|
||||
has a significant impact on memory usage by determining when layers can be pruned away.
|
||||
Try to group entries by the layers they affect!
|
||||
|
||||
clipper_scale_factor: `pyclipper` uses 64-bit integer math, while we accept either floats or ints.
|
||||
The coordinates from `polys` are scaled by this factor to put them roughly in the middle of
|
||||
the range `pyclipper` wants; you may need to adjust this if you are already using coordinates
|
||||
with large integer values.
|
||||
|
||||
Returns:
|
||||
`NetsInfo` object describing the various nets and their connectivities.
|
||||
"""
|
||||
loaded_layers = set()
|
||||
nets_info = NetsInfo()
|
||||
|
||||
for ii, (top_layer, via_layer, bot_layer) in enumerate(connectivity):
|
||||
logger.info(f'{ii}, {top_layer}, {via_layer}, {bot_layer}')
|
||||
for metal_layer in (top_layer, bot_layer):
|
||||
if metal_layer in loaded_layers:
|
||||
continue
|
||||
# Load and run initial union on each layer
|
||||
raw_polys, labels = get_layer(metal_layer)
|
||||
polys = union_input_polys(scale_to_clipper(raw_polys, clipper_scale_factor))
|
||||
|
||||
# Check each polygon for labels, and assign it to a net (possibly anonymous).
|
||||
nets_on_layer, merge_groups = label_polys(polys, labels, clipper_scale_factor)
|
||||
for name, net_polys in nets_on_layer.items():
|
||||
nets_info.nets[name][metal_layer] += hier2oriented(net_polys)
|
||||
|
||||
# Merge any nets that were shorted by having their labels on the same polygon
|
||||
for group in merge_groups:
|
||||
net_names = set(nn.name for nn in group)
|
||||
if len(net_names) > 1:
|
||||
logger.warning(f'Nets {net_names} are shorted on layer {metal_layer}')
|
||||
first_net, *defunct_nets = group
|
||||
for defunct_net in defunct_nets:
|
||||
nets_info.merge(first_net, defunct_net)
|
||||
|
||||
loaded_layers.add(metal_layer)
|
||||
|
||||
# Load and union vias
|
||||
via_raw_polys, _labels = get_layer(via_layer)
|
||||
via_union = union_input_polys(scale_to_clipper(via_raw_polys, clipper_scale_factor))
|
||||
via_polylists = scale_from_clipper(hier2oriented(via_union), clipper_scale_factor)
|
||||
via_polys = [numpy.array(vv) for vv in via_polylists]
|
||||
|
||||
# Figure out which nets are shorted by vias, then merge them
|
||||
merge_pairs = find_merge_pairs(nets_info.nets, top_layer, bot_layer, via_polys, clipper_scale_factor)
|
||||
for net_a, net_b in merge_pairs:
|
||||
nets_info.merge(net_a, net_b)
|
||||
|
||||
|
||||
remaining_layers = set()
|
||||
for layer_a, _, layer_b in connectivity[ii + 1:]:
|
||||
remaining_layers.add(layer_a)
|
||||
remaining_layers.add(layer_b)
|
||||
|
||||
finished_layers = loaded_layers - remaining_layers
|
||||
for layer in finished_layers:
|
||||
nets_info.prune(layer)
|
||||
loaded_layers.remove(layer)
|
||||
|
||||
return nets_info
|
||||
|
||||
|
||||
def union_input_polys(polys: Sequence[ArrayLike]) -> List[PyPolyNode]:
    """
    Run a union operation over the given polygons and return the list of
    `PyPolyNode`s for every "outer" (non-hole) contour in the result.

    Islands count as "outer" contours and so appear in the returned list,
    even though they also remain reachable via the `.Childs` attribute of
    the hole that surrounds them. Holes themselves are *not* returned;
    they stay reachable only via their parent outer contour's `.Childs`.

    Args:
        polys: Sequence of polygons, `[[(x0, y0), (x1, y1), ...], poly1, poly2, ...]`
            Polygons may be implicitly closed.

    Returns:
        List of PyPolyNodes covering every "outer" contour (islands included)
        in the union of `polys`.
    """
    # pyclipper works on integers; warn (once) if truncation will occur
    if any((numpy.abs(contour) % 1).any() for contour in polys):
        logger.warning('Warning: union_polys got non-integer coordinates; all values will be truncated.')

    #TODO: check if we need to reverse the order of points in some polygons
    # via sum((x2-x1)(y2+y1)) (-ve means ccw)

    poly_tree = union_nonzero(polys)
    if poly_tree is None:
        return []

    # Partially flatten the tree: every non-hole node becomes a root of the output list
    outer_nodes = []
    pending = [poly_tree]   # each entry is the tree root (a container) or a hole
    while pending:
        container = pending.pop()
        for outer in container.Childs:
            outer_nodes.append(outer)
            pending.extend(outer.Childs)  # type: ignore  # holes nested under this contour

    return outer_nodes
|
||||
def label_polys(
        polys: Sequence[PyPolyNode],
        labels: Sequence[Tuple[float, float, str]],
        clipper_scale_factor: int,
        ) -> Tuple[
                defaultdict[NetName, List[PyPolyNode]],
                List[List[NetName]]
                ]:
    """
    Assign each polygon to a net (possibly anonymous) based on which labels
    fall inside it.

    NOTE: This block previously contained code spliced in from an unrelated
    function (a `parse_label` definition, a `trace_layout(...)` call,
    `TraceAnalysis` printing, and an unreachable `return 0`) — apparently a
    mis-merged diff. That foreign code has been removed and the original
    control flow restored.

    Args:
        polys: Polygons (possibly with holes), as returned by `union_input_polys`.
        labels: Sequence of `(x, y, name)` label tuples, in unscaled coordinates.
        clipper_scale_factor: Integer scale factor applied to the polygons'
            (pyclipper) coordinates; used to descale them before point tests.

    Returns:
        nets: `{net_name: [poly0, poly1, ...]}` mapping of nets to their polygons.
        merge_groups: Lists of `NetName`s which were found on the same polygon
            (i.e. nets which are shorted together and should later be merged).
    """
    merge_groups: List[List[NetName]] = []
    point_xys = []
    point_names = []
    nets: defaultdict[NetName, List[PyPolyNode]] = defaultdict(list)

    for x, y, point_name in labels:
        point_xys.append((x, y))
        point_names.append(point_name)

    for poly in polys:
        found_nets = label_poly(poly, point_xys, point_names, clipper_scale_factor)

        if found_nets:
            name = NetName(found_nets[0])
        else:
            name = NetName()    # Anonymous net

        nets[name].append(poly)

        if len(found_nets) > 1:
            # Multiple labels landed on one polygon -> those nets are shorted
            merge_groups.append([name] + [NetName(nn) for nn in found_nets[1:]])

    return nets, merge_groups
def label_poly(
        poly: PyPolyNode,
        point_xys: ArrayLike,
        point_names: Sequence[str],
        clipper_scale_factor: int,
        ) -> List[str]:
    """
    Given a `PyPolyNode` (a polygon, possibly with holes) and a set of named
    points, return the names of the points that lie inside the polygon.

    Args:
        poly: A polygon, possibly with holes. "Islands" inside the holes (and
            deeper-nested structures) are not considered (i.e. only one
            non-hole contour is considered).
        point_xys: Point coordinates (Nx2, `[(x0, y0), (x1, y1), ...]`).
        point_names: Point names (same length N as `point_xys`).
        clipper_scale_factor: The PyPolyNode structure is from `pyclipper` and
            likely has a scale factor applied in order to use integer
            arithmetic. Due to precision limitations in `poly_contains_points`,
            it's preferable to undo this scaling rather than asking for
            similarly-scaled `point_xys` coordinates.
            NOTE: This could be fixed by using `numpy.longdouble` in
            `poly_contains_points`, but the exact length of long-doubles is
            platform-dependent and so probably best avoided.

    Result:
        All the `point_names` which correspond to points inside the polygon
        (but not in its holes).
    """
    if not point_names:
        return []

    outer_contour = scale_from_clipper(poly.Contour, clipper_scale_factor)
    contained = poly_contains_points(outer_contour, point_xys)

    # Points inside a hole are not on this polygon's net
    for hole in poly.Childs:
        hole_contour = scale_from_clipper(hole.Contour, clipper_scale_factor)
        contained &= ~poly_contains_points(hole_contour, point_xys)

    if not contained.any():
        return []

    return sorted(name for name, hit in zip(point_names, contained) if hit)
def find_merge_pairs(
        nets: Mapping[NetName, Mapping[layer_t, Sequence[contour_t]]],
        top_layer: layer_t,
        bot_layer: layer_t,
        via_polys: Optional[Sequence[contour_t]],
        clipper_scale_factor: int,
        ) -> Set[Tuple[NetName, NetName]]:
    """
    Given a collection of (possibly anonymous) nets, figure out which pairs of
    nets are shorted through a via (and thus should be merged).

    Args:
        nets: A collection of all nets (sequences of polygons in mappings
            indexed by `NetName` and layer). See `NetsInfo.nets`.
        top_layer: Layer name of first layer
        bot_layer: Layer name of second layer
        via_polys: Sequence of via contours. `None` denotes no vias necessary
            (overlap is sufficient).
        clipper_scale_factor: Scale factor used for the integer (pyclipper)
            coordinates; passed through to `check_overlap` for descaling.

    Returns:
        A set containing pairs of `NetName`s for each pair of nets which are shorted.
    """
    merge_pairs: Set[Tuple[NetName, NetName]] = set()
    if via_polys is not None and not via_polys:
        logger.warning(f'No vias between layers {top_layer}, {bot_layer}')
        return merge_pairs

    tested_pairs = set()
    futures = []
    with ThreadPoolExecutor() as executor:
        for top_name in nets.keys():
            top_polys = nets[top_name][top_layer]
            if not top_polys:
                continue

            for bot_name in nets.keys():
                if bot_name == top_name:
                    continue

                # Normalize the pair ordering so (a, b) and (b, a) test only once
                name_pair: Tuple[NetName, NetName] = tuple(sorted((top_name, bot_name)))    #type: ignore
                if name_pair in tested_pairs:
                    continue
                tested_pairs.add(name_pair)

                bot_polys = nets[bot_name][bot_layer]
                if not bot_polys:
                    continue

                # `set.add` is atomic under the GIL, so the callback may run from any worker
                futures.append(executor.submit(
                    check_overlap, top_polys, via_polys, bot_polys, clipper_scale_factor,
                    lambda np=name_pair: merge_pairs.add(np)))

    # BUGFIX: previously the futures were discarded, so any exception raised
    # inside a worker thread was silently swallowed. Re-raise them here.
    for future in futures:
        future.result()

    return merge_pairs
def check_overlap(
        top_polys: Sequence[contour_t],
        via_polys: Optional[Sequence[NDArray[numpy.float64]]],
        bot_polys: Sequence[contour_t],
        clipper_scale_factor: int,
        action: Callable[[], None],
        ) -> None:
    """
    Test whether the top and bottom polygons interact — directly when
    `via_polys` is `None`, or through the vias otherwise — and invoke
    `action()` if they do.
    """
    if via_polys is None:
        # No via layer: direct overlap between the two metal layers suffices
        # overlap = intersection_evenodd(top_polys, bot_polys) # TODO verify there aren't any suspicious corner cases for this
        touching = check_any_intersection(
            scale_from_clipper(top_polys, clipper_scale_factor),
            scale_from_clipper(bot_polys, clipper_scale_factor))
    else:
        # Only top/bottom overlap which is also covered by a via counts
        top_bot = intersection_evenodd(top_polys, bot_polys)
        descaled = scale_from_clipper(top_bot, clipper_scale_factor)
        touching = check_any_intersection(descaled, via_polys)
        # overlap = intersection_evenodd(top_bot, via_polys)
        # via_polys = difference_evenodd(via_polys, overlap) # reduce set of via polys for future nets

    if touching:
        action()
def check_any_intersection(polys_a, polys_b) -> bool:
    """ Return True if any polygon in `polys_a` intersects any polygon in `polys_b`. """
    return any(intersects(aa, bb) for aa in polys_a for bb in polys_b)
||||
|
157
snarled/poly.py
Normal file
157
snarled/poly.py
Normal file
@ -0,0 +1,157 @@
|
||||
"""
|
||||
Utilities for working with polygons
|
||||
"""
|
||||
import numpy
|
||||
from numpy.typing import NDArray, ArrayLike
|
||||
|
||||
|
||||
def poly_contains_points(
        vertices: ArrayLike,
        points: ArrayLike,
        include_boundary: bool = True,
        ) -> NDArray[numpy.bool_]:
    """
    Tests whether the provided points are inside the implicitly closed polygon
    described by the provided list of vertices.

    Args:
        vertices: Nx2 ArrayLike of form [[x0, y0], [x1, y1], ...], describing an implicitly-
            closed polygon. Note that this should include any offsets.
        points: Nx2 ArrayLike of form [[x0, y0], [x1, y1], ...] containing the points to test.
        include_boundary: True if points on the boundary should count as inside the shape.
            Default True.

    Returns:
        ndarray of booleans, [point0_is_in_shape, point1_is_in_shape, ...]
    """
    # BUGFIX: numpy.array(..., copy=False) raises ValueError under NumPy >= 2.0
    # whenever a copy is actually required (e.g. for a plain list input);
    # numpy.asarray is the supported "avoid a copy when possible" spelling.
    points = numpy.asarray(points)
    vertices = numpy.asarray(vertices)

    if points.size == 0:
        # BUGFIX: was numpy.zeros(0) (float dtype); return bool to match the non-empty path
        return numpy.zeros(0, dtype=bool)

    min_bounds = numpy.min(vertices, axis=0)[None, :]
    max_bounds = numpy.max(vertices, axis=0)[None, :]

    # Points outside the bounding box are trivially outside the polygon
    trivially_outside = ((points < min_bounds).any(axis=1)
                       | (points > max_bounds).any(axis=1))

    nontrivial = ~trivially_outside
    if trivially_outside.all():
        inside = numpy.zeros_like(trivially_outside, dtype=bool)
        return inside

    ntpts = points[None, nontrivial, :]     # nontrivial points, along axis 1 of ndarray
    verts = vertices[:, None, :]            # vertices, along axis 0
    xydiff = ntpts - verts                  # Expands into (n_vertices, n_ntpts, 2)

    y0_le = xydiff[:, :, 1] >= 0            # y_point >= y_vertex (axes 0, 1 for all points & vertices)
    y1_le = numpy.roll(y0_le, -1, axis=0)   # same thing for next vertex

    upward = y0_le & ~y1_le     # edge passes point y coord going upwards
    downward = ~y0_le & y1_le   # edge passes point y coord going downwards

    dv = numpy.roll(verts, -1, axis=0) - verts
    is_left = (dv[..., 0] * xydiff[..., 1]  # >0 if left of dv, <0 if right, 0 if on the line
             - dv[..., 1] * xydiff[..., 0])

    # Standard winding-number accumulation over all edges
    winding_number = ((upward & (is_left > 0)).sum(axis=0)
                    - (downward & (is_left < 0)).sum(axis=0))

    nontrivial_inside = winding_number != 0     # filter nontrivial points based on winding number
    if include_boundary:
        # NOTE(review): is_left == 0 tests collinearity with the edge *line*,
        # not containment in the edge segment — kept as-is to preserve behavior.
        nontrivial_inside[(is_left == 0).any(axis=0)] = True    # check if point lies on any edge

    inside = nontrivial.copy()
    inside[nontrivial] = nontrivial_inside
    return inside
def intersects(poly_a: ArrayLike, poly_b: ArrayLike) -> bool:
    """
    Check if two polygons overlap and/or touch.

    Args:
        poly_a: List of vertices, implicitly closed: `[[x0, y0], [x1, y1], ...]`
        poly_b: List of vertices, implicitly closed: `[[x0, y0], [x1, y1], ...]`

    Returns:
        `True` if the polygons overlap and/or touch.
    """
    # BUGFIX: numpy.array(..., copy=False) raises ValueError under NumPy >= 2.0
    # whenever a copy is actually required; use numpy.asarray instead.
    poly_a = numpy.asarray(poly_a)
    poly_b = numpy.asarray(poly_b)

    # Check bounding boxes: disjoint boxes mean no intersection is possible
    min_a = poly_a.min(axis=0)
    min_b = poly_b.min(axis=0)
    max_a = poly_a.max(axis=0)
    max_b = poly_b.max(axis=0)

    if (min_a > max_b).any() or (min_b > max_a).any():
        return False

    #TODO: Check against sorted coords?

    # Check if edges intersect
    if poly_edges_intersect(poly_a, poly_b):
        return True

    # Check if either polygon contains the other
    if poly_contains_points(poly_b, poly_a).any():
        return True

    if poly_contains_points(poly_a, poly_b).any():
        return True

    return False
def poly_edges_intersect(
        poly_a: NDArray[numpy.float64],
        poly_b: NDArray[numpy.float64],
        ) -> NDArray[numpy.int_]:
    """
    Check if the edges of two polygons intersect.

    Args:
        poly_a: NDArray of vertices, implicitly closed: `[[x0, y0], [x1, y1], ...]`
        poly_b: NDArray of vertices, implicitly closed: `[[x0, y0], [x1, y1], ...]`

    Returns:
        `True` if the polygons' edges intersect.
    """
    # Each edge runs from a vertex to the next one (wrapping around at the end)
    ends_a = numpy.roll(poly_a, -1, axis=0)
    ends_b = numpy.roll(poly_b, -1, axis=0)

    # Broadcast a-edges along axis 0 against b-edges along axis 1
    d1 = (ends_a - poly_a)[:, None, :]          # direction of each a-edge
    d2 = (ends_b - poly_b)[None, :, :]          # direction of each b-edge
    d0 = poly_a[:, None, :] - poly_b[None, :, :]    # a-start minus b-start

    # Cross products for the parametric segment-intersection solution
    numerator_a = d2[..., 0] * d0[..., 1] - d2[..., 1] * d0[..., 0]
    numerator_b = d1[..., 0] * d0[..., 1] - d1[..., 1] * d0[..., 0]
    denominator = d2[..., 1] * d1[..., 0] - d2[..., 0] * d1[..., 1]

    # Avoid warnings since we may divide by zero (parallel edges) and compare NaN
    with numpy.errstate(invalid='ignore', divide='ignore'):
        u_a = numerator_a / denominator
        u_b = numerator_b / denominator

    # A pair of segments intersects when both parameters fall in [0, 1]
    crossing = (u_a >= 0) & (u_a <= 1) & (u_b >= 0) & (u_b <= 1)

    return crossing.any()
0
snarled/py.typed
Normal file
0
snarled/py.typed
Normal file
315
snarled/trace.py
315
snarled/trace.py
@ -1,315 +0,0 @@
|
||||
from collections.abc import Sequence, Iterable
|
||||
import logging
|
||||
from collections import Counter
|
||||
from itertools import chain
|
||||
|
||||
from klayout import db
|
||||
from .types import lnum_t, layer_t
|
||||
from .utils import SnarledError
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class TraceAnalysis:
    """
    Short/Open analysis for a list of nets
    """

    nets: list[set[str]]
    """ List of nets (connected sets of labels) """

    opens: dict[str, int]
    """ Labels which appear on 2+ disconnected nets, and the number of nets they touch """

    shorts: list[set[str]]
    """ Nets containing more than one unique label """

    def __init__(
            self,
            nets: Sequence[Iterable[str]],
            ) -> None:
        """
        Args:
            nets: Sequence of nets. Each net is a sequence of labels
                which were found to be electrically connected.
        """
        as_sets = [set(net) for net in nets]

        # How many distinct nets does each label appear on?
        label_counts = Counter(chain.from_iterable(as_sets))

        self.nets = as_sets
        # A net carrying 2+ distinct labels is a short
        self.shorts = [net for net in as_sets if len(net) > 1]
        # A label found on 2+ separate nets is an open
        self.opens = {
            label: count
            for label, count in label_counts.items()
            if count > 1
            }

    def __repr__(self) -> str:
        def format_net(net: Iterable[str]) -> str:
            quoted = [
                f"'{label}'" if any(ws in label for ws in ' \t\n') else label
                for label in sorted(net)
                ]
            return ','.join(quoted)

        def sort_nets(nets: Sequence[Iterable[str]]) -> list[Iterable[str]]:
            return sorted(nets, key=lambda net: ','.join(sorted(net)))

        parts = ['Trace analysis', '\n=============']

        parts.append('\nNets')
        parts.append('\n(groups of electrically connected labels)\n')
        for net in sort_nets(self.nets):
            parts.append('\t' + format_net(net) + '\n')
        if not self.nets:
            parts.append('\t<NO NETS FOUND>')

        parts.append('\nOpens')
        parts.append('\n(2+ nets containing the same name)\n')
        for label, count in sorted(self.opens.items()):
            parts.append(f'\t{label} : {count} nets\n')
        if not self.opens:
            parts.append('\t<No opens found>')

        parts.append('\nShorts')
        parts.append('\n(2+ unique names for the same net)\n')
        for net in sort_nets(self.shorts):
            parts.append('\t' + format_net(net) + '\n')
        if not self.shorts:
            parts.append('\t<No shorts found>')

        parts.append('=============\n')
        return ''.join(parts)
|
||||
|
||||
def trace_layout(
        filepath: str,
        connectivity: Sequence[tuple[layer_t, layer_t | None, layer_t]],
        layer_map: dict[str, lnum_t] | None = None,
        topcell: str | None = None,
        *,
        labels_map: dict[layer_t, layer_t] | None = None,
        lfile_path: str | None = None,
        lfile_map: dict[layer_t, layer_t] | None = None,
        lfile_layer_map: dict[str, lnum_t] | None = None,
        lfile_topcell: str | None = None,
        output_path: str | None = None,
        ) -> list[set[str]]:
    """
    Trace a layout to identify labeled nets.

    To label a net, place a text label anywhere touching the net.
    Labels may be mapped from a different layer, or even a different
    layout file altogether.
    Note: Labels must not contain commas (,)!!

    Args:
        filepath: Path to the primary layout, containing the conductor geometry
            (and optionally also the labels)
        connectivity: List of (conductor1, via12, conductor2) tuples,
            which indicate that the specified layers are electrically connected
            (conductor1 to via12 and via12 to conductor2). The middle (via) layer
            may be `None`, in which case the outer layers are directly connected
            at any overlap (conductor1 to conductor2).
        layer_map: {layer_name: (layer_num, dtype_num)} translation table.
            Should contain any strings present in `connectivity` and `labels_map`.
            Default is an empty dict.
        topcell: Cell name of the topcell. If `None`, it is automatically chosen.
        labels_map: {label_layer: metal_layer} mapping, which allows labels to
            reside on a different layer from their corresponding metals.
            Only labels on the provided label layers are used, so
            {metal_layer: metal_layer} entries must be explicitly specified if
            they are desired.
            If `None`, labels on each layer in `connectivity` are used alongside
            that same layer's geometry ({layer: layer} for all participating
            geometry layers)
            Default `None`.
        lfile_path: Path to a separate file from which labels should be merged.
        lfile_map: {lfile_layer: primary_layer} mapping, used when merging the
            labels into the primary layout.
        lfile_layer_map: {layer_name: (layer_num, dtype_num)} mapping for the
            secondary (label) file. Should contain all string keys in
            `lfile_map`.
            `None` reuses `layer_map` (default).
        lfile_topcell: Cell name for the topcell in the secondary (label) file.
            `None` automatically chooses the topcell (default).
        output_path: If provided, outputs the final net geometry to a layout
            at the given path. Default `None`.

    Returns:
        List of labeled nets, where each entry is a set of label strings which
        were found on the given net.
    """
    if layer_map is None:
        layer_map = {}

    # By default, read labels directly from every layer that participates in connectivity
    if labels_map is None:
        labels_map = {
            layer: layer
            for layer in chain(*connectivity)
            if layer is not None
            }

    layout = db.Layout()
    layout.read(filepath)

    topcell_obj = _get_topcell(layout, topcell)

    # Merge labels from a separate layout if asked
    if lfile_path:
        if not lfile_map:
            raise SnarledError('Asked to load labels from a separate file, but no '
                               + 'label layers were specified in lfile_map')

        if lfile_layer_map is None:
            lfile_layer_map = layer_map

        # Translate string layer names on both sides of lfile_map into (layer, dtype) pairs
        lnum_map = {}
        for ltext, lshape in lfile_map.items():
            if isinstance(ltext, str):
                ltext = lfile_layer_map[ltext]
            if isinstance(lshape, str):
                lshape = layer_map[lshape]
            lnum_map[ltext] = lshape

        _merge_labels_from(lfile_path, layout, topcell_obj, lnum_map, lfile_topcell)

    #
    # Build a netlist from the layout
    #
    l2n = db.LayoutToNetlist(db.RecursiveShapeIterator(layout, topcell_obj, []))
    #l2n.include_floating_subcircuits = True

    # Create l2n polygon layers (one Region per participating geometry layer)
    layer2polys = {}
    for layer in set(chain(*connectivity)):
        if layer is None:
            continue
        if isinstance(layer, str):
            layer = layer_map[layer]
        klayer = layout.layer(*layer)
        layer2polys[layer] = l2n.make_polygon_layer(klayer)

    # Create l2n text layers (flattened so labels apply regardless of hierarchy)
    layer2texts = {}
    for layer in labels_map:
        if isinstance(layer, str):
            layer = layer_map[layer]
        klayer = layout.layer(*layer)
        texts = l2n.make_text_layer(klayer)
        texts.flatten()
        layer2texts[layer] = texts

    # Connect each layer to itself
    for name, polys in layer2polys.items():
        logger.info(f'Adding layer {name}')
        l2n.connect(polys)

    # Connect layers, optionally with vias
    for top, via, bot in connectivity:
        if isinstance(top, str):
            top = layer_map[top]
        if isinstance(via, str):
            via = layer_map[via]
        if isinstance(bot, str):
            bot = layer_map[bot]

        if via is None:
            # Direct connection: any overlap between the two conductors shorts them
            l2n.connect(layer2polys[top], layer2polys[bot])
        else:
            # Via-mediated connection: conductors only connect through the via layer
            l2n.connect(layer2polys[top], layer2polys[via])
            l2n.connect(layer2polys[bot], layer2polys[via])

    # Label nets: attach each text layer to its corresponding metal layer
    for label_layer, metal_layer in labels_map.items():
        if isinstance(label_layer, str):
            label_layer = layer_map[label_layer]
        if isinstance(metal_layer, str):
            metal_layer = layer_map[metal_layer]

        l2n.connect(layer2polys[metal_layer], layer2texts[label_layer])

    # Get netlist
    l2n.extract_netlist()
    nl = l2n.netlist()
    nl.make_top_level_pins()

    if output_path:
        _write_net_layout(l2n, output_path, layer2polys)

    #
    # Return merged nets
    #
    top_circuits = [cc for cc, _ in zip(nl.each_circuit_top_down(), range(nl.top_circuit_count()), strict=False)]

    # Nets with more than one label get their labels joined with a comma
    nets = [
        set(nn.name.split(','))
        for cc in top_circuits
        for nn in cc.each_net()
        if nn.name
        ]
    return nets
|
||||
|
||||
def _get_topcell(
        layout: db.Layout,
        name: str | None = None,
        ) -> db.Cell:
    """
    Get the topcell by name or hierarchy.

    Args:
        layout: Layout to get the cell from
        name: If given, use the name to find the topcell; otherwise use hierarchy.

    Returns:
        Cell object
    """
    if name is not None:
        # Look the cell up by name
        return layout.cell(layout.cell_by_name(name))
    # No name given: fall back to the hierarchy's top cell
    return layout.top_cell()
|
||||
|
||||
def _write_net_layout(
        l2n: db.LayoutToNetlist,
        filepath: str,
        layer2polys: dict[lnum_t, db.Region],
        ) -> None:
    """ Write the extracted net geometry into a fresh single-cell layout at `filepath`. """
    out_layout = db.Layout()
    out_top = out_layout.create_cell('top')
    # Map each destination layer index to the Region holding that layer's polygons
    region_map = {out_layout.layer(*layer): region for layer, region in layer2polys.items()}
    l2n.build_all_nets(l2n.cell_mapping_into(out_layout, out_top), out_layout, region_map, 'net_', 'prop_', l2n.BNH_Flatten, 'circuit_')
    out_layout.write(filepath)
|
||||
|
||||
def _merge_labels_from(
        filepath: str,
        into_layout: db.Layout,
        into_cell: db.Cell,
        lnum_map: dict[lnum_t, lnum_t],
        topcell: str | None = None,
        ) -> None:
    """
    Read the layout at `filepath` and copy its shapes (labels) into
    `into_cell` of `into_layout`, translating layers through `lnum_map`.

    Args:
        filepath: Path of the secondary (label) layout to read.
        into_layout: Destination layout.
        into_cell: Destination cell within `into_layout`.
        lnum_map: {source_label_layer: destination_conductor_layer} mapping.
        topcell: Topcell name in the secondary layout; `None` auto-chooses.
    """
    layout = db.Layout()
    layout.read(filepath)

    topcell_obj = _get_topcell(layout, topcell)

    for labels_layer, conductor_layer in lnum_map.items():
        layer_ind_src = layout.layer(*labels_layer)
        layer_ind_dst = into_layout.layer(*conductor_layer)

        # BUGFIX: source and destination were swapped — shapes must be read
        # from the freshly-loaded label layout (topcell_obj) and inserted
        # into the destination cell (into_cell), not the other way around.
        shapes_src = topcell_obj.shapes(layer_ind_src)
        shapes_dst = into_cell.shapes(layer_ind_dst)
        for shape in shapes_src.each():
            new_shape = shapes_dst.insert(shape)
            shapes_dst.replace_prop_id(new_shape, 0)     # clear shape properties
165
snarled/tracker.py
Normal file
165
snarled/tracker.py
Normal file
@ -0,0 +1,165 @@
|
||||
from typing import List, Set, ClassVar, Optional, Dict
|
||||
from collections import defaultdict
|
||||
from dataclasses import dataclass
|
||||
|
||||
from .types import layer_t, contour_t
|
||||
|
||||
|
||||
class NetName:
    """
    Basically just a uniquely-sortable `Optional[str]`.

    A `name` of `None` indicates that the net is anonymous.
    The `subname` is used to track multiple same-named nets, to allow testing for opens.
    """
    name: Optional[str]
    subname: int

    count: ClassVar[defaultdict[Optional[str], int]] = defaultdict(int)
    """ Counter for how many classes have been instantiated with each name """

    def __init__(self, name: Optional[str] = None) -> None:
        self.name = name
        # subname is the 0-based index among all instances sharing this name
        self.subname = NetName.count[name]
        NetName.count[name] += 1

    def __lt__(self, other: 'NetName') -> bool:
        # Same name: break ties with the instantiation index
        if self.name == other.name:
            return self.subname < other.subname
        # Anonymous nets always sort after named nets
        if self.name is None:
            return False
        if other.name is None:
            return True
        return self.name < other.name

    def __repr__(self) -> str:
        base = self.name if self.name is not None else '(None)'
        # Only disambiguate with the subname when the name is non-unique
        if NetName.count[self.name] == 1:
            return base
        return f'{base}__{self.subname}'
|
||||
|
||||
class NetsInfo:
    """
    Container for describing all nets and keeping track of the "canonical" name for each
    net. Nets which are known to be shorted together should be `merge`d together,
    combining their geometry under the "canonical" name and adding the other name as an alias.
    """
    nets: defaultdict[NetName, defaultdict[layer_t, List]]
    """
    Contains all polygons for all nets, in the format
    `{net_name: {layer: [poly0, poly1, ...]}}`

    Polygons are usually stored in pyclipper-friendly coordinates, but may be either `PyPolyNode`s
    or simple lists of coordinates (oriented boundaries).
    """

    net_aliases: Dict[NetName, NetName]
    """
    A mapping from alias to underlying name.
    Note that the underlying name may itself be an alias.
    `resolve_name` can be used to simplify lookup
    """

    def __init__(self) -> None:
        self.net_aliases = {}
        self.nets = defaultdict(lambda: defaultdict(list))

    def resolve_name(self, net_name: NetName) -> NetName:
        """
        Find the canonical name (as used in `self.nets`) for any NetName.

        Args:
            net_name: The name of the net to look up. May be an alias.

        Returns:
            The canonical name for the net.
        """
        resolved = net_name
        # Follow the alias chain until we reach a non-aliased name
        while resolved in self.net_aliases:
            resolved = self.net_aliases[resolved]
        return resolved

    def merge(self, net_a: NetName, net_b: NetName) -> None:
        """
        Combine two nets into one.
        Usually used when it is discovered that two nets are shorted.

        The name that is preserved is based on the sort order of `NetName`s,
        which favors non-anonymous, lexicograpically small names.

        Args:
            net_a: A net to merge
            net_b: The other net to merge
        """
        canon_a = self.resolve_name(net_a)
        canon_b = self.resolve_name(net_b)
        if canon_a is canon_b:
            return      # already the same net

        # Always keep named nets if the other is anonymous
        keep_net, old_net = sorted((canon_a, canon_b))

        #logger.info(f'merging {old_net} into {keep_net}')
        self.net_aliases[old_net] = keep_net
        if old_net in self.nets:
            # Fold the defunct net's geometry into the surviving net
            for layer, geometry in self.nets[old_net].items():
                self.nets[keep_net][layer] += geometry
            del self.nets[old_net]

    def prune(self, layer: layer_t) -> None:
        """
        Delete all geometry for the given layer.

        Args:
            layer: The layer to "forget"
        """
        for net_layers in self.nets.values():
            net_layers.pop(layer, None)

    def get_shorted_nets(self) -> List[Set[NetName]]:
        """
        List groups of non-anonymous nets which were merged.

        Returns:
            A list of sets of shorted nets.
        """
        by_canonical = defaultdict(list)
        for alias in self.net_aliases:
            if alias.name is None:
                continue

            canon = self.resolve_name(alias)
            assert canon.name is not None
            by_canonical[canon].append(alias)

        return [{canon, *aliases} for canon, aliases in by_canonical.items()]

    def get_open_nets(self) -> defaultdict[str, List[NetName]]:
        """
        List groups of same-named nets which were *not* merged.

        Returns:
            A list of sets of same-named, non-shorted nets.
        """
        opens = defaultdict(list)
        first_seen: Dict = {}
        for net in self.nets:
            if net.name is None:
                continue

            if net.name not in first_seen:
                first_seen[net.name] = net
                continue

            # Second (or later) distinct net with this name: record the open
            if net.name not in opens:
                opens[net.name].append(first_seen[net.name])
            opens[net.name].append(net)
        return opens
@ -1,3 +1,5 @@
|
||||
from typing import Union, Tuple, List, Sequence, Optional, Hashable
|
||||
|
||||
lnum_t = tuple[int, int]
|
||||
layer_t = lnum_t | str
|
||||
layer_t = Hashable
|
||||
contour_t = List[Tuple[int, int]]
|
||||
connectivity_t = Sequence[Tuple[layer_t, Optional[layer_t], layer_t]]
|
||||
|
210
snarled/utils.py
210
snarled/utils.py
@ -1,198 +1,28 @@
|
||||
import logging
|
||||
from .types import layer_t
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class SnarledError(Exception):
    """ Base class for errors raised by `snarled`. """
||||
|
||||
|
||||
def strip_underscored_label(string: str) -> str:
    """
    If the label ends in an underscore followed by an integer, strip
    that suffix. Otherwise, just return the label.

    (The previous docstring had an unrelated line — "Some utility code that
    gets reused" — spliced into it by a bad merge; the code is unchanged.)

    Args:
        string: The label string

    Returns:
        The label string, with the suffix removed (if one was found)
    """
    try:
        parts = string.split('_')
        int(parts[-1])      # must succeed to continue
        return '_'.join(parts[:-1])
    except Exception:
        # Non-integer suffix (or no underscore at all): return unchanged
        return string
from typing import Set, Tuple
|
||||
|
||||
from .types import connectivity_t, layer_t
|
||||
|
||||
|
||||
def read_layermap(path: str) -> dict[str, tuple[int, int]]:
    """
    Read a klayout-compatible layermap file.

    Only the simplest format is supported:
        layer/dtype:layer_name

    Empty lines are ignored.

    NOTE: This function and `connectivity2layers` below were interleaved
    line-by-line by a bad merge; both have been reconstructed here.

    Args:
        path: filepath for the input file

    Returns:
        Dict of {name: (layer, dtype)}
    """
    with open(path, 'rt') as ff:
        lines = ff.readlines()

    layer_map = {}
    for nn, line in enumerate(lines):
        line = line.strip()
        if not line:
            continue

        # Reject syntax we do not support (wildcards, ranges, groupings)
        for cc in '*-()':
            if cc in line:
                raise SnarledError(f'Failed to read layermap on line {nn} due to special character "{cc}"')

        for cc in ':/':
            if cc not in line:
                raise SnarledError(f'Failed to read layermap on line {nn}; missing "{cc}"')

        try:
            layer_part, name = line.split(':')
            layer_nums = str2lnum(layer_part)   # presumably parses "layer/dtype" — defined elsewhere in this module
        except Exception:
            logger.exception(f'Layer map read failed on line {nn}')
            raise

        layer_map[name.strip()] = layer_nums

    return layer_map


def connectivity2layers(
        connectivity: connectivity_t,
        ) -> Tuple[Set[layer_t], Set[layer_t]]:
    """
    Extract the set of all metal layers and the set of all via layers
    from the connectivity description.

    Args:
        connectivity: Sequence of (metal_a, via_or_None, metal_b) triples.

    Returns:
        (metal_layers, via_layers) sets.

    Raises:
        Exception: if any layer appears as both a metal and a via.
    """
    metal_layers = set()
    via_layers = set()
    for top, via, bot in connectivity:
        metal_layers.add(top)
        metal_layers.add(bot)
        if via is not None:
            via_layers.add(via)

    both = metal_layers.intersection(via_layers)
    if both:
        raise Exception(f'The following layers are both vias and metals!? {both}')

    return metal_layers, via_layers
||||
|
||||
def read_connectivity(path: str) -> list[tuple[layer_t, layer_t | None, layer_t]]:
    """
    Read a connectivity spec file, which takes the form

        conductor0, via01, conductor1
        conductor1, via12, conductor2
        conductor0, via02, conductor2
        ...
        conductorX, conductorY

    where each comma-separated entry is a layer name or numerical layer/dtype
    designation (e.g. 123/45). Empty lines are ignored. Lines with only 2 entries
    are directly connected without needing a separate via layer.

    Args:
        path: filepath for the input file

    Returns:
        List of layer spec tuples (A, viaAB, B); the middle entry will be None
        if no via is given.

    Raises:
        SnarledError: if a line has the wrong number of entries or an empty layer name.
    """
    with open(path, 'rt') as ff:
        lines = ff.readlines()

    connections: list[tuple[layer_t, layer_t | None, layer_t]] = []
    for nn, line in enumerate(lines):
        line = line.strip()
        if not line:
            continue

        parts = line.split(',')

        # Each line must name either 2 layers (direct connection) or 3 (metal, via, metal).
        if len(parts) not in (2, 3):
            raise SnarledError(f'Expected 2 or 3 comma-separated entries in connectivity spec on line {nn}')

        layers = []
        for part in parts:
            layer: layer_t
            if '/' in part:
                # Numerical layer/dtype designation
                try:
                    layer = str2lnum(part)
                except Exception:
                    logger.exception(f'Connectivity spec read failed on line {nn}')
                    raise
            else:
                # Named layer
                layer = part.strip()
                if not layer:
                    raise SnarledError(f'Empty layer in connectivity spec on line {nn}')
            layers.append(layer)

        if len(layers) == 2:
            connections.append((layers[0], None, layers[1]))
        else:
            connections.append((layers[0], layers[1], layers[2]))

    return connections
|
||||
|
||||
|
||||
def read_remap(path: str) -> dict[layer_t, layer_t]:
    """
    Read a layer remap spec file, which takes the form

        old_layer1 : new_layer1
        old_layer2 : new_layer2
        ...

    where each layer entry is a layer name or numerical layer/dtype
    designation (e.g. 123/45).
    Empty lines are ignored.

    Args:
        path: filepath for the input file

    Returns:
        Dict mapping from left (old) layers to right (new) layers

    Raises:
        SnarledError: if a line does not contain exactly one ':' or has an empty layer name.
    """
    with open(path, 'rt') as ff:
        lines = ff.readlines()

    remap = {}
    for nn, line in enumerate(lines):
        line = line.strip()
        if not line:
            continue

        parts = line.split(':')

        # Exactly one ':' separates the old layer from the new one.
        if len(parts) != 2:
            raise SnarledError(f'Expected exactly one ":" in layer remap spec on line {nn}')

        layers = []
        for part in parts:
            layer: layer_t
            if '/' in part:
                # Numerical layer/dtype designation
                try:
                    layer = str2lnum(part)
                except Exception:
                    logger.exception(f'Layer remap spec read failed on line {nn}')
                    raise
            else:
                # Named layer
                layer = part.strip()
                if not layer:
                    raise SnarledError(f'Empty layer in layer remap spec on line {nn}')
            layers.append(layer)

        remap[layers[0]] = layers[1]

    return remap
|
||||
|
||||
|
||||
def str2lnum(string: str) -> tuple[int, int]:
    """
    Parse a '123/45'-style layer/dtype spec string.

    Args:
        string: String specifying the layer/dtype

    Returns:
        (layer, dtype)

    Raises:
        ValueError: if the string does not contain exactly one '/'
            separating two integers.
    """
    layer_str, dtype_str = string.split('/')
    return (int(layer_str), int(dtype_str))
|
||||
|
Loading…
Reference in New Issue
Block a user