This commit is contained in:
jan 2022-03-31 00:01:45 -07:00
parent bcb967dff1
commit 47f8390f8b
7 changed files with 220 additions and 11 deletions

View File

@ -1,5 +1,17 @@
""" """
TODO: ALL DOCSTRINGS snarl
=====
Layout connectivity checker.
`snarl` is a python package for checking electrical connectivity in multi-layer layouts.
It is intended to be "poor-man's LVS" (layout-versus-schematic), for when poverty
has deprived the man of both a schematic and a better connectivity tool.
The main functionality is in `trace_connectivity`.
Useful classes, namely `NetsInfo` and `NetName`, are in `snarl.tracker`.
`snarl.interfaces` contains helper code for interfacing with other packages.
""" """
from .main import trace_connectivity from .main import trace_connectivity
from .tracker import NetsInfo, NetName from .tracker import NetsInfo, NetName
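As a quick orientation, here is a minimal usage sketch of the API re-exported above. The layer keys, geometry, and net names are placeholders, and the positional argument order (`polys`, `labels`, `connectivity`) is taken from the `trace_connectivity` docstring added later in this commit:

import numpy
from snarl import trace_connectivity

# Hypothetical two-metal stack: M1 and M2 connect wherever a VIA1 shape overlaps both.
M1, VIA1, M2 = (1, 0), (2, 0), (3, 0)
connectivity = [(M1, VIA1, M2)]

# polys: {layer: [Nx2 vertex arrays, ...]} -- all conducting shapes.
polys = {
    M1: [numpy.array([(0, 0), (10, 0), (10, 2), (0, 2)])],
    VIA1: [numpy.array([(0, 0), (2, 0), (2, 2), (0, 2)])],
    M2: [numpy.array([(0, 0), (2, 0), (2, 10), (0, 10)])],
}

# labels: {layer: [(x, y, net_name), ...]} -- named points, indexed by the layer they target.
labels = {
    M1: [(9, 1, 'VDD')],
    M2: [(1, 9, 'VDD')],
}

nets_info = trace_connectivity(polys, labels, connectivity)
print(list(nets_info.nets))         # canonical nets (here a single 'VDD' net)
print(nets_info.get_open_nets())    # same-named nets that failed to connect (here empty)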

View File

@ -1,3 +1,6 @@
"""
Wrappers to simplify some pyclipper functions
"""
from typing import Sequence, Optional, List
from numpy.typing import ArrayLike

View File

@ -1,3 +1,6 @@
"""
Functionality for extracting geometry and label info from `masque` patterns.
"""
from typing import Sequence, Dict, List, Any, Tuple, Optional, Mapping
from collections import defaultdict
@ -18,6 +21,22 @@ def read_cell(
) -> Tuple[
defaultdict[layer_t, List[NDArray[numpy.float64]]],
defaultdict[layer_t, List[Tuple[float, float, str]]]]:
"""
Extract `polys` and `labels` from a `masque.Pattern`.
This function extracts the data needed by `snarl.trace_connectivity`.
Args:
cell: A `masque` `Pattern` object. Usually your topcell.
connectivity: A sequence of 3-tuples specifying the layer connectivity.
Same as what is provided to `snarl.trace_connectivity`.
label_mapping: A mapping of `{label_layer: metal_layer}`. This allows labels
to refer to nets on metal layers without the labels themselves being on
that layer.
Returns:
`polys` and `labels` data structures, to be passed to `snarl.trace_connectivity`.
"""
metal_layers, via_layers = connectivity2layers(connectivity)
poly_layers = metal_layers | via_layers
@ -60,6 +79,17 @@ def load_polys(
cell: Pattern,
layers: Sequence[layer_t],
) -> defaultdict[layer_t, List[NDArray[numpy.float64]]]:
"""
Given a *flat* `masque.Pattern`, extract the polygon info into the format used by `snarl`.
Args:
cell: The `Pattern` object to extract from.
layers: The layers to extract.
Returns:
`{layer0: [poly0, [(x0, y0), (x1, y1), ...], poly2, ...]}`
`polys` structure usable by `snarl.trace_connectivity`.
"""
polys = defaultdict(list)
for ss in cell.shapes:
if ss.layer not in layers:
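A hedged end-to-end sketch of this interface: the import path `snarl.interfaces.masque` is an assumption (the diff shows this module's contents but not its filename), and `topcell` stands in for a `masque` `Pattern` you have already loaded and flattened:

from masque import Pattern
from snarl import trace_connectivity, NetsInfo
from snarl.types import connectivity_t
from snarl.interfaces.masque import read_cell   # assumed module path

def check_topcell(topcell: Pattern, connectivity: connectivity_t) -> NetsInfo:
    # `load_polys` (used by `read_cell`) expects a *flat* pattern, so flatten first if needed.
    polys, labels = read_cell(topcell, connectivity)
    # Pass `label_mapping={label_layer: metal_layer}` to `read_cell` if your labels
    # live on dedicated text layers rather than on the metal layers themselves.
    return trace_connectivity(polys, labels, connectivity)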

View File

@ -1,3 +1,6 @@
"""
Main connectivity-checking functionality for `snarl`
"""
from typing import Tuple, List, Dict, Set, Optional, Union, Sequence, Mapping
from collections import defaultdict
from pprint import pformat
@ -23,7 +26,38 @@ def trace_connectivity(
connectivity: Sequence[Tuple[layer_t, Optional[layer_t], layer_t]],
clipper_scale_factor: int = int(2 ** 24),
) -> NetsInfo:
"""
Analyze the electrical connectivity of the layout.
This is the primary purpose of `snarl`.
The resulting `NetsInfo` will contain only disjoint `nets`, and its `net_aliases` can be used to
understand which nets are shorted (and therefore known by more than one name).
Args:
polys: A full description of all conducting paths in the layout. Consists of lists of polygons
(Nx2 arrays of vertices), indexed by layer. The structure looks roughly like
`{layer0: [poly0, poly1, ..., [(x0, y0), (x1, y1), ...]], ...}`
labels: A list of "named points" which are used to assign names to the nets they touch.
A collection of lists of (x, y, name) tuples, indexed *by the layer they target*.
`{layer0: [(x0, y0, name0), (x1, y1, name1), ...], ...}`
connectivity: A sequence of 3-tuples specifying the electrical connectivity between layers.
Each 3-tuple looks like `(top_layer, via_layer, bottom_layer)` and indicates that
`top_layer` and `bottom_layer` are electrically connected at any location where
shapes are present on all three (top, via, and bottom) layers.
`via_layer` may be `None`, in which case any overlap between shapes on `top_layer`
and `bottom_layer` is automatically considered a short (with no third shape necessary).
clipper_scale_factor: `pyclipper` uses 64-bit integer math, while we accept either floats or ints.
The coordinates from `polys` are scaled by this factor to put them roughly in the middle of
the range `pyclipper` wants; you may need to adjust this if you are already using coordinates
with large integer values.
Returns:
`NetsInfo` object describing the various nets and their connectivities.
"""
#
# Figure out which layers are metals vs vias, and run initial union on each layer
#
metal_layers, via_layers = connectivity2layers(connectivity)
metal_polys = {layer: union_input_polys(scale_to_clipper(polys[layer], clipper_scale_factor))
@ -31,6 +65,9 @@ def trace_connectivity(
via_polys = {layer: union_input_polys(scale_to_clipper(polys[layer], clipper_scale_factor))
for layer in via_layers}
#
# Check each polygon for labels, and assign it to a net (possibly anonymous).
#
nets_info = NetsInfo()
merge_groups: List[List[NetName]] = []
@ -44,7 +81,6 @@ def trace_connectivity(
for poly in metal_polys[layer]:
found_nets = label_poly(poly, point_xys, point_names, clipper_scale_factor)
name: Optional[str]
if found_nets:
name = NetName(found_nets[0])
else:
@ -58,14 +94,16 @@ def trace_connectivity(
logger.warning(f'Nets {found_nets} are shorted on layer {layer} in poly:\n {poly}')
merge_groups.append([name] + [NetName(nn) for nn in found_nets[1:]])
#
# Merge any nets that were shorted by having their labels on the same polygon
#
for group in merge_groups:
first_net, *defunct_nets = group
for defunct_net in defunct_nets:
nets_info.merge(first_net, defunct_net)
#
# Convert to non-hierarchical polygon representation
#    & stay in EVENODD-friendly representation
#
for net in nets_info.nets.values():
for layer in net:
@ -75,7 +113,9 @@ def trace_connectivity(
for layer in via_polys:
via_polys[layer] = hier2oriented(via_polys[layer])
#
# Figure out which nets are shorted by vias, then merge them
#
merge_pairs = find_merge_pairs(connectivity, nets_info.nets, via_polys)
for net_a, net_b in merge_pairs:
nets_info.merge(net_a, net_b)
@ -83,12 +123,33 @@ def trace_connectivity(
return nets_info
def union_input_polys(polys: Sequence[ArrayLike]) -> List[PyPolyNode]:
"""
Perform a union operation on the provided sequence of polygons, and return
a list of `PyPolyNode`s corresponding to all of the outer (i.e. non-hole)
contours.
Note that while islands are "outer" contours and returned in the list, they
also are still available through the `.Childs` property of the "hole" they
appear in. Meanwhile, "hole" contours are only accessible through the `.Childs`
property of their parent "outer" contour, and are not returned in the list.
Args:
polys: A sequence of polygons, `[[(x0, y0), (x1, y1), ...], poly1, poly2, ...]`
Polygons may be implicitly closed.
Returns:
List of PyPolyNodes, representing all "outer" contours (including islands) in
the union of `polys`.
"""
for poly in polys:
if (numpy.abs(poly) % 1).any():
logger.warning('Warning: union_polys got non-integer coordinates; all values will be truncated.')
break
#TODO: check if we need to reverse the order of points in some polygons
# via sum((x2-x1)(y2+y1)) (-ve means ccw)
poly_tree = union_nonzero(polys)
if poly_tree is None:
return []
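The `PyPolyNode` hierarchy described in the docstring above can be walked with just the `.Contour` and `.Childs` attributes used elsewhere in this file. A small sketch with arbitrary integer coordinates:

from snarl.main import union_input_polys

def rect(x0, y0, x1, y1):
    return [(x0, y0), (x1, y0), (x1, y1), (x0, y1)]

# Two overlapping rectangles union into a single outer contour with no holes.
outers = union_input_polys([rect(0, 0, 10, 10), rect(5, 5, 15, 15)])
for node in outers:
    print(f'outer contour: {len(node.Contour)} vertices, {len(node.Childs)} hole(s)')
    for hole in node.Childs:
        # An island inside this hole would appear both in hole.Childs and in `outers`.
        print(f'  hole: {len(hole.Contour)} vertices')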
@ -112,7 +173,27 @@ def label_poly(
point_names: Sequence[str],
clipper_scale_factor: int = int(2 ** 24),
) -> List[str]:
"""
Given a `PyPolyNode` (a polygon, possibly with holes) and a sequence of named points,
return the list of point names contained inside the polygon.
Args:
poly: A polygon, possibly with holes. "Islands" inside the holes (and deeper-nested
structures) are not considered (i.e. only one non-hole contour is considered).
point_xys: A sequence of point coordinates (Nx2, `[(x0, y0), (x1, y1), ...]`).
point_names: A sequence of point names (same length N as point_xys)
clipper_scale_factor: The PyPolyNode structure is from `pyclipper` and likely has
a scale factor applied in order to use integer arithmetic. Due to precision
limitations in `poly_contains_points`, it's preferable to undo this scaling
rather than asking for similarly-scaled `point_xys` coordinates.
NOTE: This could be fixed by using `numpy.longdouble` in
`poly_contains_points`, but the exact length of long-doubles is platform-
dependent and so probably best avoided.
Returns:
All the `point_names` which correspond to points inside the polygon (but not in
its holes).
"""
poly_contour = scale_from_clipper(poly.Contour, clipper_scale_factor)
inside = poly_contains_points(poly_contour, point_xys)
for hole in poly.Childs:
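A hedged sketch of calling `label_poly` directly on an unscaled union, so that `clipper_scale_factor=1` makes the internal `scale_from_clipper` call a no-op; coordinates and names are placeholders:

from snarl.main import union_input_polys, label_poly

nodes = union_input_polys([[(0, 0), (20, 0), (20, 20), (0, 20)]])
names = label_poly(nodes[0],
                   point_xys=[(1, 1), (100, 100)],
                   point_names=['VDD', 'GND'],
                   clipper_scale_factor=1)
print(names)   # ['VDD'] -- only the first point lies inside the contour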
@ -132,9 +213,24 @@ def find_merge_pairs(
nets: Mapping[NetName, Mapping[layer_t, Sequence[contour_t]]],
via_polys: Mapping[layer_t, Sequence[contour_t]],
) -> Set[Tuple[NetName, NetName]]:
# """
# Merge nets based on via connectivity Given a collection of (possibly anonymous) nets, figure out which pairs of
# nets are shorted through a via (and thus should be merged).
Args:
connectivity: A sequence of 3-tuples specifying the electrical connectivity between layers.
Each 3-tuple looks like `(top_layer, via_layer, bottom_layer)` and indicates that
`top_layer` and `bottom_layer` are electrically connected at any location where
shapes are present on all three (top, via, and bottom) layers.
`via_layer` may be `None`, in which case any overlap between shapes on `top_layer`
and `bottom_layer` is automatically considered a short (with no third shape necessary).
nets: A collection of all nets (sequences of polygons in mappings indexed by `NetName`
and layer). See `NetsInfo.nets`.
via_polys: A collection of all vias (in a mapping indexed by layer).
Returns:
A set containing pairs of `NetName`s for each pair of nets which are shorted.
"""
merge_pairs = set()
for top_layer, via_layer, bot_layer in connectivity:
if via_layer is not None:
@ -151,7 +247,7 @@ def find_merge_pairs(
for bot_name in nets.keys():
if bot_name == top_name:
continue
name_pair: Tuple[NetName, NetName] = tuple(sorted((top_name, bot_name)))  #type: ignore
if name_pair in merge_pairs:
continue
@ -163,7 +259,7 @@ def find_merge_pairs(
via_top = intersection_evenodd(top_polys, vias)
overlap = intersection_evenodd(via_top, bot_polys)
else:
overlap = intersection_evenodd(top_polys, bot_polys)  # TODO verify there aren't any suspicious corner cases for this
if not overlap:
continue
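Both `trace_connectivity` and `find_merge_pairs` consume the same connectivity format, so a concrete (purely illustrative) example may help; the layer keys below are placeholders for whatever `layer_t` values your layout uses:

POLY, CONT, M1, VIA1, M2, M3 = (10, 0), (11, 0), (1, 0), (21, 0), (2, 0), (3, 0)

connectivity = [
    (POLY, CONT, M1),    # POLY and M1 connect wherever a CONT shape overlaps both
    (M1,   VIA1, M2),    # M1 and M2 connect through VIA1
    (M2,   None, M3),    # None: any direct M2/M3 overlap is a short, no via shape needed
]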

View File

@ -1,3 +1,6 @@
"""
Utilities for working with polygons
"""
import numpy
from numpy.typing import NDArray, ArrayLike

View File

@ -6,9 +6,17 @@ from .types import layer_t, contour_t
class NetName:
"""
Basically just a uniquely-sortable `Optional[str]`.
A `name` of `None` indicates that the net is anonymous.
The `subname` is used to track multiple same-named nets, to allow testing for opens.
"""
name: Optional[str]
subname: int
count: ClassVar[defaultdict[Optional[str], int]] = defaultdict(int)
""" Counter for how many classes have been instantiated with each name """
def __init__(self, name: Optional[str] = None) -> None:
self.name = name
@ -38,19 +46,57 @@ class NetName:
class NetsInfo:
"""
Container for describing all nets and keeping track of the "canonical" name for each
net. Nets which are known to be shorted together should be `merge`d together,
combining their geometry under the "canonical" name and adding the other name as an alias.
"""
nets: defaultdict[NetName, defaultdict[layer_t, List]]
"""
Contains all polygons for all nets, in the format
`{net_name: {layer: [poly0, poly1, ...]}}`
Polygons are usually stored in pyclipper-friendly coordinates, but may be either `PyPolyNode`s
or simple lists of coordinates (oriented boundaries).
"""
net_aliases: Dict[NetName, NetName]
"""
A mapping from alias to underlying name.
Note that the underlying name may itself be an alias.
`resolve_name` can be used to simplify lookup
"""
def __init__(self) -> None:
self.nets = defaultdict(lambda: defaultdict(list))
self.net_aliases = {}
def resolve_name(self, net_name: NetName) -> NetName:
"""
Find the canonical name (as used in `self.nets`) for any NetName.
Args:
net_name: The name of the net to look up. May be an alias.
Returns:
The canonical name for the net.
"""
while net_name in self.net_aliases:
net_name = self.net_aliases[net_name]
return net_name
def merge(self, net_a: NetName, net_b: NetName) -> None:
"""
Combine two nets into one.
Usually used when it is discovered that two nets are shorted.
The name that is preserved is based on the sort order of `NetName`s,
which favors non-anonymous, lexicographically small names.
Args:
net_a: A net to merge
net_b: The other net to merge
"""
net_a = self.resolve_name(net_a)
net_b = self.resolve_name(net_b)
@ -66,6 +112,12 @@ class NetsInfo:
def get_shorted_nets(self) -> List[Set[NetName]]:
"""
List groups of non-anonymous nets which were merged.
Returns:
A list of sets of shorted nets.
"""
shorts = defaultdict(list)
for kk in self.net_aliases:
if kk.name is None:
@ -80,6 +132,12 @@ class NetsInfo:
return shorted_sets
def get_open_nets(self) -> defaultdict[str, List[NetName]]:
"""
List groups of same-named nets which were *not* merged.
Returns:
A mapping from name to a list of the same-named, non-shorted nets (i.e. possible opens).
"""
opens = defaultdict(list)
seen_names = {}
for kk in self.nets:
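A small sketch of the naming/aliasing behavior described in the docstrings above; the `'M1'` layer key and the triangle coordinates are placeholders:

from snarl.tracker import NetsInfo, NetName

nets = NetsInfo()
vdd = NetName('VDD')
anon = NetName()          # anonymous net: name is None

nets.nets[vdd]['M1'].append([(0, 0), (10, 0), (10, 10)])      # placeholder geometry
nets.nets[anon]['M1'].append([(20, 20), (30, 20), (30, 30)])

# Suppose a via revealed that these two nets are actually the same conductor:
nets.merge(vdd, anon)

print(nets.resolve_name(anon) is vdd)   # True: the non-anonymous name is kept
print(anon in nets.net_aliases)         # True: the anonymous name is now just an alias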

View File

@ -1,3 +1,6 @@
"""
Some utility code that gets reused
"""
from typing import Set, Tuple
from .types import connectivity_t, layer_t
@ -6,6 +9,10 @@ from .types import connectivity_t, layer_t
def connectivity2layers(
connectivity: connectivity_t,
) -> Tuple[Set[layer_t], Set[layer_t]]:
"""
Extract the set of all metal layers and the set of all via layers
from the connectivity description.
"""
metal_layers = set()
via_layers = set()
for top, via, bot in connectivity:
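A hedged example of what this helper returns for a placeholder layer stack; the module path `snarl.utils` is an assumption (the diff shows this file's contents but not its name):

from snarl.utils import connectivity2layers   # assumed module path

M1, VIA1, M2, M3 = (1, 0), (21, 0), (2, 0), (3, 0)
metals, vias = connectivity2layers([(M1, VIA1, M2), (M2, None, M3)])
print(metals)   # {(1, 0), (2, 0), (3, 0)}
print(vias)     # {(21, 0)} -- a None via entry contributes no via layer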