rename to snarled
This commit is contained in:
parent
ffa5bfa96c
commit
4d7709d615
14 changed files with 32 additions and 32 deletions
4
snarled/VERSION.py
Normal file
4
snarled/VERSION.py
Normal file
|
|
@ -0,0 +1,4 @@
|
|||
""" VERSION defintion. THIS FILE IS MANUALLY PARSED BY setup.py and REQUIRES A SPECIFIC FORMAT """
|
||||
__version__ = '''
|
||||
0.1
|
||||
'''.strip()
|
||||
23
snarled/__init__.py
Normal file
23
snarled/__init__.py
Normal file
|
|
@ -0,0 +1,23 @@
|
|||
"""
|
||||
snarled
|
||||
=====
|
||||
|
||||
Layout connectivity checker.
|
||||
|
||||
`snarled` is a python package for checking electrical connectivity in multi-layer layouts.
|
||||
|
||||
It is intended to be "poor-man's LVS" (layout-versus-schematic), for when poverty
|
||||
has deprived the man of both a schematic and a better connectivity tool.
|
||||
|
||||
The main functionality is in `trace_connectivity`.
|
||||
Useful classes, namely `NetsInfo` and `NetName`, are in `snarled.tracker`.
|
||||
`snarled.interfaces` contains helper code for interfacing with other packages.
|
||||
"""
|
||||
from .main import trace_connectivity
|
||||
from .tracker import NetsInfo, NetName
|
||||
from . import interfaces
|
||||
|
||||
|
||||
__author__ = 'Jan Petykiewicz'
|
||||
|
||||
from .VERSION import __version__
|
||||
50
snarled/clipper.py
Normal file
50
snarled/clipper.py
Normal file
|
|
@ -0,0 +1,50 @@
|
|||
"""
|
||||
Wrappers to simplify some pyclipper functions
|
||||
"""
|
||||
from typing import Sequence, Optional, List
|
||||
|
||||
from numpy.typing import ArrayLike
|
||||
from pyclipper import (
|
||||
Pyclipper, PT_CLIP, PT_SUBJECT, CT_UNION, CT_INTERSECTION, PFT_NONZERO, PFT_EVENODD,
|
||||
PyPolyNode,
|
||||
)
|
||||
|
||||
from .types import contour_t
|
||||
|
||||
|
||||
def union_nonzero(shapes: Sequence[ArrayLike]) -> Optional[PyPolyNode]:
    """
    Union the given shapes using the nonzero fill rule.

    Args:
        shapes: Polygons to union, as a sequence of Nx2 vertex sequences
            (in pyclipper integer coordinates).

    Returns:
        The root `PyPolyNode` of the resulting polygon hierarchy
        (from `Execute2`), or `None` if `shapes` was empty.
    """
    if not shapes:
        return None
    pc = Pyclipper()
    pc.AddPaths(shapes, PT_CLIP, closed=True)
    result = pc.Execute2(CT_UNION, PFT_NONZERO, PFT_NONZERO)
    return result
|
||||
|
||||
|
||||
def union_evenodd(shapes: Sequence[ArrayLike]) -> List[contour_t]:
    """
    Union the given shapes using the even-odd fill rule.

    Args:
        shapes: Polygons to union, as a sequence of Nx2 vertex sequences
            (in pyclipper integer coordinates).

    Returns:
        A flat (non-hierarchical) list of contours describing the union,
        or `[]` if `shapes` was empty.
    """
    if not shapes:
        return []
    pc = Pyclipper()
    pc.AddPaths(shapes, PT_CLIP, closed=True)
    return pc.Execute(CT_UNION, PFT_EVENODD, PFT_EVENODD)
|
||||
|
||||
|
||||
def intersection_evenodd(
        subject_shapes: Sequence[ArrayLike],
        clip_shapes: Sequence[ArrayLike],
        ) -> List[contour_t]:
    """
    Intersect `subject_shapes` with `clip_shapes` using the even-odd fill rule.

    Args:
        subject_shapes: Polygons to be clipped (Nx2 vertex sequences,
            in pyclipper integer coordinates).
        clip_shapes: Polygons to clip against.

    Returns:
        A flat list of contours describing the intersection, or `[]` if
        either input is empty.
    """
    if not subject_shapes or not clip_shapes:
        return []
    pc = Pyclipper()
    pc.AddPaths(subject_shapes, PT_SUBJECT, closed=True)
    pc.AddPaths(clip_shapes, PT_CLIP, closed=True)
    return pc.Execute(CT_INTERSECTION, PFT_EVENODD, PFT_EVENODD)
|
||||
|
||||
|
||||
def hier2oriented(polys: Sequence[PyPolyNode]) -> List[ArrayLike]:
    """
    Flatten a sequence of `PyPolyNode`s into a plain list of contours.

    For each node, its outer contour is emitted first, followed by the
    contours of its immediate children (its holes). Deeper descendants
    are not visited.

    Args:
        polys: `PyPolyNode`s to flatten.

    Returns:
        List of contours (outer contour of each node, then its holes).
    """
    flattened: List[ArrayLike] = []
    for node in polys:
        flattened.append(node.Contour)
        flattened.extend(child.Contour for child in node.Childs)
    return flattened
|
||||
0
snarled/interfaces/__init__.py
Normal file
0
snarled/interfaces/__init__.py
Normal file
109
snarled/interfaces/masque.py
Normal file
109
snarled/interfaces/masque.py
Normal file
|
|
@ -0,0 +1,109 @@
|
|||
"""
|
||||
Functionality for extracting geometry and label info from `masque` patterns.
|
||||
"""
|
||||
from typing import Sequence, Dict, List, Any, Tuple, Optional, Mapping
|
||||
from collections import defaultdict
|
||||
|
||||
import numpy
|
||||
from numpy.typing import NDArray
|
||||
from masque import Pattern
|
||||
from masque.file import oasis, gdsii
|
||||
from masque.shapes import Polygon
|
||||
|
||||
from ..types import layer_t
|
||||
from ..utils import connectivity2layers
|
||||
|
||||
|
||||
def read_cell(
        cell: Pattern,
        connectivity: Sequence[Tuple[layer_t, Optional[layer_t], layer_t]],
        label_mapping: Optional[Mapping[layer_t, layer_t]] = None,
        ) -> Tuple[
                defaultdict[layer_t, List[NDArray[numpy.float64]]],
                defaultdict[layer_t, List[Tuple[float, float, str]]]]:
    """
    Extract `polys` and `labels` from a `masque.Pattern`.

    This function extracts the data needed by `snarled.trace_connectivity`.

    Args:
        cell: A `masque` `Pattern` object. Usually your topcell.
        connectivity: A sequence of 3-tuples specifying the layer connectivity.
            Same as what is provided to `snarled.trace_connectivity`.
        label_mapping: A mapping of `{label_layer: metal_layer}`. This allows labels
            to refer to nets on metal layers without the labels themselves being on
            that layer.

    Returns:
        `polys` and `labels` data structures, to be passed to `snarled.trace_connectivity`.
    """
    metal_layers, via_layers = connectivity2layers(connectivity)
    poly_layers = metal_layers | via_layers

    if label_mapping is None:
        # Default: labels are expected on the same layer as the metal they name.
        label_mapping = {layer: layer for layer in metal_layers}
    label_layers = set(label_mapping)

    # Discard all shapes/labels we don't care about before flattening the hierarchy.
    cell = cell.deepcopy().subset(
        shapes_func=lambda ss: ss.layer in poly_layers,
        labels_func=lambda ll: ll.layer in label_layers,
        subpatterns_func=lambda ss: True,
        )
    cell = cell.flatten()

    polys = load_polys(cell, list(poly_layers))

    metal_labels = defaultdict(list)
    for label_layer, metal_layer in label_mapping.items():
        for ll in cell.labels:
            if ll.layer != label_layer:
                continue

            # A `repetition` stamps the label at multiple displacements; no
            # repetition means a single instance at the label's own offset.
            if ll.repetition is None:
                displacements = [(0, 0)]
            else:
                displacements = ll.repetition.displacements

            for displacement in displacements:
                offset = ll.offset + displacement
                metal_labels[metal_layer].append(
                    (*offset, ll.string)
                    )

    return polys, metal_labels
|
||||
|
||||
|
||||
def load_polys(
        cell: Pattern,
        layers: Sequence[layer_t],
        ) -> defaultdict[layer_t, List[NDArray[numpy.float64]]]:
    """
    Given a *flat* `masque.Pattern`, extract the polygon info into the format used by `snarled`.

    Args:
        cell: The `Pattern` object to extract from.
        layers: The layers to extract.

    Returns:
        `{layer0: [poly0, [(x0, y0), (x1, y1), ...], poly2, ...]}`
        `polys` structure usable by `snarled.trace_connectivity`.

    Raises:
        TypeError: If a shape on one of the requested layers is not a `Polygon`
            (i.e. the pattern was not fully polygonized).
    """
    polys = defaultdict(list)
    for ss in cell.shapes:
        if ss.layer not in layers:
            continue

        # `assert` is stripped under `python -O`; raise explicitly so the
        # check survives optimized runs.
        if not isinstance(ss, Polygon):
            raise TypeError(f'Expected all shapes to be Polygons, got {type(ss)} on layer {ss.layer}')

        # A `repetition` stamps the shape at multiple displacements; no
        # repetition means a single instance at the shape's own offset.
        if ss.repetition is None:
            displacements = [(0, 0)]
        else:
            displacements = ss.repetition.displacements

        for displacement in displacements:
            polys[ss.layer].append(
                ss.vertices + ss.offset + displacement
                )
    return polys
|
||||
268
snarled/main.py
Normal file
268
snarled/main.py
Normal file
|
|
@ -0,0 +1,268 @@
|
|||
"""
|
||||
Main connectivity-checking functionality for `snarled`
|
||||
"""
|
||||
from typing import Tuple, List, Dict, Set, Optional, Union, Sequence, Mapping
|
||||
from collections import defaultdict
|
||||
from pprint import pformat
|
||||
import logging
|
||||
|
||||
import numpy
|
||||
from numpy.typing import NDArray, ArrayLike
|
||||
from pyclipper import scale_to_clipper, scale_from_clipper, PyPolyNode
|
||||
|
||||
from .types import connectivity_t, layer_t, contour_t
|
||||
from .poly import poly_contains_points
|
||||
from .clipper import union_nonzero, union_evenodd, intersection_evenodd, hier2oriented
|
||||
from .tracker import NetsInfo, NetName
|
||||
from .utils import connectivity2layers
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def trace_connectivity(
        polys: Mapping[layer_t, Sequence[ArrayLike]],
        labels: Mapping[layer_t, Sequence[Tuple[float, float, str]]],
        connectivity: Sequence[Tuple[layer_t, Optional[layer_t], layer_t]],
        clipper_scale_factor: int = int(2 ** 24),
        ) -> NetsInfo:
    """
    Analyze the electrical connectivity of the layout.

    This is the primary purpose of `snarled`.

    The resulting `NetsInfo` will contain only disjoint `nets`, and its `net_aliases` can be used to
    understand which nets are shorted (and therefore known by more than one name).

    Args:
        polys: A full description of all conducting paths in the layout. Consists of lists of polygons
            (Nx2 arrays of vertices), indexed by layer. The structure looks roughly like
            `{layer0: [poly0, poly1, ..., [(x0, y0), (x1, y1), ...]], ...}`
        labels: A list of "named points" which are used to assign names to the nets they touch.
            A collection of lists of (x, y, name) tuples, indexed *by the layer they target*.
            `{layer0: [(x0, y0, name0), (x1, y1, name1), ...], ...}`
        connectivity: A sequence of 3-tuples specifying the electrical connectivity between layers.
            Each 3-tuple looks like `(top_layer, via_layer, bottom_layer)` and indicates that
            `top_layer` and `bottom_layer` are electrically connected at any location where
            shapes are present on all three (top, via, and bottom) layers.
            `via_layer` may be `None`, in which case any overlap between shapes on `top_layer`
            and `bottom_layer` is automatically considered a short (with no third shape necessary).
        clipper_scale_factor: `pyclipper` uses 64-bit integer math, while we accept either floats or ints.
            The coordinates from `polys` are scaled by this factor to put them roughly in the middle of
            the range `pyclipper` wants; you may need to adjust this if you are already using coordinates
            with large integer values.

    Returns:
        `NetsInfo` object describing the various nets and their connectivities.
    """
    #
    # Figure out which layers are metals vs vias, and run initial union on each layer
    #
    metal_layers, via_layers = connectivity2layers(connectivity)

    metal_polys = {layer: union_input_polys(scale_to_clipper(polys[layer], clipper_scale_factor))
                   for layer in metal_layers}
    via_polys = {layer: union_input_polys(scale_to_clipper(polys[layer], clipper_scale_factor))
                 for layer in via_layers}

    #
    # Check each polygon for labels, and assign it to a net (possibly anonymous).
    #
    nets_info = NetsInfo()

    merge_groups: List[List[NetName]] = []
    for layer in metal_layers:
        point_xys = []
        point_names = []
        # .get() rather than [] so that a metal layer with no labels at all
        # doesn't raise KeyError (its polygons just become anonymous nets).
        for x, y, point_name in labels.get(layer, ()):
            point_xys.append((x, y))
            point_names.append(point_name)

        for poly in metal_polys[layer]:
            found_nets = label_poly(poly, point_xys, point_names, clipper_scale_factor)

            if found_nets:
                name = NetName(found_nets[0])
            else:
                name = NetName()    # Anonymous net

            nets_info.nets[name][layer].append(poly)

            if len(found_nets) > 1:
                # Found a short
                poly_repr = pformat(scale_from_clipper(poly.Contour, clipper_scale_factor))
                logger.warning(f'Nets {found_nets} are shorted on layer {layer} in poly:\n {poly_repr}')
                merge_groups.append([name] + [NetName(nn) for nn in found_nets[1:]])

    #
    # Merge any nets that were shorted by having their labels on the same polygon
    #
    for group in merge_groups:
        first_net, *defunct_nets = group
        for defunct_net in defunct_nets:
            nets_info.merge(first_net, defunct_net)

    #
    # Convert to non-hierarchical polygon representation
    #
    for net in nets_info.nets.values():
        for layer in net:
            #net[layer] = union_evenodd(hier2oriented(net[layer]))
            net[layer] = hier2oriented(net[layer])

    for layer in via_polys:
        via_polys[layer] = hier2oriented(via_polys[layer])

    #
    # Figure out which nets are shorted by vias, then merge them
    #
    merge_pairs = find_merge_pairs(connectivity, nets_info.nets, via_polys)
    for net_a, net_b in merge_pairs:
        nets_info.merge(net_a, net_b)

    return nets_info
|
||||
|
||||
|
||||
def union_input_polys(polys: Sequence[ArrayLike]) -> List[PyPolyNode]:
    """
    Perform a union operation on the provided sequence of polygons, and return
    a list of `PyPolyNode`s corresponding to all of the outer (i.e. non-hole)
    contours.

    Note that while islands are "outer" contours and returned in the list, they
    also are still available through the `.Childs` property of the "hole" they
    appear in. Meanwhile, "hole" contours are only accessible through the `.Childs`
    property of their parent "outer" contour, and are not returned in the list.

    Args:
        polys: A sequence of polygons, `[[(x0, y0), (x1, y1), ...], poly1, poly2, ...]`
            Polygons may be implicitly closed.

    Returns:
        List of PyPolyNodes, representing all "outer" contours (including islands) in
            the union of `polys`.
    """
    # pyclipper operates on integer coordinates; fractional parts would be
    # silently truncated, so warn (once) if any are present.
    for poly in polys:
        if (numpy.abs(poly) % 1).any():
            logger.warning('Warning: union_polys got non-integer coordinates; all values will be truncated.')
            break

    #TODO: check if we need to reverse the order of points in some polygons
    # via sum((x2-x1)(y2+y1)) (-ve means ccw)

    poly_tree = union_nonzero(polys)
    if poly_tree is None:
        return []

    # Partially flatten the tree, reclassifying all the "outer" (non-hole) nodes as new root nodes
    unvisited_nodes = [poly_tree]
    outer_nodes = []
    while unvisited_nodes:
        node = unvisited_nodes.pop()    # node will be the tree parent node (a container), or a hole
        for poly in node.Childs:
            # Children of the root / of a hole are "outer" contours...
            outer_nodes.append(poly)
            for hole in poly.Childs:    # type: ignore
                # ...and their children are holes, whose own children must be revisited.
                unvisited_nodes.append(hole)

    return outer_nodes
|
||||
|
||||
|
||||
def label_poly(
        poly: PyPolyNode,
        point_xys: ArrayLike,
        point_names: Sequence[str],
        clipper_scale_factor: int = int(2 ** 24),
        ) -> List[str]:
    """
    Given a `PyPolyNode` (a polygon, possibly with holes) and a sequence of named points,
    return the list of point names contained inside the polygon.

    Args:
        poly: A polygon, possibly with holes. "Islands" inside the holes (and deeper-nested
            structures) are not considered (i.e. only one non-hole contour is considered).
        point_xys: A sequence of point coordinates (Nx2, `[(x0, y0), (x1, y1), ...]`).
        point_names: A sequence of point names (same length N as point_xys)
        clipper_scale_factor: The PyPolyNode structure is from `pyclipper` and likely has
            a scale factor applied in order to use integer arithmetic. Due to precision
            limitations in `poly_contains_points`, it's preferable to undo this scaling
            rather than asking for similarly-scaled `point_xys` coordinates.
            NOTE: This could be fixed by using `numpy.longdouble` in
            `poly_contains_points`, but the exact length of long-doubles is platform-
            dependent and so probably best avoided.

    Result:
        All the `point_names` which correspond to points inside the polygon (but not in
        its holes).
    """
    poly_contour = scale_from_clipper(poly.Contour, clipper_scale_factor)
    inside = poly_contains_points(poly_contour, point_xys)
    # A point inside any hole is excluded, even if the outer contour contains it.
    for hole in poly.Childs:
        hole_contour = scale_from_clipper(hole.Contour, clipper_scale_factor)
        inside &= ~poly_contains_points(hole_contour, point_xys)

    inside_nets = sorted([net_name for net_name, ii in zip(point_names, inside) if ii])

    if inside.any():
        return inside_nets
    else:
        return []
|
||||
|
||||
|
||||
def find_merge_pairs(
        connectivity: connectivity_t,
        nets: Mapping[NetName, Mapping[layer_t, Sequence[contour_t]]],
        via_polys: Mapping[layer_t, Sequence[contour_t]],
        ) -> Set[Tuple[NetName, NetName]]:
    """
    Given a collection of (possibly anonymous) nets, figure out which pairs of
    nets are shorted through a via (and thus should be merged).

    Args:
        connectivity: A sequence of 3-tuples specifying the electrical connectivity between layers.
            Each 3-tuple looks like `(top_layer, via_layer, bottom_layer)` and indicates that
            `top_layer` and `bottom_layer` are electrically connected at any location where
            shapes are present on all three (top, via, and bottom) layers.
            `via_layer` may be `None`, in which case any overlap between shapes on `top_layer`
            and `bottom_layer` is automatically considered a short (with no third shape necessary).
        nets: A collection of all nets (sequences of polygons in mappings indexed by `NetName`
            and layer). See `NetsInfo.nets`.
        via_polys: A collection of all vias (in a mapping indexed by layer).

    Returns:
        A set containing pairs of `NetName`s for each pair of nets which are shorted.
    """
    merge_pairs = set()
    for top_layer, via_layer, bot_layer in connectivity:
        if via_layer is not None:
            vias = via_polys[via_layer]
            if not vias:
                logger.warning(f'No vias on layer {via_layer}')
                continue

        for top_name in nets.keys():
            top_polys = nets[top_name][top_layer]
            if not top_polys:
                continue

            for bot_name in nets.keys():
                if bot_name == top_name:
                    continue
                # Canonical (sorted) ordering so each unordered pair is only checked once
                name_pair: Tuple[NetName, NetName] = tuple(sorted((top_name, bot_name)))    #type: ignore
                if name_pair in merge_pairs:
                    continue

                bot_polys = nets[bot_name][bot_layer]
                if not bot_polys:
                    continue

                if via_layer is not None:
                    # Short requires overlap of all three: top, via, and bottom shapes.
                    via_top = intersection_evenodd(top_polys, vias)
                    overlap = intersection_evenodd(via_top, bot_polys)
                else:
                    overlap = intersection_evenodd(top_polys, bot_polys)    # TODO verify there aren't any suspicious corner cases for this

                if not overlap:
                    continue

                merge_pairs.add(name_pair)
    return merge_pairs
|
||||
67
snarled/poly.py
Normal file
67
snarled/poly.py
Normal file
|
|
@ -0,0 +1,67 @@
|
|||
"""
|
||||
Utilities for working with polygons
|
||||
"""
|
||||
import numpy
|
||||
from numpy.typing import NDArray, ArrayLike
|
||||
|
||||
|
||||
def poly_contains_points(
        vertices: ArrayLike,
        points: ArrayLike,
        include_boundary: bool = True,
        ) -> NDArray[numpy.bool_]:
    """
    Tests whether the provided points are inside the implicitly closed polygon
    described by the provided list of vertices.

    Uses a vectorized winding-number test over all (vertex, point) pairs.

    Args:
        vertices: Nx2 ArrayLike of form [[x0, y0], [x1, y1], ...], describing an implicitly-
            closed polygon. Note that this should include any offsets.
        points: Nx2 ArrayLike of form [[x0, y0], [x1, y1], ...] containing the points to test.
        include_boundary: True if points on the boundary should be counted as inside the shape.
            Default True.

    Returns:
        ndarray of booleans, [point0_is_in_shape, point1_is_in_shape, ...]
    """
    # `asarray` avoids a copy when possible. `numpy.array(..., copy=False)` raises
    # under NumPy >= 2.0 whenever a copy would be required (e.g. for list inputs).
    points = numpy.asarray(points)
    vertices = numpy.asarray(vertices)

    if points.size == 0:
        # Keep the dtype consistent with the non-empty case (was float zeros(0)).
        return numpy.zeros(0, dtype=bool)

    min_bounds = numpy.min(vertices, axis=0)[None, :]
    max_bounds = numpy.max(vertices, axis=0)[None, :]

    # Points outside the bounding box can be rejected without the full test.
    trivially_outside = ((points < min_bounds).any(axis=1)
                       | (points > max_bounds).any(axis=1))

    nontrivial = ~trivially_outside
    if trivially_outside.all():
        inside = numpy.zeros_like(trivially_outside, dtype=bool)
        return inside

    ntpts = points[None, nontrivial, :]    # nontrivial points, along axis 1 of ndarray
    verts = vertices[:, None, :]           # vertices, along axis 0
    xydiff = ntpts - verts                 # Expands into (n_vertices, n_ntpts, 2)

    y0_le = xydiff[:, :, 1] >= 0           # y_point >= y_vertex (axes 0, 1 for all points & vertices)
    y1_le = numpy.roll(y0_le, -1, axis=0)  # same thing for next vertex

    upward = y0_le & ~y1_le                # edge passes point y coord going upwards
    downward = ~y0_le & y1_le              # edge passes point y coord going downwards

    dv = numpy.roll(verts, -1, axis=0) - verts
    is_left = (dv[..., 0] * xydiff[..., 1]      # >0 if left of dv, <0 if right, 0 if on the line
             - dv[..., 1] * xydiff[..., 0])

    winding_number = ((upward & (is_left > 0)).sum(axis=0)
                    - (downward & (is_left < 0)).sum(axis=0))

    nontrivial_inside = winding_number != 0        # filter nontrivial points based on winding number
    if include_boundary:
        nontrivial_inside[(is_left == 0).any(axis=0)] = True      # check if point lies on any edge

    # Scatter the nontrivial results back into a full-length boolean array.
    inside = nontrivial.copy()
    inside[nontrivial] = nontrivial_inside
    return inside
|
||||
0
snarled/py.typed
Normal file
0
snarled/py.typed
Normal file
153
snarled/tracker.py
Normal file
153
snarled/tracker.py
Normal file
|
|
@ -0,0 +1,153 @@
|
|||
from typing import List, Set, ClassVar, Optional, Dict
|
||||
from collections import defaultdict
|
||||
from dataclasses import dataclass
|
||||
|
||||
from .types import layer_t, contour_t
|
||||
|
||||
|
||||
class NetName:
    """
    Basically just a uniquely-sortable `Optional[str]`.

    A `name` of `None` indicates that the net is anonymous.
    The `subname` is used to track multiple same-named nets, to allow testing for opens.
    """
    name: Optional[str]
    subname: int

    # Class-wide tally of how many instances have been created with each name;
    # used to assign each instance its `subname` and to decide whether
    # `__repr__` needs a disambiguating suffix.
    count: ClassVar[defaultdict[Optional[str], int]] = defaultdict(int)

    def __init__(self, name: Optional[str] = None) -> None:
        self.name = name
        self.subname = NetName.count[name]
        NetName.count[name] += 1

    def __lt__(self, other: 'NetName') -> bool:
        # Identical names fall back to creation order.
        if self.name == other.name:
            return self.subname < other.subname
        # Anonymous nets sort after all named nets.
        if self.name is None:
            return False
        if other.name is None:
            return True
        return self.name < other.name

    def __repr__(self) -> str:
        base = '(None)' if self.name is None else self.name
        # Only append the subname when this name is not unique.
        if NetName.count[self.name] != 1:
            return f'{base}__{self.subname}'
        return base
|
||||
|
||||
|
||||
class NetsInfo:
    """
    Container for describing all nets and keeping track of the "canonical" name for each
    net. Nets which are known to be shorted together should be `merge`d together,
    combining their geometry under the "canonical" name and adding the other name as an alias.
    """
    nets: defaultdict[NetName, defaultdict[layer_t, List]]
    """
    Contains all polygons for all nets, in the format
    `{net_name: {layer: [poly0, poly1, ...]}}`

    Polygons are usually stored in pyclipper-friendly coordinates, but may be either `PyPolyNode`s
    or simple lists of coordinates (oriented boundaries).
    """

    net_aliases: Dict[NetName, NetName]
    """
    A mapping from alias to underlying name.
    Note that the underlying name may itself be an alias.
    `resolve_name` can be used to simplify lookup
    """

    def __init__(self) -> None:
        self.nets = defaultdict(lambda: defaultdict(list))
        self.net_aliases = {}

    def resolve_name(self, net_name: NetName) -> NetName:
        """
        Find the canonical name (as used in `self.nets`) for any NetName.

        Args:
            net_name: The name of the net to look up. May be an alias.

        Returns:
            The canonical name for the net.
        """
        # Aliases may chain (alias -> alias -> canonical); follow to the end.
        while net_name in self.net_aliases:
            net_name = self.net_aliases[net_name]
        return net_name

    def merge(self, net_a: NetName, net_b: NetName) -> None:
        """
        Combine two nets into one.
        Usually used when it is discovered that two nets are shorted.

        The name that is preserved is based on the sort order of `NetName`s,
        which favors non-anonymous, lexicographically small names.

        Args:
            net_a: A net to merge
            net_b: The other net to merge
        """
        net_a = self.resolve_name(net_a)
        net_b = self.resolve_name(net_b)

        # Always keep named nets if the other is anonymous
        keep_net, old_net = sorted((net_a, net_b))

        #logger.info(f'merging {old_net} into {keep_net}')
        self.net_aliases[old_net] = keep_net
        if old_net in self.nets:
            for layer in self.nets[old_net]:
                self.nets[keep_net][layer] += self.nets[old_net][layer]
            del self.nets[old_net]


    def get_shorted_nets(self) -> List[Set[NetName]]:
        """
        List groups of non-anonymous nets which were merged.

        Returns:
            A list of sets of shorted nets.
        """
        shorts = defaultdict(list)
        for kk in self.net_aliases:
            if kk.name is None:
                continue

            base_name = self.resolve_name(kk)
            assert(base_name.name is not None)
            shorts[base_name].append(kk)

        # Each group is the canonical net plus all of its named aliases.
        shorted_sets = [set([kk] + others)
                        for kk, others in shorts.items()]
        return shorted_sets

    def get_open_nets(self) -> defaultdict[str, List[NetName]]:
        """
        List groups of same-named nets which were *not* merged.

        Returns:
            A mapping `{name: [NetName0, NetName1, ...]}` containing, for each
            name with multiple disconnected (non-merged) nets, all of the
            same-named `NetName`s.
        """
        opens = defaultdict(list)
        seen_names = {}
        for kk in self.nets:
            if kk.name is None:
                continue

            if kk.name in seen_names:
                # Second (or later) occurrence: record the first one too, once.
                if kk.name not in opens:
                    opens[kk.name].append(seen_names[kk.name])
                opens[kk.name].append(kk)
            else:
                seen_names[kk.name] = kk
        return opens
|
||||
5
snarled/types.py
Normal file
5
snarled/types.py
Normal file
|
|
@ -0,0 +1,5 @@
|
|||
from typing import Union, Tuple, List, Sequence, Optional, Hashable

# Any hashable value may serve as a layer identifier (e.g. int, str, (int, int)).
layer_t = Hashable
# A closed polygon contour: a list of integer (x, y) vertex pairs.
contour_t = List[Tuple[int, int]]
# Layer-connectivity spec: sequence of (top_layer, via_layer-or-None, bottom_layer).
connectivity_t = Sequence[Tuple[layer_t, Optional[layer_t], layer_t]]
|
||||
28
snarled/utils.py
Normal file
28
snarled/utils.py
Normal file
|
|
@ -0,0 +1,28 @@
|
|||
"""
|
||||
Some utility code that gets reused
|
||||
"""
|
||||
from typing import Set, Tuple
|
||||
|
||||
from .types import connectivity_t, layer_t
|
||||
|
||||
|
||||
def connectivity2layers(
        connectivity: 'connectivity_t',
        ) -> 'Tuple[Set[layer_t], Set[layer_t]]':
    """
    Extract the set of all metal layers and the set of all via layers
    from the connectivity description.

    Args:
        connectivity: Sequence of `(top_layer, via_layer, bottom_layer)`
            3-tuples; `via_layer` may be `None`.

    Returns:
        `(metal_layers, via_layers)` as two sets.

    Raises:
        ValueError: If any layer is listed both as a metal and as a via.
    """
    metal_layers = set()
    via_layers = set()
    for top, via, bot in connectivity:
        metal_layers.add(top)
        metal_layers.add(bot)
        if via is not None:
            via_layers.add(via)

    both = metal_layers.intersection(via_layers)
    if both:
        # A layer can't be both a conductor and a via cut; the spec is malformed.
        raise ValueError(f'The following layers are both vias and metals!? {both}')

    return metal_layers, via_layers
|
||||
Loading…
Add table
Add a link
Reference in a new issue