initial commit
This commit is contained in:
commit
ba228378d9
13 changed files with 1401 additions and 0 deletions
2
snarl/__init__.py
Normal file
2
snarl/__init__.py
Normal file
|
|
@ -0,0 +1,2 @@
|
|||
from .main import check_connectivity
|
||||
from . import interfaces
|
||||
47
snarl/clipper.py
Normal file
47
snarl/clipper.py
Normal file
|
|
@ -0,0 +1,47 @@
|
|||
from typing import Sequence, Optional, List
|
||||
|
||||
from numpy.typing import ArrayLike
|
||||
from pyclipper import (
|
||||
Pyclipper, PT_CLIP, PT_SUBJECT, CT_UNION, CT_INTERSECTION, PFT_NONZERO, PFT_EVENODD,
|
||||
PyPolyNode,
|
||||
)
|
||||
|
||||
from .types import contour_t
|
||||
|
||||
|
||||
def union_nonzero(shapes: Sequence[ArrayLike]) -> Optional[PyPolyNode]:
    """
    Compute the NONZERO-rule union of the given closed paths.

    Args:
        shapes: Polygons (Nx2 integer vertex sequences) to union.

    Returns:
        The clipper solution tree (`PyPolyNode`), or None if no shapes were given.
    """
    if not shapes:
        return None
    clipper = Pyclipper()
    clipper.AddPaths(shapes, PT_CLIP, closed=True)
    return clipper.Execute2(CT_UNION, PFT_NONZERO, PFT_NONZERO)
|
||||
|
||||
|
||||
def union_evenodd(shapes: Sequence[ArrayLike]) -> List[contour_t]:
    """
    Compute the EVENODD-rule union of the given closed paths.

    Args:
        shapes: Polygons (Nx2 integer vertex sequences) to union.

    Returns:
        Flat list of resulting contours; [] if no shapes were given.
    """
    if not shapes:
        return []
    clipper = Pyclipper()
    clipper.AddPaths(shapes, PT_CLIP, closed=True)
    return clipper.Execute(CT_UNION, PFT_EVENODD, PFT_EVENODD)
|
||||
|
||||
|
||||
def intersection_evenodd(
        subject_shapes: Sequence[ArrayLike],
        clip_shapes: Sequence[ArrayLike],
        ) -> List[contour_t]:
    """
    Compute the EVENODD-rule intersection of two sets of closed paths.

    Args:
        subject_shapes: "Subject" polygons (Nx2 integer vertex sequences).
        clip_shapes: "Clip" polygons to intersect against.

    Returns:
        Flat list of resulting contours; [] if either input set is empty.
    """
    if not (subject_shapes and clip_shapes):
        return []
    clipper = Pyclipper()
    clipper.AddPaths(subject_shapes, PT_SUBJECT, closed=True)
    clipper.AddPaths(clip_shapes, PT_CLIP, closed=True)
    return clipper.Execute(CT_INTERSECTION, PFT_EVENODD, PFT_EVENODD)
|
||||
|
||||
|
||||
def hier2oriented(polys: Sequence[PyPolyNode]) -> List[ArrayLike]:
    """
    Flatten one level of a PyPolyNode hierarchy into a plain contour list.

    Each node contributes its own contour followed by the contours of its
    immediate children (its holes). Deeper descendants are not visited.

    Args:
        polys: Nodes to flatten (each with `.Contour` and `.Childs`).

    Returns:
        List of contours: [outer0, hole0a, hole0b, ..., outer1, hole1a, ...]
    """
    result: List[ArrayLike] = []
    for node in polys:
        result.append(node.Contour)
        result.extend(child.Contour for child in node.Childs)
    return result
|
||||
0
snarl/interfaces/__init__.py
Normal file
0
snarl/interfaces/__init__.py
Normal file
66
snarl/interfaces/masque.py
Normal file
66
snarl/interfaces/masque.py
Normal file
|
|
@ -0,0 +1,66 @@
|
|||
from typing import Sequence, Dict, List, Any, Tuple, Optional, Mapping
|
||||
from collections import defaultdict
|
||||
|
||||
import numpy
|
||||
from numpy.typing import NDArray
|
||||
from masque import Pattern
|
||||
from masque.file import oasis, gdsii
|
||||
from masque.shapes import Polygon
|
||||
|
||||
from ..types import layer_t
|
||||
from ..utils import connectivity2layers
|
||||
|
||||
|
||||
def read_topcell(
        topcell: Pattern,
        connectivity: Sequence[Tuple[layer_t, Optional[layer_t], layer_t]],
        label_mapping: Optional[Mapping[layer_t, layer_t]] = None,
        ) -> Tuple[
            defaultdict[layer_t, List[NDArray[numpy.float64]]],
            defaultdict[layer_t, List[Tuple[float, float, str]]]]:
    """
    Extract connectivity-relevant polygons and net labels from a masque Pattern.

    Args:
        topcell: Pattern to read. It is deep-copied first, so the input is not modified.
        connectivity: Sequence of (top_metal, via, bottom_metal) layer triples
            (via may be None for direct metal-to-metal connections).
        label_mapping: Maps label layers to the metal layer they annotate.
            Defaults to each metal layer labeling itself.

    Returns:
        (polys, metal_labels):
            polys: layer -> list of Nx2 vertex arrays (flattened, absolute coordinates).
            metal_labels: metal layer -> list of (x, y, label_string) tuples.
    """
    metal_layers, via_layers = connectivity2layers(connectivity)
    poly_layers = metal_layers | via_layers

    if label_mapping is None:
        label_mapping = {layer: layer for layer in metal_layers}
    label_layers = {label_layer for label_layer in label_mapping.keys()}

    # Keep only shapes/labels on relevant layers (all subpatterns retained),
    # then flatten the hierarchy so shapes/labels carry absolute positions.
    topcell = topcell.deepcopy().subset(
        shapes_func=lambda ss: ss.layer in poly_layers,
        labels_func=lambda ll: ll.layer in label_layers,
        subpatterns_func=lambda ss: True,
        )
    topcell = topcell.flatten()

    polys = load_polys(topcell, list(poly_layers))

    # Gather labels, rerouting each label layer onto its target metal layer.
    metal_labels = defaultdict(list)
    for label_layer, metal_layer in label_mapping.items():
        labels = [ll for ll in topcell.labels if ll.layer == label_layer]
        # assumes ll.offset is a 2-element (x, y) position -- TODO confirm against masque
        metal_labels[metal_layer] += [(*ll.offset, ll.string) for ll in labels]

    return polys, metal_labels
|
||||
|
||||
|
||||
def load_polys(
        topcell: Pattern,
        layers: Sequence[layer_t],
        ) -> defaultdict[layer_t, List[NDArray[numpy.float64]]]:
    """
    Collect absolute-coordinate vertex arrays for all shapes on the given layers.

    Args:
        topcell: Flattened pattern whose shapes are read.
        layers: Layers whose shapes should be collected.

    Returns:
        layer -> list of Nx2 vertex arrays, one per (repeated) polygon instance.
    """
    polys = defaultdict(list)
    for shape in topcell.shapes:
        if shape.layer not in layers:
            continue

        assert isinstance(shape, Polygon)

        # A repetition expands one shape into several displaced copies.
        if shape.repetition is None:
            offsets = [(0, 0)]
        else:
            offsets = shape.repetition.displacements

        for off in offsets:
            polys[shape.layer].append(
                shape.vertices + shape.offset + off
                )
    return polys
|
||||
186
snarl/main.py
Normal file
186
snarl/main.py
Normal file
|
|
@ -0,0 +1,186 @@
|
|||
from typing import Tuple, List, Dict, Set, Optional, Union, Sequence, Mapping
|
||||
from collections import defaultdict
|
||||
from pprint import pformat
|
||||
import logging
|
||||
|
||||
import numpy
|
||||
from numpy.typing import NDArray, ArrayLike
|
||||
from pyclipper import scale_to_clipper, scale_from_clipper, PyPolyNode
|
||||
|
||||
from .types import connectivity_t, layer_t, contour_t, net_name_t
|
||||
from .poly import poly_contains_points
|
||||
from .clipper import union_nonzero, union_evenodd, intersection_evenodd, hier2oriented
|
||||
from .tracker import NetsInfo
|
||||
from .utils import connectivity2layers
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def check_connectivity(
        polys: Mapping[layer_t, Sequence[ArrayLike]],
        labels: Mapping[layer_t, Sequence[Tuple[float, float, str]]],
        connectivity: Sequence[Tuple[layer_t, Optional[layer_t], layer_t]],
        label_mapping: Optional[Mapping[layer_t, layer_t]] = None,
        clipper_scale_factor: int = int(2 ** 24),
        ) -> NetsInfo:
    """
    Check electrical connectivity between labeled nets.

    Args:
        polys: Maps each layer to the polygons (Nx2 vertex arrays) on it.
        labels: Maps each layer to its net labels as (x, y, name) tuples.
            NOTE(review): keys are indexed into `metal_polys`, so they are
            presumably metal layers -- confirm against callers.
        connectivity: Sequence of (top_metal, via, bottom_metal) triples; via may
            be None, in which case the metals connect by direct overlap.
        label_mapping: Maps label layers to the metal layer they annotate.
            Defaults to each metal layer labeling itself.
        clipper_scale_factor: Multiplier applied when converting coordinates into
            clipper's integer space. Default 2**24.

    Returns:
        NetsInfo recording each net's polygons (per layer) and all merges
        (shorts and via connections) discovered.
    """
    metal_layers, via_layers = connectivity2layers(connectivity)
    if label_mapping is None:
        label_mapping = {layer: layer for layer in metal_layers}

    # Scale into clipper's integer coordinate space and NONZERO-union each layer.
    metal_polys = {layer: union_input_polys(scale_to_clipper(polys[layer], clipper_scale_factor))
                   for layer in metal_layers}
    via_polys = {layer: union_input_polys(scale_to_clipper(polys[layer], clipper_scale_factor))
                 for layer in via_layers}

    nets_info = NetsInfo()

    # Name each merged polygon after the first label found inside it; polygons
    # containing multiple labels are shorts and their nets are queued for merging.
    merge_groups: List[List[net_name_t]] = []
    for layer, labels_for_layer in labels.items():
        point_xys = []
        point_names = []
        for x, y, point_name in labels_for_layer:
            point_xys.append((x, y))
            point_names.append(point_name)

        for poly in metal_polys[layer]:
            found_nets = label_poly(poly, point_xys, point_names, clipper_scale_factor)

            name: net_name_t
            if found_nets:
                name = found_nets[0]
            else:
                name = object()  # Anonymous net

            nets_info.get(name, layer).append(poly)

            if len(found_nets) > 1:
                # Found a short
                logger.warning(f'Nets {found_nets} are shorted on layer {layer} in poly:\n {pformat(poly)}')
                merge_groups.append(found_nets)  # type: ignore

    # Merge all nets that shared a polygon (shorts found above).
    for group in merge_groups:
        first_net, *defunct_nets = group
        for defunct_net in defunct_nets:
            nets_info.merge(first_net, defunct_net)

    #
    # Take EVENODD union within each net
    # & stay in EVENODD-friendly representation
    #
    for net in nets_info.nets.values():
        for layer in net:
            #net[layer] = union_evenodd(hier2oriented(net[layer]))
            net[layer] = hier2oriented(net[layer])

    for layer in via_polys:
        via_polys[layer] = hier2oriented(via_polys[layer])

    # Merge nets joined through vias (or direct metal-metal overlap).
    merge_pairs = find_merge_pairs(connectivity, nets_info.nets, via_polys)
    for net_a, net_b in merge_pairs:
        nets_info.merge(net_a, net_b)

    # NOTE(review): debug output to stdout; presumably intended for CLI use --
    # confirm, otherwise route through `logger`.
    print('merged pairs')
    print(pformat(merge_pairs))

    print('\nFinal nets:')
    print([kk for kk in nets_info.nets if isinstance(kk, str)])

    print('\nNet sets:')
    for short in nets_info.get_shorted_nets():
        print('(' + ','.join(sorted(list(short))) + ')')

    return nets_info
|
||||
|
||||
|
||||
def union_input_polys(polys: List[ArrayLike]) -> List[PyPolyNode]:
    """
    NONZERO-union the raw input polygons and return all "outer" (non-hole)
    nodes of the resulting hierarchy as a flat list of root nodes.

    Args:
        polys: Polygons (Nx2 vertex arrays) to union. Coordinates should be
            integers; non-integer values trigger a warning (clipper truncates).

    Returns:
        List of PyPolyNodes, each an outer contour with its holes as children.
    """
    if any((numpy.abs(pp) % 1).any() for pp in polys):
        logger.warning('Warning: union_polys got non-integer coordinates; all values will be truncated.')

    tree = union_nonzero(polys)
    if tree is None:
        return []

    # Partially flatten the tree: every non-hole node becomes a root.
    outers: List[PyPolyNode] = []
    stack = [tree]                  # containers (tree root) or holes, pending a visit
    while stack:
        parent = stack.pop()
        for outer in parent.Childs:
            outers.append(outer)
            # Children of an outer are its holes; *their* children are new outers.
            stack.extend(outer.Childs)

    return outers
|
||||
|
||||
|
||||
def label_poly(
        poly: PyPolyNode,
        point_xys: ArrayLike,
        point_names: Sequence[str],
        clipper_scale_factor: int = int(2 ** 24),
        ) -> List[str]:
    """
    Find which label points land on a polygon (inside its outer contour but
    outside all of its holes).

    Args:
        poly: Polygon node (outer contour with hole children), in clipper coords.
        point_xys: Nx2 label positions, in un-scaled (original) coordinates.
        point_names: Net name for each point, parallel to `point_xys`.
        clipper_scale_factor: Factor used to scale contours back out of
            clipper's integer space. Default 2**24.

    Returns:
        Sorted list of names of the points inside the polygon; [] if none.
    """
    outer_contour = scale_from_clipper(poly.Contour, clipper_scale_factor)
    inside = poly_contains_points(outer_contour, point_xys)

    # A point inside a hole is not on this polygon.
    for hole in poly.Childs:
        hole_contour = scale_from_clipper(hole.Contour, clipper_scale_factor)
        inside &= ~poly_contains_points(hole_contour, point_xys)

    if not inside.any():
        return []
    return sorted(name for name, hit in zip(point_names, inside) if hit)
|
||||
|
||||
|
||||
def find_merge_pairs(
        connectivity: connectivity_t,
        nets: Mapping[net_name_t, Mapping[layer_t, Sequence[contour_t]]],
        via_polys: Mapping[layer_t, Sequence[contour_t]],
        ) -> Set[Tuple[net_name_t, net_name_t]]:
    """
    Find pairs of nets which are electrically connected between layers, either
    through a via (when the connectivity entry names a via layer) or by direct
    overlap of the two metal layers (when the via layer is None).

    Args:
        connectivity: Sequence of (top_metal, via_or_None, bottom_metal) triples.
        nets: Maps net name -> layer -> contours for that net on that layer.
        via_polys: Maps via layer -> contours.

    Returns:
        Set of (net_a, net_b) name pairs that should be merged.
    """
    #
    # Merge nets based on via connectivity
    #
    merge_pairs = set()
    for top_layer, via_layer, bot_layer in connectivity:
        if via_layer is not None:
            vias = via_polys[via_layer]
            if not vias:
                # No vias between these layers -> nothing can connect here.
                continue

        #TODO deal with polygons that have holes (loops?)

        for top_name in nets.keys():
            top_polys = nets[top_name][top_layer]
            if not top_polys:
                continue

            for bot_name in nets.keys():
                if bot_name == top_name:
                    continue
                # Canonical ordering so (a, b) and (b, a) dedupe via the set.
                # NOTE(review): id() ordering is arbitrary but stable within a run.
                name_pair = tuple(sorted((top_name, bot_name), key=lambda s: id(s)))
                if name_pair in merge_pairs:
                    continue

                bot_polys = nets[bot_name][bot_layer]
                if not bot_polys:
                    continue

                if via_layer is not None:
                    # Connected only where a via overlaps both metal layers.
                    via_top = intersection_evenodd(top_polys, vias)
                    overlap = intersection_evenodd(via_top, bot_polys)
                else:
                    overlap = intersection_evenodd(top_polys, bot_polys)        # TODO verify there aren't any suspicious corner cases for this

                if not overlap:
                    continue

                merge_pairs.add(name_pair)
    return merge_pairs
|
||||
64
snarl/poly.py
Normal file
64
snarl/poly.py
Normal file
|
|
@ -0,0 +1,64 @@
|
|||
import numpy
|
||||
from numpy.typing import NDArray, ArrayLike
|
||||
|
||||
|
||||
def poly_contains_points(
        vertices: ArrayLike,
        points: ArrayLike,
        include_boundary: bool = True,
        ) -> NDArray[numpy.bool_]:
    """
    Tests whether the provided points are inside the implicitly closed polygon
    described by the provided list of vertices.

    Uses a winding-number test (NONZERO-style fill rule).

    Args:
        vertices: Nx2 Arraylike of form [[x0, y0], [x1, y1], ...], describing an implicitly-
            closed polygon. Note that this should include any offsets.
        points: Nx2 ArrayLike of form [[x0, y0], [x1, y1], ...] containing the points to test.
        include_boundary: True if points on the boundary should be count as inside the shape.
            Default True.

    Returns:
        ndarray of booleans, [point0_is_in_shape, point1_is_in_shape, ...]
    """
    # asarray instead of array(..., copy=False): the latter raises ValueError on
    # NumPy >= 2.0 whenever a copy is required (e.g. for list inputs).
    points = numpy.asarray(points)
    vertices = numpy.asarray(vertices)

    if points.size == 0:
        # dtype=bool so the empty result matches the documented boolean return
        # (plain zeros(0) yielded float64).
        return numpy.zeros(0, dtype=bool)

    min_bounds = numpy.min(vertices, axis=0)[None, :]
    max_bounds = numpy.max(vertices, axis=0)[None, :]

    # Cheap bounding-box rejection before the full winding test.
    trivially_outside = ((points < min_bounds).any(axis=1)
                       | (points > max_bounds).any(axis=1))

    nontrivial = ~trivially_outside
    if trivially_outside.all():
        inside = numpy.zeros_like(trivially_outside, dtype=bool)
        return inside

    ntpts = points[None, nontrivial, :]    # nontrivial points, along axis 1 of ndarray
    verts = vertices[:, None, :]           # vertices, along axis 0
    xydiff = ntpts - verts                 # Expands into (n_vertices, n_ntpts, 2)

    y0_le = xydiff[:, :, 1] >= 0           # y_point >= y_vertex (axes 0, 1 for all points & vertices)
    y1_le = numpy.roll(y0_le, -1, axis=0)  # same thing for next vertex

    upward = y0_le & ~y1_le       # edge passes point y coord going upwards
    downward = ~y0_le & y1_le     # edge passes point y coord going downwards

    dv = numpy.roll(verts, -1, axis=0) - verts
    is_left = (dv[..., 0] * xydiff[..., 1]      # >0 if left of dv, <0 if right, 0 if on the line
             - dv[..., 1] * xydiff[..., 0])

    # Standard winding-number accumulation: +1 for upward edges passing left of
    # the point, -1 for downward edges passing right of it.
    winding_number = ((upward & (is_left > 0)).sum(axis=0)
                    - (downward & (is_left < 0)).sum(axis=0))

    nontrivial_inside = winding_number != 0        # filter nontrivial points based on winding number
    if include_boundary:
        nontrivial_inside[(is_left == 0).any(axis=0)] = True       # check if point lies on any edge

    inside = nontrivial.copy()
    inside[nontrivial] = nontrivial_inside
    return inside
|
||||
50
snarl/tracker.py
Normal file
50
snarl/tracker.py
Normal file
|
|
@ -0,0 +1,50 @@
|
|||
from typing import List, Set
|
||||
from collections import defaultdict
|
||||
|
||||
from .types import layer_t, net_name_t, contour_t
|
||||
|
||||
|
||||
class NetsInfo:
    """
    Tracks nets (per-layer groups of polygons) and merge relationships
    between them.

    Net names are either strings (taken from layout labels) or anonymous
    `object()` sentinels. When two nets are found to be connected, one is
    merged into the other and recorded as an alias of the survivor.
    """
    # All polygons assigned to each net, indexed by net name then layer.
    nets: defaultdict[net_name_t, defaultdict[layer_t, List]]
    # Maps each merged-away (defunct) net name to the net it was merged into.
    # Aliases may chain; `resolve_name` follows them to the live name.
    net_aliases: dict[net_name_t, net_name_t]

    def __init__(self) -> None:
        self.nets = defaultdict(lambda: defaultdict(list))
        # Was `defaultdict(list)`: an accidental read of a missing key would
        # silently insert an empty *list* as an alias target (wrong value type,
        # and would corrupt `resolve_name`). A plain dict raises KeyError.
        self.net_aliases = {}

    def resolve_name(self, net_name: net_name_t) -> net_name_t:
        """Follow alias chains to the canonical (live) name for `net_name`."""
        while net_name in self.net_aliases:
            net_name = self.net_aliases[net_name]
        return net_name

    def merge(self, net_a: net_name_t, net_b: net_name_t) -> None:
        """Merge two nets, moving all polygons onto the surviving name."""
        net_a = self.resolve_name(net_a)
        net_b = self.resolve_name(net_b)

        # Always keep named nets if the other is anonymous
        if not isinstance(net_a, str) and isinstance(net_b, str):
            keep_net, old_net = net_b, net_a
        else:
            keep_net, old_net = net_a, net_b

        #logger.info(f'merging {old_net} into {keep_net}')
        self.net_aliases[old_net] = keep_net
        if old_net in self.nets:
            # Move every layer's polygons from the defunct net to the survivor.
            for layer in self.nets[old_net]:
                self.nets[keep_net][layer] += self.nets[old_net][layer]
            del self.nets[old_net]

    def get(self, net: net_name_t, layer: layer_t) -> List[contour_t]:
        """Return (creating if needed) the polygon list for `net` on `layer`."""
        return self.nets[self.resolve_name(net)][layer]

    def get_shorted_nets(self) -> List[Set[str]]:
        """
        Group the string-named nets by their canonical (surviving) name.

        Returns:
            One set per shorted group, containing the canonical name and every
            named net that was merged onto it.
        """
        shorts = defaultdict(list)
        for kk in self.net_aliases:
            if isinstance(kk, str):
                base_name = self.resolve_name(kk)
                assert isinstance(base_name, str)
                shorts[base_name].append(kk)

        shorted_sets = [set([kk] + others)
                        for kk, others in shorts.items()]
        return shorted_sets
|
||||
6
snarl/types.py
Normal file
6
snarl/types.py
Normal file
|
|
@ -0,0 +1,6 @@
|
|||
from typing import Union, Tuple, List, Sequence, Optional
|
||||
|
||||
# A layer specification: (layer, datatype) pair, as used by GDSII/OASIS-style layouts.
layer_t = Tuple[int, int]

# A single polygon contour: list of integer (x, y) vertices.
contour_t = List[Tuple[int, int]]

# Net names are either explicit strings (from labels) or anonymous sentinel objects.
net_name_t = Union[str, object]

# Connectivity spec: sequence of (top_metal, via_or_None, bottom_metal) layer triples.
connectivity_t = Sequence[Tuple[layer_t, Optional[layer_t], layer_t]]
|
||||
19
snarl/utils.py
Normal file
19
snarl/utils.py
Normal file
|
|
@ -0,0 +1,19 @@
|
|||
from typing import Set, Tuple
|
||||
|
||||
from .types import connectivity_t, layer_t
|
||||
|
||||
|
||||
def connectivity2layers(
        connectivity: connectivity_t,
        ) -> Tuple[Set[layer_t], Set[layer_t]]:
    """
    Split a connectivity specification into its metal and via layer sets.

    Args:
        connectivity: Sequence of (top_metal, via, bottom_metal) triples;
            `via` may be None for a direct metal-to-metal connection.

    Returns:
        (metal_layers, via_layers) as sets.
    """
    metal_layers: Set[layer_t] = set()
    via_layers: Set[layer_t] = set()
    for top_layer, via_layer, bot_layer in connectivity:
        metal_layers.update((top_layer, bot_layer))
        if via_layer is not None:
            via_layers.add(via_layer)

    # TODO verify no overlap between metal and via layer specifications

    return metal_layers, via_layers
|
||||
Loading…
Add table
Add a link
Reference in a new issue