Compare commits

...

3 commits

38 changed files with 2623 additions and 1905 deletions

View file

@ -19,6 +19,7 @@ The `AStarContext` stores the configuration and persistent state for the A* sear
| `sbend_penalty` | `float` | 500.0 | Flat cost added for every S-bend. |
| `bend_collision_type` | `str` | `"arc"` | Collision model for bends: `"arc"`, `"bbox"`, or `"clipped_bbox"`. |
| `bend_clip_margin` | `float` | 10.0 | Extra space (µm) around the waveguide for clipped models. |
| `visibility_guidance` | `str` | `"tangent_corner"` | Visibility-driven straight candidate mode: `"off"`, `"exact_corner"`, or `"tangent_corner"`. |
## 2. AStarMetrics
@ -86,6 +87,12 @@ If the router produces many small bends instead of a long straight line:
2. Ensure `straight_lengths` includes larger values like `25.0` or `100.0`.
3. Decrease `greedy_h_weight` closer to `1.0`.
### Visibility Guidance
The router can bias straight stop points using static obstacle corners.
- **`"tangent_corner"`**: Default. Proposes straight lengths that set up a clean tangent bend around nearby visible corners. This helps obstacle-dense layouts more than open space.
- **`"exact_corner"`**: Only uses precomputed corner-to-corner visibility when the current search state already lands on an obstacle corner.
- **`"off"`**: Disables visibility-derived straight candidates entirely.
### Handling Congestion
In multi-net designs, if nets are overlapping:
1. Increase `congestion_penalty` in `CostEvaluator`.

View file

@ -23,7 +23,7 @@ def main() -> None:
# 2. Configure Router
evaluator = CostEvaluator(engine, danger_map)
context = AStarContext(evaluator, snap_size=1.0, bend_radii=[10.0])
context = AStarContext(evaluator, bend_radii=[10.0])
metrics = AStarMetrics()
pf = PathFinder(context, metrics)

View file

@ -17,8 +17,8 @@ def main() -> None:
danger_map.precompute([])
# Configure a router with high congestion penalties
evaluator = CostEvaluator(engine, danger_map, greedy_h_weight=1.5, bend_penalty=50.0, sbend_penalty=150.0)
context = AStarContext(evaluator, snap_size=1.0, bend_radii=[10.0], sbend_radii=[10.0])
evaluator = CostEvaluator(engine, danger_map, greedy_h_weight=1.5, bend_penalty=250.0, sbend_penalty=500.0)
context = AStarContext(evaluator, bend_radii=[10.0], sbend_radii=[10.0])
metrics = AStarMetrics()
pf = PathFinder(context, metrics, base_congestion_penalty=1000.0)

Binary file not shown.

Before

Width:  |  Height:  |  Size: 67 KiB

After

Width:  |  Height:  |  Size: 72 KiB

Before After
Before After

View file

@ -16,8 +16,8 @@ def main() -> None:
danger_map = DangerMap(bounds=bounds)
danger_map.precompute([])
evaluator = CostEvaluator(engine, danger_map)
context = AStarContext(evaluator, snap_size=1.0, bend_radii=[10.0])
evaluator = CostEvaluator(engine, danger_map, bend_penalty=250.0, sbend_penalty=500.0)
context = AStarContext(evaluator, bend_radii=[10.0])
metrics = AStarMetrics()
pf = PathFinder(context, metrics)

View file

@ -28,7 +28,6 @@ def main() -> None:
context = AStarContext(
evaluator,
node_limit=50000,
snap_size=1.0,
bend_radii=[10.0, 30.0],
sbend_offsets=[5.0], # Use a simpler offset
bend_penalty=10.0,

Binary file not shown.

Before

Width:  |  Height:  |  Size: 101 KiB

After

Width:  |  Height:  |  Size: 92 KiB

Before After
Before After

View file

@ -17,7 +17,7 @@ def main() -> None:
danger_map.precompute([])
evaluator = CostEvaluator(engine, danger_map, bend_penalty=50.0)
context = AStarContext(evaluator, snap_size=5.0, bend_radii=[20.0])
context = AStarContext(evaluator, bend_radii=[20.0])
metrics = AStarMetrics()
pf = PathFinder(context, metrics)

Binary file not shown.

Before

Width:  |  Height:  |  Size: 80 KiB

After

Width:  |  Height:  |  Size: 84 KiB

Before After
Before After

View file

@ -33,15 +33,15 @@ def main() -> None:
evaluator = CostEvaluator(engine, danger_map, bend_penalty=50.0, sbend_penalty=150.0)
# Scenario 1: Standard 'arc' model (High fidelity)
context_arc = AStarContext(evaluator, snap_size=1.0, bend_radii=[10.0], bend_collision_type="arc")
context_arc = AStarContext(evaluator, bend_radii=[10.0], bend_collision_type="arc")
netlist_arc = {"arc_model": (Port(10, 120, 0), Port(90, 140, 90))}
# Scenario 2: 'bbox' model (Conservative axis-aligned box)
context_bbox = AStarContext(evaluator, snap_size=1.0, bend_radii=[10.0], bend_collision_type="bbox")
context_bbox = AStarContext(evaluator, bend_radii=[10.0], bend_collision_type="bbox")
netlist_bbox = {"bbox_model": (Port(10, 70, 0), Port(90, 90, 90))}
# Scenario 3: 'clipped_bbox' model (Balanced)
context_clipped = AStarContext(evaluator, snap_size=1.0, bend_radii=[10.0], bend_collision_type="clipped_bbox", bend_clip_margin=1.0)
context_clipped = AStarContext(evaluator, bend_radii=[10.0], bend_collision_type="clipped_bbox", bend_clip_margin=1.0)
netlist_clipped = {"clipped_model": (Port(10, 20, 0), Port(90, 40, 90))}
# 2. Route each scenario

View file

@ -10,7 +10,7 @@ from inire.utils.visualization import plot_routing_results, plot_danger_map, plo
from shapely.geometry import box
def main() -> None:
print("Running Example 07: Fan-Out (10 Nets, 50um Radius, 5um Grid)...")
print("Running Example 07: Fan-Out (10 Nets, 50um Radius)...")
# 1. Setup Environment
bounds = (0, 0, 1000, 1000)
@ -29,7 +29,7 @@ def main() -> None:
evaluator = CostEvaluator(engine, danger_map, greedy_h_weight=1.5, unit_length_cost=0.1, bend_penalty=100.0, sbend_penalty=400.0, congestion_penalty=100.0)
context = AStarContext(evaluator, node_limit=2000000, snap_size=5.0, bend_radii=[50.0], sbend_radii=[50.0])
context = AStarContext(evaluator, node_limit=2000000, bend_radii=[50.0], sbend_radii=[50.0])
metrics = AStarMetrics()
pf = PathFinder(context, metrics, max_iterations=15, base_congestion_penalty=100.0, congestion_multiplier=1.4)
@ -44,8 +44,8 @@ def main() -> None:
end_y_pitch = 800.0 / (num_nets - 1)
for i in range(num_nets):
sy = round((start_y_base + i * 10.0) / 5.0) * 5.0
ey = round((end_y_base + i * end_y_pitch) / 5.0) * 5.0
sy = int(round(start_y_base + i * 10.0))
ey = int(round(end_y_base + i * end_y_pitch))
netlist[f"net_{i:02d}"] = (Port(start_x, sy, 0), Port(end_x, ey, 0))
net_widths = {nid: 2.0 for nid in netlist}
@ -60,39 +60,8 @@ def main() -> None:
total_collisions = sum(r.collisions for r in current_results.values())
total_nodes = metrics.nodes_expanded
# Identify Hotspots
hotspots = {}
overlap_matrix = {} # (net_a, net_b) -> count
for nid, res in current_results.items():
if not res.path:
continue
for comp in res.path:
for poly in comp.geometry:
# Check what it overlaps with
overlaps = engine.dynamic_index.intersection(poly.bounds)
for other_obj_id in overlaps:
if other_obj_id in engine.dynamic_geometries:
other_nid, other_poly = engine.dynamic_geometries[other_obj_id]
if other_nid != nid:
if poly.intersects(other_poly):
# Record hotspot
cx, cy = poly.centroid.x, poly.centroid.y
grid_key = (int(cx/20)*20, int(cy/20)*20)
hotspots[grid_key] = hotspots.get(grid_key, 0) + 1
# Record pair
pair = tuple(sorted((nid, other_nid)))
overlap_matrix[pair] = overlap_matrix.get(pair, 0) + 1
print(f" Iteration {idx} finished. Successes: {successes}/{len(netlist)}, Collisions: {total_collisions}")
if overlap_matrix:
top_pairs = sorted(overlap_matrix.items(), key=lambda x: x[1], reverse=True)[:3]
print(f" Top Conflicts: {top_pairs}")
if hotspots:
top_hotspots = sorted(hotspots.items(), key=lambda x: x[1], reverse=True)[:3]
print(f" Top Hotspots: {top_hotspots}")
# Adaptive Greediness: Decay from 1.5 to 1.1 over 10 iterations
new_greedy = max(1.1, 1.5 - ((idx + 1) / 10.0) * 0.4)
evaluator.greedy_h_weight = new_greedy
@ -104,46 +73,12 @@ def main() -> None:
'Congestion': total_collisions,
'Nodes': total_nodes
})
# Save plots only for certain iterations to save time
# if idx % 20 == 0 or idx == pf.max_iterations - 1:
if True:
# Save a plot of this iteration's result
fig, ax = plot_routing_results(current_results, obstacles, bounds, netlist=netlist)
plot_danger_map(danger_map, ax=ax)
# Overlay failures: show where they stopped
for nid, res in current_results.items():
if not res.is_valid and res.path:
last_p = res.path[-1].end_port
target_p = netlist[nid][1]
dist = abs(last_p.x - target_p.x) + abs(last_p.y - target_p.y)
ax.scatter(last_p.x, last_p.y, color='red', marker='x', s=100)
ax.text(last_p.x, last_p.y, f" {nid} (rem: {dist:.0f}um)", color='red', fontsize=8)
fig.savefig(f"examples/07_iteration_{idx:02d}.png")
import matplotlib.pyplot as plt
plt.close(fig)
# Plot Expansion Density if data is available
if pf.accumulated_expanded_nodes:
fig_d, ax_d = plot_expansion_density(pf.accumulated_expanded_nodes, bounds)
fig_d.savefig(f"examples/07_iteration_{idx:02d}_density.png")
plt.close(fig_d)
metrics.reset_per_route()
import cProfile, pstats
profiler = cProfile.Profile()
profiler.enable()
t0 = time.perf_counter()
results = pf.route_all(netlist, net_widths, store_expanded=True, iteration_callback=iteration_callback, shuffle_nets=True, seed=42)
t1 = time.perf_counter()
profiler.disable()
# Final stats
stats = pstats.Stats(profiler).sort_stats('tottime')
stats.print_stats(20)
print(f"Routing took {t1-t0:.4f}s")
# 4. Check Results
@ -157,28 +92,15 @@ def main() -> None:
print(f"\nFinal: Routed {success_count}/{len(netlist)} nets successfully.")
for nid, res in results.items():
target_p = netlist[nid][1]
if not res.is_valid:
last_p = res.path[-1].end_port if res.path else netlist[nid][0]
dist = abs(last_p.x - target_p.x) + abs(last_p.y - target_p.y)
print(f" FAILED: {nid} (Stopped {dist:.1f}um from target)")
print(f" FAILED: {nid}, collisions={res.collisions}")
else:
types = [move.move_type for move in res.path]
from collections import Counter
counts = Counter(types)
print(f" {nid}: {len(res.path)} segments, {dict(counts)}")
print(f" {nid}: SUCCESS")
# 5. Visualize
fig, ax = plot_routing_results(results, obstacles, bounds, netlist=netlist)
# Overlay Danger Map
plot_danger_map(danger_map, ax=ax)
# Overlay Expanded Nodes from last routed net (as an example)
if metrics.last_expanded_nodes:
print(f"Plotting {len(metrics.last_expanded_nodes)} expanded nodes for the last net...")
plot_expanded_nodes(metrics.last_expanded_nodes, ax=ax, color='blue', alpha=0.1)
fig.savefig("examples/07_large_scale_routing.png")
print("Saved plot to examples/07_large_scale_routing.png")

Binary file not shown.

Before

Width:  |  Height:  |  Size: 90 KiB

After

Width:  |  Height:  |  Size: 75 KiB

Before After
Before After

View file

@ -19,7 +19,7 @@ def main() -> None:
danger_map.precompute([])
evaluator = CostEvaluator(engine, danger_map, bend_penalty=50.0, sbend_penalty=150.0)
context = AStarContext(evaluator, snap_size=1.0, bend_radii=[10.0], sbend_radii=[])
context = AStarContext(evaluator, bend_radii=[10.0], sbend_radii=[])
metrics = AStarMetrics()
pf = PathFinder(context, metrics)
@ -40,7 +40,7 @@ def main() -> None:
print("Routing with custom collision model...")
# Override bend_collision_type with a literal Polygon
context_custom = AStarContext(evaluator, snap_size=1.0, bend_radii=[10.0], bend_collision_type=custom_poly, sbend_radii=[])
context_custom = AStarContext(evaluator, bend_radii=[10.0], bend_collision_type=custom_poly, sbend_radii=[])
metrics_custom = AStarMetrics()
results_custom = PathFinder(context_custom, metrics_custom, use_tiered_strategy=False).route_all(
{"custom_model": netlist["custom_bend"]}, {"custom_model": 2.0}

View file

@ -8,51 +8,49 @@ from inire.utils.visualization import plot_routing_results
from shapely.geometry import box
def main() -> None:
print("Running Example 09: Best-Effort (Unroutable Net)...")
print("Running Example 09: Best-Effort Under Tight Search Budget...")
# 1. Setup Environment
bounds = (0, 0, 100, 100)
engine = CollisionEngine(clearance=2.0)
# Create a 'cage' that completely blocks the target
cage = [
box(70, 30, 75, 70), # Left wall
box(70, 70, 95, 75), # Top wall
box(70, 25, 95, 30), # Bottom wall
# A small obstacle cluster keeps the partial route visually interesting.
obstacles = [
box(35, 35, 45, 65),
box(55, 35, 65, 65),
]
for obs in cage:
for obs in obstacles:
engine.add_static_obstacle(obs)
danger_map = DangerMap(bounds=bounds)
danger_map.precompute(cage)
danger_map.precompute(obstacles)
evaluator = CostEvaluator(engine, danger_map, bend_penalty=50.0, sbend_penalty=150.0)
# Use a low node limit to fail faster
context = AStarContext(evaluator, node_limit=2000, snap_size=1.0, bend_radii=[10.0])
# Keep the search budget intentionally tiny so the router returns a partial path.
context = AStarContext(evaluator, node_limit=3, bend_radii=[10.0])
metrics = AStarMetrics()
# Enable partial path return (handled internally by PathFinder calling route_astar with return_partial=True)
pf = PathFinder(context, metrics)
# 2. Define Netlist: start outside, target inside the cage
pf = PathFinder(context, metrics, warm_start=None)
# 2. Define Netlist: reaching the target requires additional turns the search budget cannot afford.
netlist = {
"trapped_net": (Port(10, 50, 0), Port(85, 50, 0)),
"budget_limited_net": (Port(10, 50, 0), Port(85, 60, 180)),
}
net_widths = {"trapped_net": 2.0}
net_widths = {"budget_limited_net": 2.0}
# 3. Route
print("Routing net into a cage (should fail and return partial)...")
print("Routing with a deliberately tiny node budget (should return a partial path)...")
results = pf.route_all(netlist, net_widths)
# 4. Check Results
res = results["trapped_net"]
if not res.is_valid:
print(f"Net failed to route as expected. Partial path length: {len(res.path)} segments.")
res = results["budget_limited_net"]
if not res.reached_target:
print(f"Target not reached as expected. Partial path length: {len(res.path)} segments.")
else:
print("Wait, it found a way in? Check the cage geometry!")
print("The route unexpectedly reached the target. Increase difficulty or reduce the node budget further.")
# 5. Visualize
fig, ax = plot_routing_results(results, cage, bounds, netlist=netlist)
fig, ax = plot_routing_results(results, obstacles, bounds, netlist=netlist)
fig.savefig("examples/09_unroutable_best_effort.png")
print("Saved plot to examples/09_unroutable_best_effort.png")

View file

@ -23,12 +23,13 @@ class CollisionEngine:
'clearance', 'max_net_width', 'safety_zone_radius',
'static_index', 'static_geometries', 'static_dilated', 'static_prepared',
'static_is_rect', 'static_tree', 'static_obj_ids', 'static_safe_cache',
'static_grid', 'grid_cell_size', '_static_id_counter',
'static_grid', 'grid_cell_size', '_static_id_counter', '_net_specific_trees',
'_net_specific_is_rect', '_net_specific_bounds',
'dynamic_index', 'dynamic_geometries', 'dynamic_dilated', 'dynamic_prepared',
'dynamic_tree', 'dynamic_obj_ids', 'dynamic_grid', '_dynamic_id_counter',
'metrics', '_dynamic_tree_dirty', '_dynamic_net_ids_array', '_inv_grid_cell_size',
'_static_bounds_array', '_static_is_rect_array', '_locked_nets',
'_static_raw_tree', '_static_raw_obj_ids', '_dynamic_bounds_array'
'_static_raw_tree', '_static_raw_obj_ids', '_dynamic_bounds_array', '_static_version'
)
def __init__(
@ -53,6 +54,10 @@ class CollisionEngine:
self._static_is_rect_array: numpy.ndarray | None = None
self._static_raw_tree: STRtree | None = None
self._static_raw_obj_ids: list[int] = []
self._net_specific_trees: dict[tuple[float, float], STRtree] = {}
self._net_specific_is_rect: dict[tuple[float, float], numpy.ndarray] = {}
self._net_specific_bounds: dict[tuple[float, float], numpy.ndarray] = {}
self._static_version = 0
self.static_safe_cache: set[tuple] = set()
self.static_grid: dict[tuple[int, int], list[int]] = {}
@ -96,22 +101,21 @@ class CollisionEngine:
f" Congestion: {m['congestion_tree_queries']} checks\n"
f" Safety Zone: {m['safety_zone_checks']} full intersections performed")
def add_static_obstacle(self, polygon: Polygon) -> int:
def add_static_obstacle(self, polygon: Polygon, dilated_geometry: Polygon | None = None) -> int:
obj_id = self._static_id_counter
self._static_id_counter += 1
# Consistent with Wi/2 + C/2 separation:
# Buffer static obstacles by half clearance.
# Checkers must also buffer waveguide by Wi/2 + C/2.
dilated = polygon.buffer(self.clearance / 2.0, join_style=2)
# Preserve existing dilation if provided, else use default C/2
if dilated_geometry is not None:
dilated = dilated_geometry
else:
dilated = polygon.buffer(self.clearance / 2.0, join_style=2)
self.static_geometries[obj_id] = polygon
self.static_dilated[obj_id] = dilated
self.static_prepared[obj_id] = prep(dilated)
self.static_index.insert(obj_id, dilated.bounds)
self.static_tree = None
self._static_raw_tree = None
self.static_grid = {}
self._invalidate_static_caches()
b = dilated.bounds
area = (b[2] - b[0]) * (b[3] - b[1])
self.static_is_rect[obj_id] = (abs(dilated.area - area) < 1e-4)
@ -131,10 +135,21 @@ class CollisionEngine:
del self.static_dilated[obj_id]
del self.static_prepared[obj_id]
del self.static_is_rect[obj_id]
self._invalidate_static_caches()
def _invalidate_static_caches(self) -> None:
self.static_tree = None
self._static_bounds_array = None
self._static_is_rect_array = None
self.static_obj_ids = []
self._static_raw_tree = None
self._static_raw_obj_ids = []
self.static_grid = {}
self._net_specific_trees.clear()
self._net_specific_is_rect.clear()
self._net_specific_bounds.clear()
self.static_safe_cache.clear()
self._static_version += 1
def _ensure_static_tree(self) -> None:
if self.static_tree is None and self.static_dilated:
@ -144,6 +159,37 @@ class CollisionEngine:
self._static_bounds_array = numpy.array([g.bounds for g in geoms])
self._static_is_rect_array = numpy.array([self.static_is_rect[i] for i in self.static_obj_ids])
def _ensure_net_static_tree(self, net_width: float) -> STRtree:
"""
Lazily generate a tree where obstacles are dilated by (net_width/2 + clearance).
"""
key = (round(net_width, 4), round(self.clearance, 4))
if key in self._net_specific_trees:
return self._net_specific_trees[key]
# Physical separation must be >= clearance.
# Centerline to raw obstacle edge must be >= net_width/2 + clearance.
total_dilation = net_width / 2.0 + self.clearance
geoms = []
is_rect_list = []
bounds_list = []
for obj_id in sorted(self.static_geometries.keys()):
poly = self.static_geometries[obj_id]
dilated = poly.buffer(total_dilation, join_style=2)
geoms.append(dilated)
b = dilated.bounds
bounds_list.append(b)
area = (b[2] - b[0]) * (b[3] - b[1])
is_rect_list.append(abs(dilated.area - area) < 1e-4)
tree = STRtree(geoms)
self._net_specific_trees[key] = tree
self._net_specific_is_rect[key] = numpy.array(is_rect_list, dtype=bool)
self._net_specific_bounds[key] = numpy.array(bounds_list)
return tree
def _ensure_static_raw_tree(self) -> None:
if self._static_raw_tree is None and self.static_geometries:
self._static_raw_obj_ids = sorted(self.static_geometries.keys())
@ -205,7 +251,9 @@ class CollisionEngine:
to_move = [obj_id for obj_id, (nid, _) in self.dynamic_geometries.items() if nid == net_id]
for obj_id in to_move:
poly = self.dynamic_geometries[obj_id][1]
self.add_static_obstacle(poly)
dilated = self.dynamic_dilated[obj_id]
# Preserve dilation for perfect consistency
self.add_static_obstacle(poly, dilated_geometry=dilated)
# Remove from dynamic index (without triggering the locked-net guard)
self.dynamic_tree = None
@ -219,9 +267,9 @@ class CollisionEngine:
def unlock_net(self, net_id: str) -> None:
self._locked_nets.discard(net_id)
def check_move_straight_static(self, start_port: Port, length: float) -> bool:
def check_move_straight_static(self, start_port: Port, length: float, net_width: float) -> bool:
self.metrics['static_straight_fast'] += 1
reach = self.ray_cast(start_port, start_port.orientation, max_dist=length + 0.01)
reach = self.ray_cast(start_port, start_port.orientation, max_dist=length + 0.01, net_width=net_width)
return reach < length - 0.001
def _is_in_safety_zone_fast(self, idx: int, start_port: Port | None, end_port: Port | None) -> bool:
@ -236,19 +284,19 @@ class CollisionEngine:
b[1]-sz <= end_port.y <= b[3]+sz): return True
return False
def check_move_static(self, result: ComponentResult, start_port: Port | None = None, end_port: Port | None = None) -> bool:
def check_move_static(self, result: ComponentResult, start_port: Port | None = None, end_port: Port | None = None, net_width: float | None = None) -> bool:
if not self.static_dilated: return False
self.metrics['static_tree_queries'] += 1
self._ensure_static_tree()
# 1. Fast total bounds check
tb = result.total_bounds
# 1. Fast total bounds check (Use dilated bounds to ensure clearance is caught)
tb = result.total_dilated_bounds if result.total_dilated_bounds else result.total_bounds
hits = self.static_tree.query(box(*tb))
if hits.size == 0: return False
# 2. Per-hit check
s_bounds = self._static_bounds_array
move_poly_bounds = result.bounds
move_poly_bounds = result.dilated_bounds if result.dilated_bounds else result.bounds
for hit_idx in hits:
obs_b = s_bounds[hit_idx]
@ -266,9 +314,6 @@ class CollisionEngine:
if self._is_in_safety_zone_fast(hit_idx, start_port, end_port):
# If near port, we must use the high-precision check
obj_id = self.static_obj_ids[hit_idx]
# Triggers lazy evaluation of geometry only if needed
poly_move = result.geometry[0] # Simplification: assume 1 poly for now or loop
# Actually, better loop over move polygons for high-fidelity
collision_found = False
for p_move in result.geometry:
if not self._is_in_safety_zone(p_move, obj_id, start_port, end_port):
@ -277,13 +322,14 @@ class CollisionEngine:
return True
# Not in safety zone and AABBs overlap - check real intersection
# This is the most common path for real collisions or near misses
obj_id = self.static_obj_ids[hit_idx]
raw_obstacle = self.static_geometries[obj_id]
# Use dilated geometry (Wi/2 + C/2) against static_dilated (C/2) to get Wi/2 + C.
# Touching means gap is exactly C. Intersection without touches means gap < C.
test_geoms = result.dilated_geometry if result.dilated_geometry else result.geometry
static_obs_dilated = self.static_dilated[obj_id]
for i, p_test in enumerate(test_geoms):
if p_test.intersects(raw_obstacle):
if p_test.intersects(static_obs_dilated) and not p_test.touches(static_obs_dilated):
return True
return False
@ -339,11 +385,11 @@ class CollisionEngine:
possible_total = (tb[0] < d_bounds[:, 2]) & (tb[2] > d_bounds[:, 0]) & \
(tb[1] < d_bounds[:, 3]) & (tb[3] > d_bounds[:, 1])
valid_hits = (self._dynamic_net_ids_array != net_id)
if not numpy.any(possible_total & valid_hits):
valid_hits_mask = (self._dynamic_net_ids_array != net_id)
if not numpy.any(possible_total & valid_hits_mask):
return 0
# 2. Per-polygon AABB check using query on geometries (LAZY triggering)
# 2. Per-polygon check using query
geoms_to_test = result.dilated_geometry if result.dilated_geometry else result.geometry
res_indices, tree_indices = self.dynamic_tree.query(geoms_to_test, predicate='intersects')
@ -351,8 +397,35 @@ class CollisionEngine:
return 0
hit_net_ids = numpy.take(self._dynamic_net_ids_array, tree_indices)
valid_geoms_hits = (hit_net_ids != net_id)
return int(numpy.sum(valid_geoms_hits))
# Group by other net_id to minimize 'touches' calls
unique_other_nets = numpy.unique(hit_net_ids[hit_net_ids != net_id])
if unique_other_nets.size == 0:
return 0
tree_geoms = self.dynamic_tree.geometries
real_hits_count = 0
for other_nid in unique_other_nets:
other_mask = (hit_net_ids == other_nid)
sub_tree_indices = tree_indices[other_mask]
sub_res_indices = res_indices[other_mask]
# Check if ANY hit for THIS other net is a real collision
found_real = False
for j in range(len(sub_tree_indices)):
p_test = geoms_to_test[sub_res_indices[j]]
p_tree = tree_geoms[sub_tree_indices[j]]
if not p_test.touches(p_tree):
# Add small area tolerance for numerical precision
if p_test.intersection(p_tree).area > 1e-7:
found_real = True
break
if found_real:
real_hits_count += 1
return real_hits_count
def _is_in_safety_zone(self, geometry: Polygon, obj_id: int, start_port: Port | None, end_port: Port | None) -> bool:
"""
@ -392,17 +465,21 @@ class CollisionEngine:
self._ensure_static_tree()
if self.static_tree is None: return False
# Separation needed: (Wi + C)/2.
# static_dilated is buffered by C/2.
# So we need geometry buffered by Wi/2.
if dilated_geometry:
# Separation needed: Centerline-to-WallEdge >= Wi/2 + C.
# static_tree has obstacles buffered by C/2.
# geometry is physical waveguide (Wi/2 from centerline).
# So we buffer geometry by C/2 to get Wi/2 + C/2.
# Intersection means separation < (Wi/2 + C/2) + C/2 = Wi/2 + C.
if dilated_geometry is not None:
test_geom = dilated_geometry
else:
dist = (net_width / 2.0) if net_width is not None else 0.0
test_geom = geometry.buffer(dist + 1e-7, join_style=2) if dist >= 0 else geometry
dist = self.clearance / 2.0
test_geom = geometry.buffer(dist + 1e-7, join_style=2) if dist > 0 else geometry
hits = self.static_tree.query(test_geom, predicate='intersects')
tree_geoms = self.static_tree.geometries
for hit_idx in hits:
if test_geom.touches(tree_geoms[hit_idx]): continue
obj_id = self.static_obj_ids[hit_idx]
if self._is_in_safety_zone(geometry, obj_id, start_port, end_port): continue
return True
@ -412,60 +489,166 @@ class CollisionEngine:
if self.dynamic_tree is None: return 0
test_poly = dilated_geometry if dilated_geometry else geometry.buffer(self.clearance / 2.0)
hits = self.dynamic_tree.query(test_poly, predicate='intersects')
count = 0
tree_geoms = self.dynamic_tree.geometries
hit_net_ids = []
for hit_idx in hits:
if test_poly.touches(tree_geoms[hit_idx]): continue
obj_id = self.dynamic_obj_ids[hit_idx]
if self.dynamic_geometries[obj_id][0] != net_id: count += 1
return count
other_id = self.dynamic_geometries[obj_id][0]
if other_id != net_id:
hit_net_ids.append(other_id)
return len(numpy.unique(hit_net_ids)) if hit_net_ids else 0
def is_collision(self, geometry: Polygon, net_id: str = 'default', net_width: float | None = None, start_port: Port | None = None, end_port: Port | None = None) -> bool:
""" Unified entry point for static collision checks. """
result = self.check_collision(geometry, net_id, buffer_mode='static', start_port=start_port, end_port=end_port, net_width=net_width)
return bool(result)
def ray_cast(self, origin: Port, angle_deg: float, max_dist: float = 2000.0) -> float:
def verify_path(self, net_id: str, components: list[ComponentResult]) -> tuple[bool, int]:
"""
Non-approximated, full-polygon intersection check of a path against all
static obstacles and other nets.
"""
collision_count = 0
# 1. Check against static obstacles
self._ensure_static_raw_tree()
if self._static_raw_tree is not None:
raw_geoms = self._static_raw_tree.geometries
for comp in components:
# Use ACTUAL geometry, not dilated/proxy
actual_geoms = comp.actual_geometry if comp.actual_geometry is not None else comp.geometry
for p_actual in actual_geoms:
# Physical separation must be >= clearance.
p_verify = p_actual.buffer(self.clearance, join_style=2)
hits = self._static_raw_tree.query(p_verify, predicate='intersects')
for hit_idx in hits:
p_obs = raw_geoms[hit_idx]
# If they ONLY touch, gap is exactly clearance. Valid.
if p_verify.touches(p_obs): continue
obj_id = self._static_raw_obj_ids[hit_idx]
if not self._is_in_safety_zone(p_actual, obj_id, None, None):
collision_count += 1
# 2. Check against other nets
self._ensure_dynamic_tree()
if self.dynamic_tree is not None:
tree_geoms = self.dynamic_tree.geometries
for comp in components:
# Robust fallback chain to ensure crossings are caught even with zero clearance
d_geoms = comp.dilated_actual_geometry or comp.dilated_geometry or comp.actual_geometry or comp.geometry
if not d_geoms: continue
# Ensure d_geoms is a list/array for STRtree.query
if not isinstance(d_geoms, (list, tuple, numpy.ndarray)):
d_geoms = [d_geoms]
res_indices, tree_indices = self.dynamic_tree.query(d_geoms, predicate='intersects')
if tree_indices.size > 0:
hit_net_ids = numpy.take(self._dynamic_net_ids_array, tree_indices)
net_id_str = str(net_id)
comp_hits = []
for i in range(len(tree_indices)):
if hit_net_ids[i] == net_id_str: continue
p_new = d_geoms[res_indices[i]]
p_tree = tree_geoms[tree_indices[i]]
if not p_new.touches(p_tree):
# Numerical tolerance for area overlap
if p_new.intersection(p_tree).area > 1e-7:
comp_hits.append(hit_net_ids[i])
if comp_hits:
collision_count += len(numpy.unique(comp_hits))
return (collision_count == 0), collision_count
def ray_cast(self, origin: Port, angle_deg: float, max_dist: float = 2000.0, net_width: float | None = None) -> float:
rad = numpy.radians(angle_deg)
cos_v, sin_v = numpy.cos(rad), numpy.sin(rad)
dx, dy = max_dist * cos_v, max_dist * sin_v
min_x, max_x = sorted([origin.x, origin.x + dx])
min_y, max_y = sorted([origin.y, origin.y + dy])
self._ensure_static_tree()
if self.static_tree is None: return max_dist
candidates = self.static_tree.query(box(min_x, min_y, max_x, max_y))
key = None
if net_width is not None:
tree = self._ensure_net_static_tree(net_width)
key = (round(net_width, 4), round(self.clearance, 4))
is_rect_arr = self._net_specific_is_rect[key]
bounds_arr = self._net_specific_bounds[key]
else:
self._ensure_static_tree()
tree = self.static_tree
is_rect_arr = self._static_is_rect_array
bounds_arr = self._static_bounds_array
if tree is None: return max_dist
candidates = tree.query(box(min_x, min_y, max_x, max_y))
if candidates.size == 0: return max_dist
min_dist = max_dist
inv_dx = 1.0 / dx if abs(dx) > 1e-12 else 1e30
inv_dy = 1.0 / dy if abs(dy) > 1e-12 else 1e30
b_arr = self._static_bounds_array[candidates]
dist_sq = (b_arr[:, 0] - origin.x)**2 + (b_arr[:, 1] - origin.y)**2
sorted_indices = numpy.argsort(dist_sq)
tree_geoms = tree.geometries
ray_line = None
for i in sorted_indices:
c = candidates[i]; b = self._static_bounds_array[c]
if abs(dx) < 1e-12:
# Fast AABB-based pre-sort
candidates_bounds = bounds_arr[candidates]
# Distance to AABB min corner as heuristic
dist_sq = (candidates_bounds[:, 0] - origin.x)**2 + (candidates_bounds[:, 1] - origin.y)**2
sorted_indices = numpy.argsort(dist_sq)
for idx in sorted_indices:
c = candidates[idx]
b = bounds_arr[c]
# Fast axis-aligned ray-AABB intersection
# (Standard Slab method)
if abs(dx) < 1e-12: # Vertical ray
if origin.x < b[0] or origin.x > b[2]: tx_min, tx_max = 1e30, -1e30
else: tx_min, tx_max = -1e30, 1e30
else:
t1, t2 = (b[0] - origin.x) * inv_dx, (b[2] - origin.x) * inv_dx
tx_min, tx_max = min(t1, t2), max(t1, t2)
if abs(dy) < 1e-12:
if abs(dy) < 1e-12: # Horizontal ray
if origin.y < b[1] or origin.y > b[3]: ty_min, ty_max = 1e30, -1e30
else: ty_min, ty_max = -1e30, 1e30
else:
t1, t2 = (b[1] - origin.y) * inv_dy, (b[3] - origin.y) * inv_dy
ty_min, ty_max = min(t1, t2), max(t1, t2)
t_min, t_max = max(tx_min, ty_min), min(tx_max, ty_max)
if t_max < 0 or t_min > t_max or t_min > 1.0 or t_min >= min_dist / max_dist: continue
if self._static_is_rect_array[c]:
min_dist = max(0.0, t_min * max_dist); continue
if ray_line is None: ray_line = LineString([(origin.x, origin.y), (origin.x + dx, origin.y + dy)])
obj_id = self.static_obj_ids[c]
if self.static_prepared[obj_id].intersects(ray_line):
intersection = ray_line.intersection(self.static_dilated[obj_id])
# Intersection conditions
if t_max < 0 or t_min > t_max or t_min > 1.0: continue
# If hit is further than current min_dist, skip
if t_min * max_dist >= min_dist: continue
# HIGH PRECISION CHECK
if is_rect_arr[c]:
# Rectangles are perfectly described by their AABB
min_dist = max(0.0, t_min * max_dist)
continue
# Fallback to full geometry check for non-rectangles (arcs, etc.)
if ray_line is None:
ray_line = LineString([(origin.x, origin.y), (origin.x + dx, origin.y + dy)])
obs_dilated = tree_geoms[c]
if obs_dilated.intersects(ray_line):
intersection = ray_line.intersection(obs_dilated)
if intersection.is_empty: continue
def get_dist(geom):
if hasattr(geom, 'geoms'): return min(get_dist(g) for g in geom.geoms)
return numpy.sqrt((geom.coords[0][0] - origin.x)**2 + (geom.coords[0][1] - origin.y)**2)
d = get_dist(intersection)
if d < min_dist: min_dist = d
return min_dist

View file

@ -1,326 +1,105 @@
from __future__ import annotations
import math
from typing import Literal, cast, Any
from typing import Literal
import numpy
import shapely
from shapely.geometry import Polygon, box, MultiPolygon
from shapely.ops import unary_union
from shapely.affinity import translate
from shapely.affinity import translate as shapely_translate
from shapely.geometry import Polygon, box
from inire.constants import DEFAULT_SEARCH_GRID_SNAP_UM, TOLERANCE_LINEAR, TOLERANCE_ANGULAR
from .primitives import Port
from inire.constants import TOLERANCE_ANGULAR, TOLERANCE_LINEAR
from .primitives import Port, rotation_matrix2
def snap_search_grid(value: float, snap_size: float = DEFAULT_SEARCH_GRID_SNAP_UM) -> float:
"""
Snap a coordinate to the nearest search grid unit.
"""
return round(value / snap_size) * snap_size
def _normalize_length(value: float) -> float:
return float(value)
class ComponentResult:
"""
Standard container for generated move geometry and state.
Supports Lazy Evaluation for translation to improve performance.
"""
__slots__ = (
'_geometry', '_dilated_geometry', '_proxy_geometry', '_actual_geometry', '_dilated_actual_geometry',
'end_port', 'length', 'move_type', '_bounds', '_dilated_bounds',
'_total_bounds', '_total_dilated_bounds', '_bounds_cached', '_total_geom_list', '_offsets', '_coords_cache',
'_base_result', '_offset', 'rel_gx', 'rel_gy', 'rel_go'
"geometry",
"dilated_geometry",
"proxy_geometry",
"actual_geometry",
"dilated_actual_geometry",
"end_port",
"length",
"move_type",
"_bounds",
"_total_bounds",
"_dilated_bounds",
"_total_dilated_bounds",
)
def __init__(
self,
geometry: list[Polygon] | None = None,
end_port: Port | None = None,
length: float = 0.0,
dilated_geometry: list[Polygon] | None = None,
proxy_geometry: list[Polygon] | None = None,
actual_geometry: list[Polygon] | None = None,
dilated_actual_geometry: list[Polygon] | None = None,
skip_bounds: bool = False,
move_type: str = 'Unknown',
_total_geom_list: list[Polygon] | None = None,
_offsets: list[int] | None = None,
_coords_cache: numpy.ndarray | None = None,
_base_result: ComponentResult | None = None,
_offset: tuple[float, float] | None = None,
snap_size: float = DEFAULT_SEARCH_GRID_SNAP_UM,
rel_gx: int | None = None,
rel_gy: int | None = None,
rel_go: int | None = None
) -> None:
self,
geometry: list[Polygon],
end_port: Port,
length: float,
move_type: str,
dilated_geometry: list[Polygon] | None = None,
proxy_geometry: list[Polygon] | None = None,
actual_geometry: list[Polygon] | None = None,
dilated_actual_geometry: list[Polygon] | None = None,
) -> None:
self.geometry = geometry
self.dilated_geometry = dilated_geometry
self.proxy_geometry = proxy_geometry
self.actual_geometry = actual_geometry
self.dilated_actual_geometry = dilated_actual_geometry
self.end_port = end_port
self.length = length
self.length = float(length)
self.move_type = move_type
self._base_result = _base_result
self._offset = _offset
self._bounds_cached = False
if rel_gx is not None:
self.rel_gx = rel_gx
self.rel_gy = rel_gy
self.rel_go = rel_go
elif end_port:
inv_snap = 1.0 / snap_size
self.rel_gx = int(round(end_port.x * inv_snap))
self.rel_gy = int(round(end_port.y * inv_snap))
self.rel_go = int(round(end_port.orientation))
else:
self.rel_gx = 0; self.rel_gy = 0; self.rel_go = 0
if _base_result is not None:
# Lazy Mode
self._geometry = None
self._dilated_geometry = None
self._proxy_geometry = None
self._actual_geometry = None
self._dilated_actual_geometry = None
self._bounds = None
self._bounds = [poly.bounds for poly in self.geometry]
self._total_bounds = _combine_bounds(self._bounds)
if self.dilated_geometry is None:
self._dilated_bounds = None
self._total_bounds = None
self._total_dilated_bounds = None
else:
# Eager Mode (Base Component)
self._geometry = geometry
self._dilated_geometry = dilated_geometry
self._proxy_geometry = proxy_geometry
self._actual_geometry = actual_geometry
self._dilated_actual_geometry = dilated_actual_geometry
# These are mostly legacy/unused but kept for slot safety
self._total_geom_list = _total_geom_list
self._offsets = _offsets
self._coords_cache = _coords_cache
if not skip_bounds and geometry:
# Use plain tuples for bounds to avoid NumPy overhead
self._bounds = [p.bounds for p in geometry]
b0 = self._bounds[0]
minx, miny, maxx, maxy = b0
for i in range(1, len(self._bounds)):
b = self._bounds[i]
if b[0] < minx: minx = b[0]
if b[1] < miny: miny = b[1]
if b[2] > maxx: maxx = b[2]
if b[3] > maxy: maxy = b[3]
self._total_bounds = (minx, miny, maxx, maxy)
if dilated_geometry is not None:
self._dilated_bounds = [p.bounds for p in dilated_geometry]
b0 = self._dilated_bounds[0]
minx, miny, maxx, maxy = b0
for i in range(1, len(self._dilated_bounds)):
b = self._dilated_bounds[i]
if b[0] < minx: minx = b[0]
if b[1] < miny: miny = b[1]
if b[2] > maxx: maxx = b[2]
if b[3] > maxy: maxy = b[3]
self._total_dilated_bounds = (minx, miny, maxx, maxy)
else:
self._dilated_bounds = None
self._total_dilated_bounds = None
else:
self._bounds = None
self._total_bounds = None
self._dilated_bounds = None
self._total_dilated_bounds = None
self._bounds_cached = True
def _ensure_evaluated(self, attr_name: str) -> None:
if self._base_result is None:
return
# Check if specific attribute is already translated
internal_name = f'_{attr_name}'
if getattr(self, internal_name) is not None:
return
# Perform Translation for the specific attribute only
base_geoms = getattr(self._base_result, internal_name)
if base_geoms is None:
return
dx, dy = self._offset
# Use shapely.affinity.translate (imported at top level)
translated_geoms = [translate(p, dx, dy) for p in base_geoms]
setattr(self, internal_name, translated_geoms)
@property
def geometry(self) -> list[Polygon]:
self._ensure_evaluated('geometry')
return self._geometry
@property
def dilated_geometry(self) -> list[Polygon] | None:
self._ensure_evaluated('dilated_geometry')
return self._dilated_geometry
@property
def proxy_geometry(self) -> list[Polygon] | None:
self._ensure_evaluated('proxy_geometry')
return self._proxy_geometry
@property
def actual_geometry(self) -> list[Polygon] | None:
self._ensure_evaluated('actual_geometry')
return self._actual_geometry
@property
def dilated_actual_geometry(self) -> list[Polygon] | None:
self._ensure_evaluated('dilated_actual_geometry')
return self._dilated_actual_geometry
self._dilated_bounds = [poly.bounds for poly in self.dilated_geometry]
self._total_dilated_bounds = _combine_bounds(self._dilated_bounds)
@property
def bounds(self) -> list[tuple[float, float, float, float]]:
if not self._bounds_cached:
self._ensure_bounds_evaluated()
return self._bounds
@property
def total_bounds(self) -> tuple[float, float, float, float]:
if not self._bounds_cached:
self._ensure_bounds_evaluated()
return self._total_bounds
@property
def dilated_bounds(self) -> list[tuple[float, float, float, float]] | None:
if not self._bounds_cached:
self._ensure_bounds_evaluated()
return self._dilated_bounds
@property
def total_dilated_bounds(self) -> tuple[float, float, float, float] | None:
if not self._bounds_cached:
self._ensure_bounds_evaluated()
return self._total_dilated_bounds
def _ensure_bounds_evaluated(self) -> None:
if self._bounds_cached: return
base = self._base_result
if base is not None:
dx, dy = self._offset
# Direct tuple creation is much faster than NumPy for single AABBs
if base._bounds is not None:
self._bounds = [(b[0]+dx, b[1]+dy, b[2]+dx, b[3]+dy) for b in base._bounds]
if base._total_bounds is not None:
b = base._total_bounds
self._total_bounds = (b[0]+dx, b[1]+dy, b[2]+dx, b[3]+dy)
if base._dilated_bounds is not None:
self._dilated_bounds = [(b[0]+dx, b[1]+dy, b[2]+dx, b[3]+dy) for b in base._dilated_bounds]
if base._total_dilated_bounds is not None:
b = base._total_dilated_bounds
self._total_dilated_bounds = (b[0]+dx, b[1]+dy, b[2]+dx, b[3]+dy)
self._bounds_cached = True
def translate(self, dx: float, dy: float, rel_gx: int | None = None, rel_gy: int | None = None, rel_go: int | None = None) -> ComponentResult:
"""
Create a new ComponentResult translated by (dx, dy).
"""
new_port = Port(self.end_port.x + dx, self.end_port.y + dy, self.end_port.orientation, snap=False)
# LAZY TRANSLATE
if self._base_result:
base = self._base_result
current_offset = self._offset
new_offset = (current_offset[0] + dx, current_offset[1] + dy)
else:
base = self
new_offset = (dx, dy)
def translate(self, dx: int | float, dy: int | float) -> ComponentResult:
return ComponentResult(
end_port=new_port,
geometry=[shapely_translate(poly, dx, dy) for poly in self.geometry],
end_port=self.end_port + [dx, dy, 0],
length=self.length,
move_type=self.move_type,
_base_result=base,
_offset=new_offset,
rel_gx=rel_gx,
rel_gy=rel_gy,
rel_go=rel_go
dilated_geometry=None if self.dilated_geometry is None else [shapely_translate(poly, dx, dy) for poly in self.dilated_geometry],
proxy_geometry=None if self.proxy_geometry is None else [shapely_translate(poly, dx, dy) for poly in self.proxy_geometry],
actual_geometry=None if self.actual_geometry is None else [shapely_translate(poly, dx, dy) for poly in self.actual_geometry],
dilated_actual_geometry=None if self.dilated_actual_geometry is None else [shapely_translate(poly, dx, dy) for poly in self.dilated_actual_geometry],
)
class Straight:
"""
Move generator for straight waveguide segments.
"""
@staticmethod
def generate(
start_port: Port,
length: float,
width: float,
snap_to_grid: bool = True,
dilation: float = 0.0,
snap_size: float = DEFAULT_SEARCH_GRID_SNAP_UM,
) -> ComponentResult:
"""
Generate a straight waveguide segment.
"""
rad = numpy.radians(start_port.orientation)
cos_val = numpy.cos(rad)
sin_val = numpy.sin(rad)
ex = start_port.x + length * cos_val
ey = start_port.y + length * sin_val
if snap_to_grid:
ex = snap_search_grid(ex, snap_size)
ey = snap_search_grid(ey, snap_size)
end_port = Port(ex, ey, start_port.orientation)
actual_length = numpy.sqrt((end_port.x - start_port.x)**2 + (end_port.y - start_port.y)**2)
# Create polygons using vectorized points
half_w = width / 2.0
pts_raw = numpy.array([
[0, half_w],
[actual_length, half_w],
[actual_length, -half_w],
[0, -half_w]
])
# Rotation matrix (standard 2D rotation)
rot_matrix = numpy.array([[cos_val, -sin_val], [sin_val, cos_val]])
# Transform points: P' = R * P + T
poly_points = (pts_raw @ rot_matrix.T) + [start_port.x, start_port.y]
geom = [Polygon(poly_points)]
dilated_geom = None
if dilation > 0:
# Direct calculation of dilated rectangle instead of expensive buffer()
half_w_dil = half_w + dilation
pts_dil = numpy.array([
[-dilation, half_w_dil],
[actual_length + dilation, half_w_dil],
[actual_length + dilation, -half_w_dil],
[-dilation, -half_w_dil]
])
poly_points_dil = (pts_dil @ rot_matrix.T) + [start_port.x, start_port.y]
dilated_geom = [Polygon(poly_points_dil)]
# Pre-calculate grid indices for faster ComponentResult init
inv_snap = 1.0 / snap_size
rgx = int(round(ex * inv_snap))
rgy = int(round(ey * inv_snap))
rgo = int(round(start_port.orientation))
# For straight segments, geom IS the actual geometry
return ComponentResult(
geometry=geom, end_port=end_port, length=actual_length,
dilated_geometry=dilated_geom, actual_geometry=geom,
dilated_actual_geometry=dilated_geom, move_type='Straight',
snap_size=snap_size, rel_gx=rgx, rel_gy=rgy, rel_go=rgo
)
def _combine_bounds(bounds_list: list[tuple[float, float, float, float]]) -> tuple[float, float, float, float]:
arr = numpy.asarray(bounds_list, dtype=numpy.float64)
return (
float(arr[:, 0].min()),
float(arr[:, 1].min()),
float(arr[:, 2].max()),
float(arr[:, 3].max()),
)
def _get_num_segments(radius: float, angle_deg: float, sagitta: float = 0.01) -> int:
"""
Calculate number of segments for an arc to maintain a maximum sagitta.
"""
if radius <= 0:
return 1
ratio = max(0.0, min(1.0, 1.0 - sagitta / radius))
@ -332,350 +111,223 @@ def _get_num_segments(radius: float, angle_deg: float, sagitta: float = 0.01) ->
def _get_arc_polygons(
cx: float,
cy: float,
radius: float,
width: float,
t_start: float,
t_end: float,
sagitta: float = 0.01,
dilation: float = 0.0,
) -> list[Polygon]:
"""
Helper to generate arc-shaped polygons using vectorized NumPy operations.
"""
num_segments = _get_num_segments(radius, float(numpy.degrees(abs(t_end - t_start))), sagitta)
cxy: tuple[float, float],
radius: float,
width: float,
ts: tuple[float, float],
sagitta: float = 0.01,
dilation: float = 0.0,
) -> list[Polygon]:
t_start, t_end = numpy.radians(ts[0]), numpy.radians(ts[1])
num_segments = _get_num_segments(radius, abs(ts[1] - ts[0]), sagitta)
angles = numpy.linspace(t_start, t_end, num_segments + 1)
cos_a = numpy.cos(angles)
sin_a = numpy.sin(angles)
cx, cy = cxy
inner_radius = radius - width / 2.0 - dilation
outer_radius = radius + width / 2.0 + dilation
inner_points = numpy.stack([cx + inner_radius * cos_a, cy + inner_radius * sin_a], axis=1)
outer_points = numpy.stack([cx + outer_radius * cos_a[::-1], cy + outer_radius * sin_a[::-1]], axis=1)
# Concatenate inner and outer points to form the polygon ring
poly_points = numpy.concatenate([inner_points, outer_points])
return [Polygon(poly_points)]
cos_a = numpy.cos(angles)
sin_a = numpy.sin(angles)
inner_points = numpy.column_stack((cx + inner_radius * cos_a, cy + inner_radius * sin_a))
outer_points = numpy.column_stack((cx + outer_radius * cos_a[::-1], cy + outer_radius * sin_a[::-1]))
return [Polygon(numpy.concatenate((inner_points, outer_points), axis=0))]
def _clip_bbox(
cx: float,
cy: float,
radius: float,
width: float,
t_start: float,
t_end: float,
) -> Polygon:
"""
Generates a rotationally invariant bounding polygon for an arc.
"""
sweep = abs(t_end - t_start)
if sweep > 2 * numpy.pi:
sweep = sweep % (2 * numpy.pi)
mid_angle = (t_start + t_end) / 2.0
# Handle wrap-around for mid_angle
if abs(t_end - t_start) > numpy.pi:
mid_angle += numpy.pi
r_out = radius + width / 2.0
r_in = max(0.0, radius - width / 2.0)
half_sweep = sweep / 2.0
# Define vertices in local space (center at 0,0, symmetry axis along +X)
cos_hs = numpy.cos(half_sweep)
cos_hs2 = numpy.cos(half_sweep / 2.0)
# Distance to peak from center: r_out / cos(hs/2)
peak_r = r_out / cos_hs2
local_verts = [
[r_in * numpy.cos(-half_sweep), r_in * numpy.sin(-half_sweep)],
[r_out * numpy.cos(-half_sweep), r_out * numpy.sin(-half_sweep)],
[peak_r * numpy.cos(-half_sweep/2), peak_r * numpy.sin(-half_sweep/2)],
[peak_r * numpy.cos(half_sweep/2), peak_r * numpy.sin(half_sweep/2)],
[r_out * numpy.cos(half_sweep), r_out * numpy.sin(half_sweep)],
[r_in * numpy.cos(half_sweep), r_in * numpy.sin(half_sweep)],
[r_in, 0.0]
]
# Rotate and translate to world space
cos_m = numpy.cos(mid_angle)
sin_m = numpy.sin(mid_angle)
rot = numpy.array([[cos_m, -sin_m], [sin_m, cos_m]])
world_verts = (numpy.array(local_verts) @ rot.T) + [cx, cy]
return Polygon(world_verts)
def _clip_bbox(cxy: tuple[float, float], radius: float, width: float, ts: tuple[float, float], clip_margin: float) -> Polygon:
arc_poly = _get_arc_polygons(cxy, radius, width, ts)[0]
minx, miny, maxx, maxy = arc_poly.bounds
bbox_poly = box(minx, miny, maxx, maxy)
shrink = min(clip_margin, max(radius, width))
return bbox_poly.buffer(-shrink, join_style=2) if shrink > 0 else bbox_poly
def _apply_collision_model(
arc_poly: Polygon,
collision_type: Literal["arc", "bbox", "clipped_bbox"] | Polygon,
radius: float,
width: float,
cx: float = 0.0,
cy: float = 0.0,
clip_margin: float = 10.0,
t_start: float | None = None,
t_end: float | None = None,
) -> list[Polygon]:
"""
Applies the specified collision model to an arc geometry.
"""
arc_poly: Polygon,
collision_type: Literal["arc", "bbox", "clipped_bbox"] | Polygon,
radius: float,
width: float,
cxy: tuple[float, float],
clip_margin: float,
ts: tuple[float, float],
) -> list[Polygon]:
if isinstance(collision_type, Polygon):
# Translate the custom polygon to the bend center (cx, cy)
return [shapely.transform(collision_type, lambda x: x + [cx, cy])]
return [shapely_translate(collision_type, cxy[0], cxy[1])]
if collision_type == "arc":
return [arc_poly]
if collision_type == "clipped_bbox" and t_start is not None and t_end is not None:
return [_clip_bbox(cx, cy, radius, width, t_start, t_end)]
if collision_type == "clipped_bbox":
clipped = _clip_bbox(cxy, radius, width, ts, clip_margin)
return [clipped if not clipped.is_empty else box(*arc_poly.bounds)]
return [box(*arc_poly.bounds)]
# Bounding box of the high-fidelity arc (fallback for bbox or missing angles)
minx, miny, maxx, maxy = arc_poly.bounds
bbox_poly = box(minx, miny, maxx, maxy)
if collision_type == "bbox":
return [bbox_poly]
return [arc_poly]
class Straight:
@staticmethod
def generate(
start_port: Port,
length: float,
width: float,
dilation: float = 0.0,
) -> ComponentResult:
rot2 = rotation_matrix2(start_port.r)
length_f = _normalize_length(length)
disp = rot2 @ numpy.array((length_f, 0.0))
end_port = Port(start_port.x + disp[0], start_port.y + disp[1], start_port.r)
half_w = width / 2.0
pts = numpy.array(((0.0, half_w), (length_f, half_w), (length_f, -half_w), (0.0, -half_w)))
poly_points = (pts @ rot2.T) + numpy.array((start_port.x, start_port.y))
geometry = [Polygon(poly_points)]
dilated_geometry = None
if dilation > 0:
half_w_d = half_w + dilation
pts_d = numpy.array(((-dilation, half_w_d), (length_f + dilation, half_w_d), (length_f + dilation, -half_w_d), (-dilation, -half_w_d)))
poly_points_d = (pts_d @ rot2.T) + numpy.array((start_port.x, start_port.y))
dilated_geometry = [Polygon(poly_points_d)]
return ComponentResult(
geometry=geometry,
end_port=end_port,
length=abs(length_f),
move_type="Straight",
dilated_geometry=dilated_geometry,
actual_geometry=geometry,
dilated_actual_geometry=dilated_geometry,
)
class Bend90:
"""
Move generator for 90-degree waveguide bends.
"""
@staticmethod
def generate(
start_port: Port,
radius: float,
width: float,
direction: Literal["CW", "CCW"],
sagitta: float = 0.01,
collision_type: Literal["arc", "bbox", "clipped_bbox"] | Polygon = "arc",
clip_margin: float = 10.0,
dilation: float = 0.0,
snap_to_grid: bool = True,
snap_size: float = DEFAULT_SEARCH_GRID_SNAP_UM,
) -> ComponentResult:
"""
Generate a 90-degree bend.
"""
rad_start = numpy.radians(start_port.orientation)
# Center of the arc
if direction == "CCW":
cx = start_port.x + radius * numpy.cos(rad_start + numpy.pi / 2)
cy = start_port.y + radius * numpy.sin(rad_start + numpy.pi / 2)
t_start = rad_start - numpy.pi / 2
t_end = t_start + numpy.pi / 2
new_ori = (start_port.orientation + 90) % 360
else:
cx = start_port.x + radius * numpy.cos(rad_start - numpy.pi / 2)
cy = start_port.y + radius * numpy.sin(rad_start - numpy.pi / 2)
t_start = rad_start + numpy.pi / 2
t_end = t_start - numpy.pi / 2
new_ori = (start_port.orientation - 90) % 360
start_port: Port,
radius: float,
width: float,
direction: Literal["CW", "CCW"],
sagitta: float = 0.01,
collision_type: Literal["arc", "bbox", "clipped_bbox"] | Polygon = "arc",
clip_margin: float = 10.0,
dilation: float = 0.0,
) -> ComponentResult:
rot2 = rotation_matrix2(start_port.r)
sign = 1 if direction == "CCW" else -1
# Snap the end point to the grid
ex_raw = cx + radius * numpy.cos(t_end)
ey_raw = cy + radius * numpy.sin(t_end)
if snap_to_grid:
ex = snap_search_grid(ex_raw, snap_size)
ey = snap_search_grid(ey_raw, snap_size)
else:
ex, ey = ex_raw, ey_raw
# Slightly adjust radius and t_end to hit snapped point exactly
dx, dy = ex - cx, ey - cy
actual_radius = numpy.sqrt(dx**2 + dy**2)
t_end_snapped = numpy.arctan2(dy, dx)
# Ensure directionality and approx 90 degree sweep
if direction == "CCW":
while t_end_snapped <= t_start:
t_end_snapped += 2 * numpy.pi
while t_end_snapped > t_start + numpy.pi:
t_end_snapped -= 2 * numpy.pi
else:
while t_end_snapped >= t_start:
t_end_snapped -= 2 * numpy.pi
while t_end_snapped < t_start - numpy.pi:
t_end_snapped += 2 * numpy.pi
t_end = t_end_snapped
center_local = numpy.array((0.0, sign * radius))
end_local = numpy.array((radius, sign * radius))
center_xy = (rot2 @ center_local) + numpy.array((start_port.x, start_port.y))
end_xy = (rot2 @ end_local) + numpy.array((start_port.x, start_port.y))
end_port = Port(end_xy[0], end_xy[1], start_port.r + sign * 90)
end_port = Port(ex, ey, new_ori)
start_theta = start_port.r - sign * 90
end_theta = start_port.r
ts = (float(start_theta), float(end_theta))
arc_polys = _get_arc_polygons(cx, cy, actual_radius, width, t_start, t_end, sagitta)
arc_polys = _get_arc_polygons((float(center_xy[0]), float(center_xy[1])), radius, width, ts, sagitta)
collision_polys = _apply_collision_model(
arc_polys[0], collision_type, actual_radius, width, cx, cy, clip_margin, t_start, t_end
arc_polys[0],
collision_type,
radius,
width,
(float(center_xy[0]), float(center_xy[1])),
clip_margin,
ts,
)
proxy_geom = None
proxy_geometry = None
if collision_type == "arc":
# Auto-generate a clipped_bbox proxy for tiered collision checks
proxy_geom = _apply_collision_model(
arc_polys[0], "clipped_bbox", actual_radius, width, cx, cy, clip_margin, t_start, t_end
proxy_geometry = _apply_collision_model(
arc_polys[0],
"clipped_bbox",
radius,
width,
(float(center_xy[0]), float(center_xy[1])),
clip_margin,
ts,
)
dilated_geom = None
dilated_actual_geom = None
dilated_actual_geometry = None
dilated_geometry = None
if dilation > 0:
dilated_actual_geom = _get_arc_polygons(cx, cy, actual_radius, width, t_start, t_end, sagitta, dilation=dilation)
if collision_type == "arc":
dilated_geom = dilated_actual_geom
else:
dilated_geom = [p.buffer(dilation) for p in collision_polys]
# Pre-calculate grid indices for faster ComponentResult init
inv_snap = 1.0 / snap_size
rgx = int(round(ex * inv_snap))
rgy = int(round(ey * inv_snap))
rgo = int(round(new_ori))
dilated_actual_geometry = _get_arc_polygons((float(center_xy[0]), float(center_xy[1])), radius, width, ts, sagitta, dilation=dilation)
dilated_geometry = dilated_actual_geometry if collision_type == "arc" else [poly.buffer(dilation) for poly in collision_polys]
return ComponentResult(
geometry=collision_polys,
end_port=end_port,
length=actual_radius * numpy.abs(t_end - t_start),
dilated_geometry=dilated_geom,
proxy_geometry=proxy_geom,
geometry=collision_polys,
end_port=end_port,
length=abs(radius) * numpy.pi / 2.0,
move_type="Bend90",
dilated_geometry=dilated_geometry,
proxy_geometry=proxy_geometry,
actual_geometry=arc_polys,
dilated_actual_geometry=dilated_actual_geom,
move_type='Bend90',
snap_size=snap_size,
rel_gx=rgx, rel_gy=rgy, rel_go=rgo
dilated_actual_geometry=dilated_actual_geometry,
)
class SBend:
"""
Move generator for parametric S-bends.
"""
@staticmethod
def generate(
start_port: Port,
offset: float,
radius: float,
width: float,
sagitta: float = 0.01,
collision_type: Literal["arc", "bbox", "clipped_bbox"] | Polygon = "arc",
clip_margin: float = 10.0,
dilation: float = 0.0,
snap_to_grid: bool = True,
snap_size: float = DEFAULT_SEARCH_GRID_SNAP_UM,
) -> ComponentResult:
"""
Generate a parametric S-bend (two tangent arcs).
"""
start_port: Port,
offset: float,
radius: float,
width: float,
sagitta: float = 0.01,
collision_type: Literal["arc", "bbox", "clipped_bbox"] | Polygon = "arc",
clip_margin: float = 10.0,
dilation: float = 0.0,
) -> ComponentResult:
if abs(offset) >= 2 * radius:
raise ValueError(f"SBend offset {offset} must be less than 2*radius {2 * radius}")
theta_init = numpy.arccos(1 - abs(offset) / (2 * radius))
dx_init = 2 * radius * numpy.sin(theta_init)
rad_start = numpy.radians(start_port.orientation)
# Target point
ex_raw = start_port.x + dx_init * numpy.cos(rad_start) - offset * numpy.sin(rad_start)
ey_raw = start_port.y + dx_init * numpy.sin(rad_start) + offset * numpy.cos(rad_start)
sign = 1 if offset >= 0 else -1
theta = numpy.arccos(1.0 - abs(offset) / (2.0 * radius))
dx = 2.0 * radius * numpy.sin(theta)
theta_deg = float(numpy.degrees(theta))
if snap_to_grid:
ex = snap_search_grid(ex_raw, snap_size)
ey = snap_search_grid(ey_raw, snap_size)
else:
ex, ey = ex_raw, ey_raw
rot2 = rotation_matrix2(start_port.r)
end_local = numpy.array((dx, offset))
end_xy = (rot2 @ end_local) + numpy.array((start_port.x, start_port.y))
end_port = Port(end_xy[0], end_xy[1], start_port.r)
end_port = Port(ex, ey, start_port.orientation)
c1_local = numpy.array((0.0, sign * radius))
c2_local = numpy.array((dx, offset - sign * radius))
c1_xy = (rot2 @ c1_local) + numpy.array((start_port.x, start_port.y))
c2_xy = (rot2 @ c2_local) + numpy.array((start_port.x, start_port.y))
# Solve for theta and radius that hit (ex, ey) exactly
local_dx = (ex - start_port.x) * numpy.cos(rad_start) + (ey - start_port.y) * numpy.sin(rad_start)
local_dy = -(ex - start_port.x) * numpy.sin(rad_start) + (ey - start_port.y) * numpy.cos(rad_start)
# tan(theta / 2) = local_dy / local_dx
theta = 2 * numpy.arctan2(abs(local_dy), local_dx)
if abs(theta) < TOLERANCE_ANGULAR:
# De-generate to straight
actual_len = numpy.sqrt(local_dx**2 + local_dy**2)
return Straight.generate(start_port, actual_len, width, snap_to_grid=False, dilation=dilation, snap_size=snap_size)
ts1 = (float(start_port.r - sign * 90), float(start_port.r - sign * 90 + sign * theta_deg))
second_base = start_port.r + (90 if sign > 0 else 270)
ts2 = (float(second_base + sign * theta_deg), float(second_base))
denom = (2 * (1 - numpy.cos(theta)))
if abs(denom) < TOLERANCE_LINEAR:
raise ValueError("SBend calculation failed: radius denominator zero")
actual_radius = abs(local_dy) / denom
# Safety Check: Reject SBends with tiny radii that would cause self-overlap
if actual_radius < width:
raise ValueError(f"SBend actual_radius {actual_radius:.3f} is too small (width={width})")
arc1 = _get_arc_polygons((float(c1_xy[0]), float(c1_xy[1])), radius, width, ts1, sagitta)[0]
arc2 = _get_arc_polygons((float(c2_xy[0]), float(c2_xy[1])), radius, width, ts2, sagitta)[0]
actual_geometry = [arc1, arc2]
geometry = [
_apply_collision_model(arc1, collision_type, radius, width, (float(c1_xy[0]), float(c1_xy[1])), clip_margin, ts1)[0],
_apply_collision_model(arc2, collision_type, radius, width, (float(c2_xy[0]), float(c2_xy[1])), clip_margin, ts2)[0],
]
# Limit radius to prevent giant arcs
if actual_radius > 100000.0:
actual_len = numpy.sqrt(local_dx**2 + local_dy**2)
return Straight.generate(start_port, actual_len, width, snap_to_grid=False, dilation=dilation, snap_size=snap_size)
direction = 1 if local_dy > 0 else -1
c1_angle = rad_start + direction * numpy.pi / 2
cx1 = start_port.x + actual_radius * numpy.cos(c1_angle)
cy1 = start_port.y + actual_radius * numpy.sin(c1_angle)
ts1, te1 = c1_angle + numpy.pi, c1_angle + numpy.pi + direction * theta
c2_angle = rad_start - direction * numpy.pi / 2
cx2 = ex + actual_radius * numpy.cos(c2_angle)
cy2 = ey + actual_radius * numpy.sin(c2_angle)
te2 = c2_angle + numpy.pi
ts2 = te2 + direction * theta
arc1 = _get_arc_polygons(cx1, cy1, actual_radius, width, ts1, te1, sagitta)[0]
arc2 = _get_arc_polygons(cx2, cy2, actual_radius, width, ts2, te2, sagitta)[0]
arc_polys = [arc1, arc2]
# Use the provided collision model for primary geometry
col1 = _apply_collision_model(arc1, collision_type, actual_radius, width, cx1, cy1, clip_margin, ts1, te1)[0]
col2 = _apply_collision_model(arc2, collision_type, actual_radius, width, cx2, cy2, clip_margin, ts2, te2)[0]
collision_polys = [col1, col2]
proxy_geom = None
proxy_geometry = None
if collision_type == "arc":
# Auto-generate proxies
p1 = _apply_collision_model(arc1, "clipped_bbox", actual_radius, width, cx1, cy1, clip_margin, ts1, te1)[0]
p2 = _apply_collision_model(arc2, "clipped_bbox", actual_radius, width, cx2, cy2, clip_margin, ts2, te2)[0]
proxy_geom = [p1, p2]
proxy_geometry = [
_apply_collision_model(arc1, "clipped_bbox", radius, width, (float(c1_xy[0]), float(c1_xy[1])), clip_margin, ts1)[0],
_apply_collision_model(arc2, "clipped_bbox", radius, width, (float(c2_xy[0]), float(c2_xy[1])), clip_margin, ts2)[0],
]
dilated_geom = None
dilated_actual_geom = None
dilated_actual_geometry = None
dilated_geometry = None
if dilation > 0:
d1 = _get_arc_polygons(cx1, cy1, actual_radius, width, ts1, te1, sagitta, dilation=dilation)[0]
d2 = _get_arc_polygons(cx2, cy2, actual_radius, width, ts2, te2, sagitta, dilation=dilation)[0]
dilated_actual_geom = [d1, d2]
if collision_type == "arc":
dilated_geom = dilated_actual_geom
else:
dilated_geom = [p.buffer(dilation) for p in collision_polys]
# Pre-calculate grid indices for faster ComponentResult init
inv_snap = 1.0 / snap_size
rgx = int(round(ex * inv_snap))
rgy = int(round(ey * inv_snap))
rgo = int(round(start_port.orientation))
dilated_actual_geometry = [
_get_arc_polygons((float(c1_xy[0]), float(c1_xy[1])), radius, width, ts1, sagitta, dilation=dilation)[0],
_get_arc_polygons((float(c2_xy[0]), float(c2_xy[1])), radius, width, ts2, sagitta, dilation=dilation)[0],
]
dilated_geometry = dilated_actual_geometry if collision_type == "arc" else [poly.buffer(dilation) for poly in geometry]
return ComponentResult(
geometry=collision_polys,
end_port=end_port,
length=2 * actual_radius * theta,
dilated_geometry=dilated_geom,
proxy_geometry=proxy_geom,
actual_geometry=arc_polys,
dilated_actual_geometry=dilated_actual_geom,
move_type='SBend',
snap_size=snap_size,
rel_gx=rgx, rel_gy=rgy, rel_go=rgo
geometry=geometry,
end_port=end_port,
length=2.0 * radius * theta,
move_type="SBend",
dilated_geometry=dilated_geometry,
proxy_geometry=proxy_geometry,
actual_geometry=actual_geometry,
dilated_actual_geometry=dilated_actual_geometry,
)

View file

@ -1,77 +1,160 @@
from __future__ import annotations
from collections.abc import Iterator
from typing import Self
import numpy
from numpy.typing import ArrayLike, NDArray
# 1nm snap (0.001 µm)
GRID_SNAP_UM = 0.001
def _normalize_angle(angle_deg: int | float) -> int:
angle = int(round(angle_deg)) % 360
if angle % 90 != 0:
raise ValueError(f"Port angle must be Manhattan (multiple of 90), got {angle_deg!r}")
return angle
def snap_nm(value: float) -> float:
"""
Snap a coordinate to the nearest 1nm (0.001 um).
"""
return round(value * 1000) / 1000
def _as_int32_triplet(value: ArrayLike) -> NDArray[numpy.int32]:
arr = numpy.asarray(value, dtype=numpy.int32)
if arr.shape != (3,):
raise ValueError(f"Port array must have shape (3,), got {arr.shape}")
arr = arr.copy()
arr[2] = _normalize_angle(int(arr[2]))
return arr
from inire.constants import TOLERANCE_LINEAR
class Port:
"""
A port defined by (x, y, orientation) in micrometers.
Port represented as an ndarray-backed (x, y, r) triple with int32 storage.
"""
__slots__ = ('x', 'y', 'orientation')
def __init__(
self,
x: float,
y: float,
orientation: float,
snap: bool = True
) -> None:
if snap:
self.x = round(x * 1000) / 1000
self.y = round(y * 1000) / 1000
# Faster orientation normalization for common cases
if 0 <= orientation < 360:
self.orientation = float(orientation)
else:
self.orientation = float(orientation % 360)
else:
self.x = x
self.y = y
self.orientation = float(orientation)
__slots__ = ("_xyr",)
def __init__(self, x: int | float, y: int | float, r: int | float) -> None:
self._xyr = numpy.array(
(int(round(x)), int(round(y)), _normalize_angle(r)),
dtype=numpy.int32,
)
@classmethod
def from_array(cls, xyr: ArrayLike) -> Self:
obj = cls.__new__(cls)
obj._xyr = _as_int32_triplet(xyr)
return obj
@property
def x(self) -> int:
return int(self._xyr[0])
@x.setter
def x(self, val: int | float) -> None:
self._xyr[0] = int(round(val))
@property
def y(self) -> int:
return int(self._xyr[1])
@y.setter
def y(self, val: int | float) -> None:
self._xyr[1] = int(round(val))
@property
def r(self) -> int:
return int(self._xyr[2])
@r.setter
def r(self, val: int | float) -> None:
self._xyr[2] = _normalize_angle(val)
@property
def orientation(self) -> int:
return self.r
@orientation.setter
def orientation(self, val: int | float) -> None:
self.r = val
@property
def xyr(self) -> NDArray[numpy.int32]:
return self._xyr
@xyr.setter
def xyr(self, val: ArrayLike) -> None:
self._xyr = _as_int32_triplet(val)
def __repr__(self) -> str:
return f'Port(x={self.x}, y={self.y}, orientation={self.orientation})'
return f"Port(x={self.x}, y={self.y}, r={self.r})"
def __iter__(self) -> Iterator[int]:
return iter((self.x, self.y, self.r))
def __len__(self) -> int:
return 3
def __getitem__(self, item: int | slice) -> int | NDArray[numpy.int32]:
return self._xyr[item]
def __array__(self, dtype: numpy.dtype | None = None) -> NDArray[numpy.int32]:
return numpy.asarray(self._xyr, dtype=dtype)
def __eq__(self, other: object) -> bool:
if not isinstance(other, Port):
return False
return (abs(self.x - other.x) < TOLERANCE_LINEAR and
abs(self.y - other.y) < TOLERANCE_LINEAR and
abs(self.orientation - other.orientation) < TOLERANCE_LINEAR)
return bool(numpy.array_equal(self._xyr, other._xyr))
def __hash__(self) -> int:
return hash((round(self.x, 6), round(self.y, 6), round(self.orientation, 6)))
return hash(self.as_tuple())
def copy(self) -> Self:
return type(self).from_array(self._xyr.copy())
def as_tuple(self) -> tuple[int, int, int]:
return (self.x, self.y, self.r)
def translate(self, dxy: ArrayLike) -> Self:
dxy_arr = numpy.asarray(dxy, dtype=numpy.int32)
if dxy_arr.shape == (2,):
return type(self)(self.x + int(dxy_arr[0]), self.y + int(dxy_arr[1]), self.r)
if dxy_arr.shape == (3,):
return type(self)(self.x + int(dxy_arr[0]), self.y + int(dxy_arr[1]), self.r + int(dxy_arr[2]))
raise ValueError(f"Translation must have shape (2,) or (3,), got {dxy_arr.shape}")
def __add__(self, other: ArrayLike) -> Self:
return self.translate(other)
def __sub__(self, other: ArrayLike | Self) -> NDArray[numpy.int32]:
if isinstance(other, Port):
return self._xyr - other._xyr
return self._xyr - numpy.asarray(other, dtype=numpy.int32)
def translate_port(port: Port, dx: float, dy: float) -> Port:
"""
Translate a port by (dx, dy).
"""
return Port(port.x + dx, port.y + dy, port.orientation)
ROT2_0 = numpy.array(((1, 0), (0, 1)), dtype=numpy.int32)
ROT2_90 = numpy.array(((0, -1), (1, 0)), dtype=numpy.int32)
ROT2_180 = numpy.array(((-1, 0), (0, -1)), dtype=numpy.int32)
ROT2_270 = numpy.array(((0, 1), (-1, 0)), dtype=numpy.int32)
def rotate_port(port: Port, angle: float, origin: tuple[float, float] = (0, 0)) -> Port:
"""
Rotate a port by a multiple of 90 degrees around an origin.
"""
ox, oy = origin
px, py = port.x, port.y
def rotation_matrix2(rotation_deg: int) -> NDArray[numpy.int32]:
quadrant = (_normalize_angle(rotation_deg) // 90) % 4
return (ROT2_0, ROT2_90, ROT2_180, ROT2_270)[quadrant]
rad = numpy.radians(angle)
qx = snap_nm(ox + numpy.cos(rad) * (px - ox) - numpy.sin(rad) * (py - oy))
qy = snap_nm(oy + numpy.sin(rad) * (px - ox) + numpy.cos(rad) * (py - oy))
return Port(qx, qy, port.orientation + angle)
def rotation_matrix3(rotation_deg: int) -> NDArray[numpy.int32]:
rot2 = rotation_matrix2(rotation_deg)
rot3 = numpy.zeros((3, 3), dtype=numpy.int32)
rot3[:2, :2] = rot2
rot3[2, 2] = 1
return rot3
def translate_port(port: Port, dx: int | float, dy: int | float) -> Port:
return Port(port.x + dx, port.y + dy, port.r)
def rotate_port(port: Port, angle: int | float, origin: tuple[int | float, int | float] = (0, 0)) -> Port:
angle_i = _normalize_angle(angle)
rot = rotation_matrix2(angle_i)
origin_xy = numpy.array((int(round(origin[0])), int(round(origin[1]))), dtype=numpy.int32)
rel = numpy.array((port.x, port.y), dtype=numpy.int32) - origin_xy
rotated = origin_xy + rot @ rel
return Port(int(rotated[0]), int(rotated[1]), port.r + angle_i)

File diff suppressed because it is too large Load diff

View file

@ -4,13 +4,15 @@ from dataclasses import dataclass, field
from typing import Literal, Any
VisibilityGuidanceMode = Literal["off", "exact_corner", "tangent_corner"]
@dataclass
class RouterConfig:
"""Configuration parameters for the A* Router."""
node_limit: int = 1000000
snap_size: float = 5.0
# Sparse Sampling Configuration
max_straight_length: float = 2000.0
num_straight_samples: int = 5
@ -29,6 +31,7 @@ class RouterConfig:
sbend_penalty: float = 500.0
bend_collision_type: Literal["arc", "bbox", "clipped_bbox"] | Any = "arc"
bend_clip_margin: float = 10.0
visibility_guidance: VisibilityGuidanceMode = "tangent_corner"
@dataclass

View file

@ -3,8 +3,9 @@ from __future__ import annotations
from typing import TYPE_CHECKING, Any
import numpy as np
from inire.router.config import CostConfig
from inire.constants import TOLERANCE_LINEAR
from inire.router.config import CostConfig
if TYPE_CHECKING:
from shapely.geometry import Polygon
@ -15,50 +16,33 @@ if TYPE_CHECKING:
class CostEvaluator:
"""
Calculates total path and proximity costs.
"""
__slots__ = ('collision_engine', 'danger_map', 'config', 'unit_length_cost', 'greedy_h_weight', 'congestion_penalty',
'_target_x', '_target_y', '_target_ori', '_target_cos', '_target_sin', '_min_radius')
collision_engine: CollisionEngine
""" The engine for intersection checks """
danger_map: DangerMap
""" Pre-computed grid for heuristic proximity costs """
config: Any
""" Parameter configuration (CostConfig or RouterConfig) """
unit_length_cost: float
greedy_h_weight: float
congestion_penalty: float
""" Cached weight values for performance """
__slots__ = (
"collision_engine",
"danger_map",
"config",
"unit_length_cost",
"greedy_h_weight",
"congestion_penalty",
"_target_x",
"_target_y",
"_target_r",
"_target_cos",
"_target_sin",
"_min_radius",
)
def __init__(
self,
collision_engine: CollisionEngine,
danger_map: DangerMap | None = None,
unit_length_cost: float = 1.0,
greedy_h_weight: float = 1.5,
congestion_penalty: float = 10000.0,
bend_penalty: float = 250.0,
sbend_penalty: float = 500.0,
min_bend_radius: float = 50.0,
) -> None:
"""
Initialize the Cost Evaluator.
Args:
collision_engine: The engine for intersection checks.
danger_map: Pre-computed grid for heuristic proximity costs.
unit_length_cost: Cost multiplier per micrometer of path length.
greedy_h_weight: Heuristic weighting (A* greedy factor).
congestion_penalty: Multiplier for path overlaps in negotiated congestion.
bend_penalty: Base cost for 90-degree bends.
sbend_penalty: Base cost for parametric S-bends.
min_bend_radius: Minimum radius for 90-degree bends (used for alignment heuristic).
"""
self,
collision_engine: CollisionEngine,
danger_map: DangerMap | None = None,
unit_length_cost: float = 1.0,
greedy_h_weight: float = 1.5,
congestion_penalty: float = 10000.0,
bend_penalty: float = 250.0,
sbend_penalty: float | None = None,
min_bend_radius: float = 50.0,
) -> None:
actual_sbend_penalty = 2.0 * bend_penalty if sbend_penalty is None else sbend_penalty
self.collision_engine = collision_engine
self.danger_map = danger_map
self.config = CostConfig(
@ -66,189 +50,133 @@ class CostEvaluator:
greedy_h_weight=greedy_h_weight,
congestion_penalty=congestion_penalty,
bend_penalty=bend_penalty,
sbend_penalty=sbend_penalty,
sbend_penalty=actual_sbend_penalty,
min_bend_radius=min_bend_radius,
)
# Use config values
self.unit_length_cost = self.config.unit_length_cost
self.greedy_h_weight = self.config.greedy_h_weight
self.congestion_penalty = self.config.congestion_penalty
# Pre-cache configuration flags for fast path
self._refresh_cached_config()
# Target cache
self._target_x = 0.0
self._target_y = 0.0
self._target_ori = 0.0
self._target_r = 0
self._target_cos = 1.0
self._target_sin = 0.0
def _refresh_cached_config(self) -> None:
""" Sync internal caches with the current self.config object. """
if hasattr(self.config, 'min_bend_radius'):
if hasattr(self.config, "min_bend_radius"):
self._min_radius = self.config.min_bend_radius
elif hasattr(self.config, 'bend_radii') and self.config.bend_radii:
elif hasattr(self.config, "bend_radii") and self.config.bend_radii:
self._min_radius = min(self.config.bend_radii)
else:
self._min_radius = 50.0
if hasattr(self.config, 'unit_length_cost'):
if hasattr(self.config, "unit_length_cost"):
self.unit_length_cost = self.config.unit_length_cost
if hasattr(self.config, 'greedy_h_weight'):
if hasattr(self.config, "greedy_h_weight"):
self.greedy_h_weight = self.config.greedy_h_weight
if hasattr(self.config, 'congestion_penalty'):
if hasattr(self.config, "congestion_penalty"):
self.congestion_penalty = self.config.congestion_penalty
def set_target(self, target: Port) -> None:
""" Pre-calculate target-dependent values for faster heuristic. """
self._target_x = target.x
self._target_y = target.y
self._target_ori = target.orientation
rad = np.radians(target.orientation)
self._target_r = target.r
rad = np.radians(target.r)
self._target_cos = np.cos(rad)
self._target_sin = np.sin(rad)
def g_proximity(self, x: float, y: float) -> float:
"""
Get proximity cost from the Danger Map.
Args:
x, y: Coordinate to check.
Returns:
Proximity cost at location.
"""
if self.danger_map is None:
return 0.0
return self.danger_map.get_cost(x, y)
def h_manhattan(self, current: Port, target: Port) -> float:
"""
Heuristic: weighted Manhattan distance + mandatory turn penalties.
"""
tx, ty = target.x, target.y
# Avoid repeated trig for target orientation
if (abs(tx - self._target_x) > TOLERANCE_LINEAR or
abs(ty - self._target_y) > TOLERANCE_LINEAR or
abs(target.orientation - self._target_ori) > 0.1):
self.set_target(target)
if abs(tx - self._target_x) > TOLERANCE_LINEAR or abs(ty - self._target_y) > TOLERANCE_LINEAR or target.r != self._target_r:
self.set_target(target)
dx = abs(current.x - tx)
dy = abs(current.y - ty)
dist = dx + dy
bp = self.config.bend_penalty
penalty = 0.0
# 1. Orientation Difference
curr_ori = current.orientation
diff = abs(curr_ori - self._target_ori) % 360
if diff > 0.1:
if abs(diff - 180) < 0.1:
penalty += 2 * bp
else: # 90 or 270 degree rotation
penalty += 1 * bp
curr_r = current.r
diff = abs(curr_r - self._target_r) % 360
if diff > 0:
penalty += 2 * bp if diff == 180 else bp
# 2. Side Check (Entry half-plane)
v_dx = tx - current.x
v_dy = ty - current.y
side_proj = v_dx * self._target_cos + v_dy * self._target_sin
perp_dist = abs(v_dx * self._target_sin - v_dy * self._target_cos)
if side_proj < -0.1 or (side_proj < self._min_radius and perp_dist > 0.1):
if side_proj < 0 or (side_proj < self._min_radius and perp_dist > 0):
penalty += 2 * bp
# 3. Traveling Away
# Optimization: avoid np.radians/cos/sin if current_ori is standard 0,90,180,270
if curr_ori == 0: c_cos, c_sin = 1.0, 0.0
elif curr_ori == 90: c_cos, c_sin = 0.0, 1.0
elif curr_ori == 180: c_cos, c_sin = -1.0, 0.0
elif curr_ori == 270: c_cos, c_sin = 0.0, -1.0
if curr_r == 0:
c_cos, c_sin = 1.0, 0.0
elif curr_r == 90:
c_cos, c_sin = 0.0, 1.0
elif curr_r == 180:
c_cos, c_sin = -1.0, 0.0
else:
curr_rad = np.radians(curr_ori)
c_cos, c_sin = np.cos(curr_rad), np.sin(curr_rad)
move_proj = v_dx * c_cos + v_dy * c_sin
if move_proj < -0.1:
penalty += 2 * bp
c_cos, c_sin = 0.0, -1.0
# 4. Jog Alignment
if diff < 0.1:
if perp_dist > 0.1:
penalty += 2 * bp
move_proj = v_dx * c_cos + v_dy * c_sin
if move_proj < 0:
penalty += 2 * bp
if diff == 0 and perp_dist > 0:
penalty += 2 * bp
return self.greedy_h_weight * (dist + penalty)
def evaluate_move(
self,
geometry: list[Polygon] | None,
end_port: Port,
net_width: float,
net_id: str,
start_port: Port | None = None,
length: float = 0.0,
dilated_geometry: list[Polygon] | None = None,
skip_static: bool = False,
skip_congestion: bool = False,
penalty: float = 0.0,
) -> float:
"""
Calculate the cost of a single move (Straight, Bend, SBend).
Args:
geometry: List of polygons in the move.
end_port: Port at the end of the move.
net_width: Width of the waveguide (unused).
net_id: Identifier for the net.
start_port: Port at the start of the move.
length: Physical path length of the move.
dilated_geometry: Pre-calculated dilated polygons.
skip_static: If True, bypass static collision checks.
skip_congestion: If True, bypass congestion checks.
penalty: Fixed cost penalty for the move type.
Returns:
Total cost of the move, or 1e15 if invalid.
"""
_ = net_width # Unused
# 1. Boundary Check
self,
geometry: list[Polygon] | None,
end_port: Port,
net_width: float,
net_id: str,
start_port: Port | None = None,
length: float = 0.0,
dilated_geometry: list[Polygon] | None = None,
skip_static: bool = False,
skip_congestion: bool = False,
penalty: float = 0.0,
) -> float:
_ = net_width
danger_map = self.danger_map
if danger_map is not None:
if not danger_map.is_within_bounds(end_port.x, end_port.y):
return 1e15
if danger_map is not None and not danger_map.is_within_bounds(end_port.x, end_port.y):
return 1e15
total_cost = length * self.unit_length_cost + penalty
# 2. Collision Check
if not skip_static or not skip_congestion:
collision_engine = self.collision_engine
# Ensure geometry is provided if collision checks are enabled
if geometry is None:
return 1e15
return 1e15
collision_engine = self.collision_engine
for i, poly in enumerate(geometry):
dil_poly = dilated_geometry[i] if dilated_geometry else None
# Hard Collision (Static obstacles)
if not skip_static:
if collision_engine.check_collision(
poly, net_id, buffer_mode='static', start_port=start_port, end_port=end_port,
dilated_geometry=dil_poly
):
return 1e15
# Soft Collision (Negotiated Congestion)
if not skip_static and collision_engine.check_collision(
poly,
net_id,
buffer_mode="static",
start_port=start_port,
end_port=end_port,
dilated_geometry=dil_poly,
):
return 1e15
if not skip_congestion:
overlaps = collision_engine.check_collision(
poly, net_id, buffer_mode='congestion', dilated_geometry=dil_poly
)
overlaps = collision_engine.check_collision(poly, net_id, buffer_mode="congestion", dilated_geometry=dil_poly)
if isinstance(overlaps, int) and overlaps > 0:
total_cost += overlaps * self.congestion_penalty
# 3. Proximity cost from Danger Map
if danger_map is not None:
total_cost += danger_map.get_cost(end_port.x, end_port.y)
cost_s = danger_map.get_cost(start_port.x, start_port.y) if start_port else 0.0
cost_e = danger_map.get_cost(end_port.x, end_port.y)
if start_port:
mid_x = (start_port.x + end_port.x) / 2.0
mid_y = (start_port.y + end_port.y) / 2.0
cost_m = danger_map.get_cost(mid_x, mid_y)
total_cost += length * (cost_s + cost_m + cost_e) / 3.0
else:
total_cost += length * cost_e
return total_cost

View file

@ -3,6 +3,8 @@ from __future__ import annotations
from typing import TYPE_CHECKING
import numpy
import shapely
from scipy.spatial import cKDTree
from functools import lru_cache
if TYPE_CHECKING:
from shapely.geometry import Polygon
@ -10,36 +12,15 @@ if TYPE_CHECKING:
class DangerMap:
"""
A pre-computed grid for heuristic proximity costs, vectorized for performance.
A proximity cost evaluator using a KD-Tree of obstacle boundary points.
Scales with obstacle perimeter rather than design area.
"""
__slots__ = ('minx', 'miny', 'maxx', 'maxy', 'resolution', 'safety_threshold', 'k', 'width_cells', 'height_cells', 'grid')
minx: float
miny: float
maxx: float
maxy: float
""" Boundary coordinates of the map """
resolution: float
""" Grid cell size in micrometers """
safety_threshold: float
""" Distance below which proximity costs are applied """
k: float
""" Cost multiplier constant """
width_cells: int
height_cells: int
""" Grid dimensions in cells """
grid: numpy.ndarray
""" 2D array of pre-computed costs """
__slots__ = ('minx', 'miny', 'maxx', 'maxy', 'resolution', 'safety_threshold', 'k', 'tree')
def __init__(
self,
bounds: tuple[float, float, float, float],
resolution: float = 1.0,
resolution: float = 5.0,
safety_threshold: float = 10.0,
k: float = 1.0,
) -> None:
@ -48,7 +29,7 @@ class DangerMap:
Args:
bounds: (minx, miny, maxx, maxy) in um.
resolution: Cell size (um).
resolution: Sampling resolution for obstacle boundaries (um).
safety_threshold: Proximity limit (um).
k: Penalty multiplier.
"""
@ -56,79 +37,62 @@ class DangerMap:
self.resolution = resolution
self.safety_threshold = safety_threshold
self.k = k
# Grid dimensions
self.width_cells = int(numpy.ceil((self.maxx - self.minx) / self.resolution))
self.height_cells = int(numpy.ceil((self.maxy - self.miny) / self.resolution))
self.grid = numpy.zeros((self.width_cells, self.height_cells), dtype=numpy.float32)
self.tree: cKDTree | None = None
def precompute(self, obstacles: list[Polygon]) -> None:
"""
Pre-compute the proximity costs for the entire grid using vectorized operations.
Args:
obstacles: List of static obstacle geometries.
Pre-compute the proximity tree by sampling obstacle boundaries.
"""
from scipy.ndimage import distance_transform_edt
# 1. Create a binary mask of obstacles
mask = numpy.ones((self.width_cells, self.height_cells), dtype=bool)
# Create coordinate grids
x_coords = numpy.linspace(self.minx + self.resolution/2, self.maxx - self.resolution/2, self.width_cells)
y_coords = numpy.linspace(self.miny + self.resolution/2, self.maxy - self.resolution/2, self.height_cells)
xv, yv = numpy.meshgrid(x_coords, y_coords, indexing='ij')
all_points = []
for poly in obstacles:
# Use shapely.contains_xy for fast vectorized point-in-polygon check
in_poly = shapely.contains_xy(poly, xv, yv)
mask[in_poly] = False
# Sample exterior
exterior = poly.exterior
dist = 0
while dist < exterior.length:
pt = exterior.interpolate(dist)
all_points.append((pt.x, pt.y))
dist += self.resolution
# Sample interiors (holes)
for interior in poly.interiors:
dist = 0
while dist < interior.length:
pt = interior.interpolate(dist)
all_points.append((pt.x, pt.y))
dist += self.resolution
# 2. Distance transform (mask=True for empty space)
distances = distance_transform_edt(mask) * self.resolution
# 3. Proximity cost: k / d^2 if d < threshold, else 0
# Cap distances at a small epsilon (e.g. 0.1um) to avoid division by zero
safe_distances = numpy.maximum(distances, 0.1)
self.grid = numpy.where(
distances < self.safety_threshold,
self.k / (safe_distances**2),
0.0
).astype(numpy.float32)
if all_points:
self.tree = cKDTree(numpy.array(all_points))
else:
self.tree = None
# Clear cache when tree changes
self._get_cost_quantized.cache_clear()
def is_within_bounds(self, x: float, y: float) -> bool:
"""
Check if a coordinate is within the design bounds.
Args:
x, y: Coordinate to check.
Returns:
True if within [min, max] for both axes.
"""
return self.minx <= x <= self.maxx and self.miny <= y <= self.maxy
def get_cost(self, x: float, y: float) -> float:
"""
Get the proximity cost at a specific coordinate.
Args:
x, y: Coordinate to look up.
Returns:
Pre-computed cost, or 1e15 if out of bounds.
Get the proximity cost at a specific coordinate using the KD-Tree.
Coordinates are quantized to 1nm to improve cache performance.
"""
# Clamp to grid range to handle upper boundary exactly
ix = int((x - self.minx) / self.resolution)
iy = int((y - self.miny) / self.resolution)
# Handle exact upper boundary
if ix == self.width_cells and abs(x - self.maxx) < 1e-9:
ix = self.width_cells - 1
if iy == self.height_cells and abs(y - self.maxy) < 1e-9:
iy = self.height_cells - 1
qx_milli = int(round(x * 1000))
qy_milli = int(round(y * 1000))
return self._get_cost_quantized(qx_milli, qy_milli)
if 0 <= ix < self.width_cells and 0 <= iy < self.height_cells:
return float(self.grid[ix, iy])
return 1e15 # Outside bounds
@lru_cache(maxsize=100000)
def _get_cost_quantized(self, qx_milli: int, qy_milli: int) -> float:
qx = qx_milli / 1000.0
qy = qy_milli / 1000.0
if not self.is_within_bounds(qx, qy):
return 1e15
if self.tree is None:
return 0.0
dist, _ = self.tree.query([qx, qy], distance_upper_bound=self.safety_threshold)
if dist >= self.safety_threshold:
return 0.0
safe_dist = max(dist, 0.1)
return float(self.k / (safe_dist ** 2))

View file

@ -1,13 +1,16 @@
from __future__ import annotations
import logging
import time
import math
import random
import time
from dataclasses import dataclass
from typing import TYPE_CHECKING, Callable, Literal, Any
from typing import TYPE_CHECKING, Any, Callable, Literal
from inire.router.astar import route_astar, AStarMetrics
from inire.constants import TOLERANCE_LINEAR
import numpy
from inire.geometry.components import Bend90, Straight
from inire.router.astar import AStarMetrics, route_astar
if TYPE_CHECKING:
from inire.geometry.components import ComponentResult
@ -20,75 +23,37 @@ logger = logging.getLogger(__name__)
@dataclass
class RoutingResult:
"""
Result of a single net routing operation.
"""
net_id: str
""" Identifier for the net """
path: list[ComponentResult]
""" List of moves forming the path """
is_valid: bool
""" Whether the path is collision-free and reached the target """
collisions: int
""" Number of detected collisions/overlaps """
reached_target: bool = False
""" Whether the final port matches the target port """
class PathFinder:
"""
Multi-net router using Negotiated Congestion.
"""
__slots__ = ('context', 'metrics', 'max_iterations', 'base_congestion_penalty',
'use_tiered_strategy', 'congestion_multiplier', 'accumulated_expanded_nodes', 'warm_start')
context: AStarContext
""" The A* persistent state (config, caches, evaluator) """
metrics: AStarMetrics
""" Performance metrics for search operations """
max_iterations: int
""" Maximum number of rip-up and reroute iterations """
base_congestion_penalty: float
""" Starting penalty for overlaps """
congestion_multiplier: float
""" Multiplier for congestion penalty per iteration """
use_tiered_strategy: bool
""" If True, use simpler collision models in early iterations for speed """
warm_start: Literal['shortest', 'longest', 'user'] | None
""" Heuristic sorting for the initial greedy pass """
__slots__ = (
"context",
"metrics",
"max_iterations",
"base_congestion_penalty",
"use_tiered_strategy",
"congestion_multiplier",
"accumulated_expanded_nodes",
"warm_start",
"refine_paths",
)
def __init__(
self,
context: AStarContext,
metrics: AStarMetrics | None = None,
max_iterations: int = 10,
base_congestion_penalty: float = 100.0,
congestion_multiplier: float = 1.5,
use_tiered_strategy: bool = True,
warm_start: Literal['shortest', 'longest', 'user'] | None = 'shortest',
) -> None:
"""
Initialize the PathFinder.
Args:
context: The A* search context (evaluator, config, caches).
metrics: Optional metrics container.
max_iterations: Maximum number of rip-up and reroute iterations.
base_congestion_penalty: Starting penalty for overlaps.
congestion_multiplier: Multiplier for congestion penalty per iteration.
use_tiered_strategy: Whether to use simplified collision models in early iterations.
warm_start: Initial ordering strategy for a fast greedy pass.
"""
self,
context: AStarContext,
metrics: AStarMetrics | None = None,
max_iterations: int = 10,
base_congestion_penalty: float = 100.0,
congestion_multiplier: float = 1.5,
use_tiered_strategy: bool = True,
warm_start: Literal["shortest", "longest", "user"] | None = "shortest",
refine_paths: bool = False,
) -> None:
self.context = context
self.metrics = metrics if metrics is not None else AStarMetrics()
self.max_iterations = max_iterations
@ -96,114 +61,219 @@ class PathFinder:
self.congestion_multiplier = congestion_multiplier
self.use_tiered_strategy = use_tiered_strategy
self.warm_start = warm_start
self.accumulated_expanded_nodes: list[tuple[float, float, float]] = []
self.refine_paths = refine_paths
self.accumulated_expanded_nodes: list[tuple[int, int, int]] = []
@property
def cost_evaluator(self) -> CostEvaluator:
return self.context.cost_evaluator
def _perform_greedy_pass(
self,
netlist: dict[str, tuple[Port, Port]],
net_widths: dict[str, float],
order: Literal['shortest', 'longest', 'user']
) -> dict[str, list[ComponentResult]]:
"""
Internal greedy pass: route nets sequentially and freeze them as static.
"""
self,
netlist: dict[str, tuple[Port, Port]],
net_widths: dict[str, float],
order: Literal["shortest", "longest", "user"],
) -> dict[str, list[ComponentResult]]:
all_net_ids = list(netlist.keys())
if order != 'user':
def get_dist(nid):
s, t = netlist[nid]
return abs(t.x - s.x) + abs(t.y - s.y)
all_net_ids.sort(key=get_dist, reverse=(order == 'longest'))
greedy_paths = {}
temp_obj_ids = []
logger.info(f"PathFinder: Starting Greedy Warm-Start ({order} order)...")
if order != "user":
all_net_ids.sort(
key=lambda nid: abs(netlist[nid][1].x - netlist[nid][0].x) + abs(netlist[nid][1].y - netlist[nid][0].y),
reverse=(order == "longest"),
)
greedy_paths: dict[str, list[ComponentResult]] = {}
temp_obj_ids: list[int] = []
greedy_node_limit = min(self.context.config.node_limit, 2000)
for net_id in all_net_ids:
start, target = netlist[net_id]
width = net_widths.get(net_id, 2.0)
# Heuristic max cost for fail-fast
h_start = self.cost_evaluator.h_manhattan(start, target)
max_cost_limit = max(h_start * 3.0, 2000.0)
max_cost_limit = max(h_start * 3.0, 2000.0)
path = route_astar(
start, target, width, context=self.context, metrics=self.metrics,
net_id=net_id, skip_congestion=True, max_cost=max_cost_limit
start,
target,
width,
context=self.context,
metrics=self.metrics,
net_id=net_id,
skip_congestion=True,
max_cost=max_cost_limit,
self_collision_check=True,
node_limit=greedy_node_limit,
)
if path:
greedy_paths[net_id] = path
# Freeze as static
for res in path:
geoms = res.actual_geometry if res.actual_geometry is not None else res.geometry
for poly in geoms:
obj_id = self.cost_evaluator.collision_engine.add_static_obstacle(poly)
temp_obj_ids.append(obj_id)
# Clean up temporary static obstacles
if not path:
continue
greedy_paths[net_id] = path
for res in path:
geoms = res.actual_geometry if res.actual_geometry is not None else res.geometry
dilated_geoms = res.dilated_actual_geometry if res.dilated_actual_geometry else res.dilated_geometry
for i, poly in enumerate(geoms):
dilated = dilated_geoms[i] if dilated_geoms else None
obj_id = self.cost_evaluator.collision_engine.add_static_obstacle(poly, dilated_geometry=dilated)
temp_obj_ids.append(obj_id)
self.context.clear_static_caches()
for obj_id in temp_obj_ids:
self.cost_evaluator.collision_engine.remove_static_obstacle(obj_id)
logger.info(f"PathFinder: Greedy Warm-Start finished. Seeding {len(greedy_paths)}/{len(netlist)} nets.")
return greedy_paths
def _has_self_collision(self, path: list[ComponentResult]) -> bool:
"""
Quickly check if a path intersects itself.
"""
if not path:
return False
num_components = len(path)
for i in range(num_components):
comp_i = path[i]
for i, comp_i in enumerate(path):
tb_i = comp_i.total_bounds
for j in range(i + 2, num_components): # Skip immediate neighbors
for j in range(i + 2, len(path)):
comp_j = path[j]
tb_j = comp_j.total_bounds
# AABB Check
if (tb_i[0] < tb_j[2] and tb_i[2] > tb_j[0] and
tb_i[1] < tb_j[3] and tb_i[3] > tb_j[1]):
# Real geometry check
if tb_i[0] < tb_j[2] and tb_i[2] > tb_j[0] and tb_i[1] < tb_j[3] and tb_i[3] > tb_j[1]:
for p_i in comp_i.geometry:
for p_j in comp_j.geometry:
if p_i.intersects(p_j) and not p_i.touches(p_j):
return True
return False
def _path_cost(self, path: list[ComponentResult]) -> float:
total = 0.0
bend_penalty = self.context.config.bend_penalty
sbend_penalty = self.context.config.sbend_penalty
for comp in path:
total += comp.length
if comp.move_type == "Bend90":
radius = comp.length * 2.0 / math.pi if comp.length > 0 else 0.0
if radius > 0:
total += bend_penalty * (10.0 / radius) ** 0.5
else:
total += bend_penalty
elif comp.move_type == "SBend":
total += sbend_penalty
return total
def _extract_geometry(self, path: list[ComponentResult]) -> tuple[list[Any], list[Any]]:
all_geoms = []
all_dilated = []
for res in path:
all_geoms.extend(res.geometry)
if res.dilated_geometry:
all_dilated.extend(res.dilated_geometry)
else:
dilation = self.cost_evaluator.collision_engine.clearance / 2.0
all_dilated.extend([p.buffer(dilation) for p in res.geometry])
return all_geoms, all_dilated
def _to_local(self, start: Port, point: Port) -> tuple[int, int]:
dx = point.x - start.x
dy = point.y - start.y
if start.r == 0:
return dx, dy
if start.r == 90:
return dy, -dx
if start.r == 180:
return -dx, -dy
return -dy, dx
def _build_same_orientation_dogleg(
self,
start: Port,
target: Port,
net_width: float,
radius: float,
side_extent: float,
) -> list[ComponentResult] | None:
local_dx, local_dy = self._to_local(start, target)
if abs(local_dy) > 0 or local_dx < 4.0 * radius - 0.01:
return None
side_abs = abs(side_extent)
side_length = side_abs - 2.0 * radius
if side_length < self.context.config.min_straight_length - 0.01:
return None
forward_length = local_dx - 4.0 * radius
if forward_length < -0.01:
return None
first_dir = "CCW" if side_extent > 0 else "CW"
second_dir = "CW" if side_extent > 0 else "CCW"
dilation = self.cost_evaluator.collision_engine.clearance / 2.0
path: list[ComponentResult] = []
curr = start
for direction, straight_len in (
(first_dir, side_length),
(second_dir, forward_length),
(second_dir, side_length),
(first_dir, None),
):
bend = Bend90.generate(curr, radius, net_width, direction, dilation=dilation)
path.append(bend)
curr = bend.end_port
if straight_len is None:
continue
if straight_len > 0.01:
straight = Straight.generate(curr, straight_len, net_width, dilation=dilation)
path.append(straight)
curr = straight.end_port
if curr != target:
return None
return path
def _refine_path(
self,
net_id: str,
start: Port,
target: Port,
net_width: float,
path: list[ComponentResult],
) -> list[ComponentResult]:
if not path or start.r != target.r:
return path
bend_count = sum(1 for comp in path if comp.move_type == "Bend90")
if bend_count < 5:
return path
side_extents = []
local_points = [self._to_local(start, start)]
local_points.extend(self._to_local(start, comp.end_port) for comp in path)
min_side = min(point[1] for point in local_points)
max_side = max(point[1] for point in local_points)
if min_side < -0.01:
side_extents.append(float(min_side))
if max_side > 0.01:
side_extents.append(float(max_side))
if not side_extents:
return path
best_path = path
best_cost = self._path_cost(path)
collision_engine = self.cost_evaluator.collision_engine
for radius in self.context.config.bend_radii:
for side_extent in side_extents:
candidate = self._build_same_orientation_dogleg(start, target, net_width, radius, side_extent)
if candidate is None:
continue
is_valid, collisions = collision_engine.verify_path(net_id, candidate)
if not is_valid or collisions != 0:
continue
candidate_cost = self._path_cost(candidate)
if candidate_cost + 1e-6 < best_cost:
best_cost = candidate_cost
best_path = candidate
return best_path
def route_all(
self,
netlist: dict[str, tuple[Port, Port]],
net_widths: dict[str, float],
store_expanded: bool = False,
iteration_callback: Callable[[int, dict[str, RoutingResult]], None] | None = None,
shuffle_nets: bool = False,
sort_nets: Literal['shortest', 'longest', 'user', None] = None,
initial_paths: dict[str, list[ComponentResult]] | None = None,
seed: int | None = None,
) -> dict[str, RoutingResult]:
"""
Route all nets in the netlist using Negotiated Congestion.
Args:
netlist: Mapping of net_id to (start_port, target_port).
net_widths: Mapping of net_id to waveguide width.
store_expanded: Whether to store expanded nodes for ALL iterations and nets.
iteration_callback: Optional callback(iteration_idx, current_results).
shuffle_nets: Whether to randomize the order of nets each iteration.
sort_nets: Heuristic sorting for the initial iteration order (overrides self.warm_start).
initial_paths: Pre-computed paths to use for Iteration 0 (overrides warm_start).
seed: Optional seed for randomization (enables reproducibility).
Returns:
Mapping of net_id to RoutingResult.
"""
self,
netlist: dict[str, tuple[Port, Port]],
net_widths: dict[str, float],
store_expanded: bool = False,
iteration_callback: Callable[[int, dict[str, RoutingResult]], None] | None = None,
shuffle_nets: bool = False,
sort_nets: Literal["shortest", "longest", "user", None] = None,
initial_paths: dict[str, list[ComponentResult]] | None = None,
seed: int | None = None,
) -> dict[str, RoutingResult]:
results: dict[str, RoutingResult] = {}
self.cost_evaluator.congestion_penalty = self.base_congestion_penalty
self.accumulated_expanded_nodes = []
@ -212,63 +282,44 @@ class PathFinder:
start_time = time.monotonic()
num_nets = len(netlist)
session_timeout = max(60.0, 10.0 * num_nets * self.max_iterations)
all_net_ids = list(netlist.keys())
needs_sc = set() # Nets requiring self-collision avoidance
# Determine initial paths (Warm Start)
if initial_paths is None:
ws_order = sort_nets if sort_nets is not None else self.warm_start
if ws_order is not None:
initial_paths = self._perform_greedy_pass(netlist, net_widths, ws_order)
self.context.clear_static_caches()
needs_sc: set[str] = set()
# Apply initial sorting heuristic if requested (for the main NC loop)
if sort_nets:
def get_dist(nid):
s, t = netlist[nid]
return abs(t.x - s.x) + abs(t.y - s.y)
if sort_nets != 'user':
all_net_ids.sort(key=get_dist, reverse=(sort_nets == 'longest'))
if initial_paths is None:
ws_order = sort_nets if sort_nets is not None else self.warm_start
if ws_order is not None:
initial_paths = self._perform_greedy_pass(netlist, net_widths, ws_order)
self.context.clear_static_caches()
if sort_nets and sort_nets != "user":
all_net_ids.sort(
key=lambda nid: abs(netlist[nid][1].x - netlist[nid][0].x) + abs(netlist[nid][1].y - netlist[nid][0].y),
reverse=(sort_nets == "longest"),
)
for iteration in range(self.max_iterations):
any_congestion = False
# Clear accumulation for this iteration so callback gets fresh data
self.accumulated_expanded_nodes = []
self.metrics.reset_per_route()
logger.info(f'PathFinder Iteration {iteration}...')
# 0. Shuffle nets if requested
if shuffle_nets:
# Use a new seed based on iteration for deterministic different orders
if shuffle_nets and (iteration > 0 or initial_paths is None):
it_seed = (seed + iteration) if seed is not None else None
random.Random(it_seed).shuffle(all_net_ids)
# Sequence through nets
for net_id in all_net_ids:
start, target = netlist[net_id]
# Timeout check
elapsed = time.monotonic() - start_time
if elapsed > session_timeout:
logger.warning(f'PathFinder TIMEOUT after {elapsed:.2f}s')
return self._finalize_results(results, netlist)
if time.monotonic() - start_time > session_timeout:
self.cost_evaluator.collision_engine.dynamic_tree = None
self.cost_evaluator.collision_engine._ensure_dynamic_tree()
return self.verify_all_nets(results, netlist)
width = net_widths.get(net_id, 2.0)
# 1. Rip-up existing path
self.cost_evaluator.collision_engine.remove_path(net_id)
path: list[ComponentResult] | None = None
# 2. Reroute or Use Initial Path
path = None
# Warm Start Logic: Use provided path for Iteration 0
if iteration == 0 and initial_paths and net_id in initial_paths:
path = initial_paths[net_id]
logger.debug(f' Net {net_id} used Warm Start path.')
else:
# Standard Routing Logic
target_coll_model = self.context.config.bend_collision_type
coll_model = target_coll_model
skip_cong = False
@ -276,165 +327,103 @@ class PathFinder:
skip_cong = True
if target_coll_model == "arc":
coll_model = "clipped_bbox"
base_node_limit = self.context.config.node_limit
current_node_limit = base_node_limit
net_start = time.monotonic()
path = route_astar(
start, target, width, context=self.context, metrics=self.metrics,
net_id=net_id, bend_collision_type=coll_model, return_partial=True,
store_expanded=store_expanded, skip_congestion=skip_cong,
start,
target,
width,
context=self.context,
metrics=self.metrics,
net_id=net_id,
bend_collision_type=coll_model,
return_partial=True,
store_expanded=store_expanded,
skip_congestion=skip_cong,
self_collision_check=(net_id in needs_sc),
node_limit=current_node_limit
node_limit=self.context.config.node_limit,
)
if store_expanded and self.metrics.last_expanded_nodes:
self.accumulated_expanded_nodes.extend(self.metrics.last_expanded_nodes)
logger.debug(f' Net {net_id} routed in {time.monotonic() - net_start:.4f}s using {coll_model}')
if path:
# Check if reached exactly (relative to snapped target)
last_p = path[-1].end_port
snap = self.context.config.snap_size
from inire.geometry.components import snap_search_grid
reached = (abs(last_p.x - snap_search_grid(target.x, snap)) < TOLERANCE_LINEAR and
abs(last_p.y - snap_search_grid(target.y, snap)) < TOLERANCE_LINEAR and
abs(last_p.orientation - target.orientation) < 0.1)
# Check for self-collision if not already handled by router
if reached and net_id not in needs_sc:
if self._has_self_collision(path):
logger.info(f' Net {net_id} detected self-collision. Enabling protection for next iteration.')
needs_sc.add(net_id)
any_congestion = True
# 3. Add to index (even if partial) so other nets negotiate around it
all_geoms = []
all_dilated = []
for res in path:
all_geoms.extend(res.geometry)
if res.dilated_geometry:
all_dilated.extend(res.dilated_geometry)
else:
dilation = self.cost_evaluator.collision_engine.clearance / 2.0
all_dilated.extend([p.buffer(dilation) for p in res.geometry])
self.cost_evaluator.collision_engine.add_path(net_id, all_geoms, dilated_geometry=all_dilated)
# Check if this new path has any congestion
collision_count = 0
if reached:
verif_geoms = []
verif_dilated = []
for res in path:
is_proxy = (res.actual_geometry is not None)
g = res.actual_geometry if is_proxy else res.geometry
verif_geoms.extend(g)
if is_proxy:
if res.dilated_actual_geometry:
verif_dilated.extend(res.dilated_actual_geometry)
else:
dilation = self.cost_evaluator.collision_engine.clearance / 2.0
verif_dilated.extend([p.buffer(dilation) for p in g])
else:
if res.dilated_geometry:
verif_dilated.extend(res.dilated_geometry)
else:
dilation = self.cost_evaluator.collision_engine.clearance / 2.0
verif_dilated.extend([p.buffer(dilation) for p in g])
self.cost_evaluator.collision_engine._ensure_dynamic_tree()
if self.cost_evaluator.collision_engine.dynamic_tree:
# Vectorized query for all polygons in the path
res_indices, tree_indices = self.cost_evaluator.collision_engine.dynamic_tree.query(verif_dilated, predicate='intersects')
for hit_idx in tree_indices:
obj_id = self.cost_evaluator.collision_engine.dynamic_obj_ids[hit_idx]
other_net_id, _ = self.cost_evaluator.collision_engine.dynamic_geometries[obj_id]
if other_net_id != net_id:
collision_count += 1
if collision_count > 0:
any_congestion = True
logger.debug(f' Net {net_id}: reached={reached}, collisions={collision_count}')
results[net_id] = RoutingResult(net_id, path, (collision_count == 0 and reached), collision_count, reached_target=reached)
else:
if not path:
results[net_id] = RoutingResult(net_id, [], False, 0, reached_target=False)
any_congestion = True # Total failure might need a retry with different ordering
any_congestion = True
continue
last_p = path[-1].end_port
reached = last_p == target
if reached and net_id not in needs_sc and self._has_self_collision(path):
needs_sc.add(net_id)
any_congestion = True
all_geoms = []
all_dilated = []
for res in path:
all_geoms.extend(res.geometry)
if res.dilated_geometry:
all_dilated.extend(res.dilated_geometry)
else:
dilation = self.cost_evaluator.collision_engine.clearance / 2.0
all_dilated.extend([p.buffer(dilation) for p in res.geometry])
self.cost_evaluator.collision_engine.add_path(net_id, all_geoms, dilated_geometry=all_dilated)
collision_count = 0
if reached:
is_valid, collision_count = self.cost_evaluator.collision_engine.verify_path(net_id, path)
any_congestion = any_congestion or not is_valid
results[net_id] = RoutingResult(net_id, path, reached and collision_count == 0, collision_count, reached_target=reached)
if iteration_callback:
iteration_callback(iteration, results)
if not any_congestion:
break
self.cost_evaluator.congestion_penalty *= self.congestion_multiplier
return self._finalize_results(results, netlist)
if self.refine_paths and results:
for net_id in all_net_ids:
res = results.get(net_id)
if not res or not res.path or not res.reached_target or not res.is_valid:
continue
start, target = netlist[net_id]
width = net_widths.get(net_id, 2.0)
self.cost_evaluator.collision_engine.remove_path(net_id)
refined_path = self._refine_path(net_id, start, target, width, res.path)
all_geoms, all_dilated = self._extract_geometry(refined_path)
self.cost_evaluator.collision_engine.add_path(net_id, all_geoms, dilated_geometry=all_dilated)
results[net_id] = RoutingResult(
net_id=net_id,
path=refined_path,
is_valid=res.is_valid,
collisions=res.collisions,
reached_target=res.reached_target,
)
def _finalize_results(
self,
results: dict[str, RoutingResult],
netlist: dict[str, tuple[Port, Port]],
) -> dict[str, RoutingResult]:
"""
Final check: re-verify all nets against the final static paths.
"""
logger.debug(f'Finalizing results for nets: {list(results.keys())}')
final_results = {}
for net_id in netlist:
self.cost_evaluator.collision_engine.dynamic_tree = None
self.cost_evaluator.collision_engine._ensure_dynamic_tree()
return self.verify_all_nets(results, netlist)
def verify_all_nets(
self,
results: dict[str, RoutingResult],
netlist: dict[str, tuple[Port, Port]],
) -> dict[str, RoutingResult]:
final_results: dict[str, RoutingResult] = {}
for net_id, (_, target_p) in netlist.items():
res = results.get(net_id)
if not res or not res.path:
final_results[net_id] = RoutingResult(net_id, [], False, 0)
continue
if not res.reached_target:
# Skip re-verification for partial paths to avoid massive performance hit
final_results[net_id] = res
continue
collision_count = 0
verif_geoms = []
verif_dilated = []
for comp in res.path:
is_proxy = (comp.actual_geometry is not None)
g = comp.actual_geometry if is_proxy else comp.geometry
verif_geoms.extend(g)
if is_proxy:
if comp.dilated_actual_geometry:
verif_dilated.extend(comp.dilated_actual_geometry)
else:
dilation = self.cost_evaluator.collision_engine.clearance / 2.0
verif_dilated.extend([p.buffer(dilation) for p in g])
else:
if comp.dilated_geometry:
verif_dilated.extend(comp.dilated_geometry)
else:
dilation = self.cost_evaluator.collision_engine.clearance / 2.0
verif_dilated.extend([p.buffer(dilation) for p in g])
self.cost_evaluator.collision_engine._ensure_dynamic_tree()
if self.cost_evaluator.collision_engine.dynamic_tree:
# Vectorized query
res_indices, tree_indices = self.cost_evaluator.collision_engine.dynamic_tree.query(verif_dilated, predicate='intersects')
for hit_idx in tree_indices:
obj_id = self.cost_evaluator.collision_engine.dynamic_obj_ids[hit_idx]
other_net_id, _ = self.cost_evaluator.collision_engine.dynamic_geometries[obj_id]
if other_net_id != net_id:
collision_count += 1
target_p = netlist[net_id][1]
last_p = res.path[-1].end_port
snap = self.context.config.snap_size
from inire.geometry.components import snap_search_grid
reached = (abs(last_p.x - snap_search_grid(target_p.x, snap)) < TOLERANCE_LINEAR and
abs(last_p.y - snap_search_grid(target_p.y, snap)) < TOLERANCE_LINEAR and
abs(last_p.orientation - target_p.orientation) < 0.1)
final_results[net_id] = RoutingResult(net_id, res.path, (collision_count == 0 and reached), collision_count, reached_target=reached)
reached = last_p == target_p
is_valid, collisions = self.cost_evaluator.collision_engine.verify_path(net_id, res.path)
final_results[net_id] = RoutingResult(
net_id=net_id,
path=res.path,
is_valid=(is_valid and reached),
collisions=collisions,
reached_target=reached,
)
return final_results

View file

@ -16,7 +16,7 @@ class VisibilityManager:
"""
Manages corners of static obstacles for sparse A* / Visibility Graph jumps.
"""
__slots__ = ('collision_engine', 'corners', 'corner_index', '_corner_graph', '_static_visibility_cache')
__slots__ = ('collision_engine', 'corners', 'corner_index', '_corner_graph', '_static_visibility_cache', '_built_static_version')
def __init__(self, collision_engine: CollisionEngine) -> None:
self.collision_engine = collision_engine
@ -24,12 +24,28 @@ class VisibilityManager:
self.corner_index = rtree.index.Index()
self._corner_graph: dict[int, list[tuple[float, float, float]]] = {}
self._static_visibility_cache: dict[tuple[int, int], list[tuple[float, float, float]]] = {}
self._built_static_version = -1
self._build()
def clear_cache(self) -> None:
"""
Reset all static visibility data.
"""
self.corners = []
self.corner_index = rtree.index.Index()
self._corner_graph = {}
self._static_visibility_cache = {}
self._build()
def _ensure_current(self) -> None:
if self._built_static_version != self.collision_engine._static_version:
self.clear_cache()
def _build(self) -> None:
"""
Extract corners and pre-compute corner-to-corner visibility.
"""
self._built_static_version = self.collision_engine._static_version
raw_corners = []
for obj_id, poly in self.collision_engine.static_dilated.items():
coords = list(poly.exterior.coords)
@ -45,7 +61,7 @@ class VisibilityManager:
if not raw_corners:
return
# Deduplicate and snap to 1nm
# Deduplicate repeated corner coordinates
seen = set()
for x, y in raw_corners:
sx, sy = round(x, 3), round(y, 3)
@ -81,6 +97,7 @@ class VisibilityManager:
Find all corners visible from the origin.
Returns list of (x, y, distance).
"""
self._ensure_current()
if max_dist < 0:
return []
@ -123,3 +140,20 @@ class VisibilityManager:
self._static_visibility_cache[cache_key] = visible
return visible
def get_corner_visibility(self, origin: Port, max_dist: float = 1000.0) -> list[tuple[float, float, float]]:
"""
Return precomputed visibility only when the origin is already at a known corner.
This avoids the expensive arbitrary-point visibility scan in hot search paths.
"""
self._ensure_current()
if max_dist < 0:
return []
ox, oy = round(origin.x, 3), round(origin.y, 3)
nearby = list(self.corner_index.intersection((ox - 0.001, oy - 0.001, ox + 0.001, oy + 0.001)))
for idx in nearby:
cx, cy = self.corners[idx]
if abs(cx - ox) < 1e-4 and abs(cy - oy) < 1e-4 and idx in self._corner_graph:
return [corner for corner in self._corner_graph[idx] if corner[2] <= max_dist]
return []

View file

@ -0,0 +1,311 @@
from __future__ import annotations
from dataclasses import dataclass
from time import perf_counter
from typing import Callable
from shapely.geometry import Polygon, box
from inire.geometry.collision import CollisionEngine
from inire.geometry.primitives import Port
from inire.router.astar import AStarContext, AStarMetrics
from inire.router.cost import CostEvaluator
from inire.router.danger_map import DangerMap
from inire.router.pathfinder import PathFinder, RoutingResult
@dataclass(frozen=True)
class ScenarioOutcome:
duration_s: float
total_results: int
valid_results: int
reached_targets: int
@dataclass(frozen=True)
class ScenarioDefinition:
name: str
run: Callable[[], ScenarioOutcome]
def _build_router(
*,
bounds: tuple[float, float, float, float],
clearance: float = 2.0,
obstacles: list[Polygon] | None = None,
evaluator_kwargs: dict[str, float] | None = None,
context_kwargs: dict[str, object] | None = None,
pathfinder_kwargs: dict[str, object] | None = None,
) -> tuple[CollisionEngine, CostEvaluator, AStarContext, AStarMetrics, PathFinder]:
static_obstacles = obstacles or []
engine = CollisionEngine(clearance=clearance)
for obstacle in static_obstacles:
engine.add_static_obstacle(obstacle)
danger_map = DangerMap(bounds=bounds)
danger_map.precompute(static_obstacles)
evaluator = CostEvaluator(engine, danger_map, **(evaluator_kwargs or {}))
context = AStarContext(evaluator, **(context_kwargs or {}))
metrics = AStarMetrics()
pathfinder = PathFinder(context, metrics, **(pathfinder_kwargs or {}))
return engine, evaluator, context, metrics, pathfinder
def _summarize(results: dict[str, RoutingResult], duration_s: float) -> ScenarioOutcome:
return ScenarioOutcome(
duration_s=duration_s,
total_results=len(results),
valid_results=sum(1 for result in results.values() if result.is_valid),
reached_targets=sum(1 for result in results.values() if result.reached_target),
)
def run_example_01() -> ScenarioOutcome:
_, _, _, _, pathfinder = _build_router(bounds=(0, 0, 100, 100), context_kwargs={"bend_radii": [10.0]})
netlist = {"net1": (Port(10, 50, 0), Port(90, 50, 0))}
t0 = perf_counter()
results = pathfinder.route_all(netlist, {"net1": 2.0})
t1 = perf_counter()
return _summarize(results, t1 - t0)
def run_example_02() -> ScenarioOutcome:
_, _, _, _, pathfinder = _build_router(
bounds=(0, 0, 100, 100),
evaluator_kwargs={
"greedy_h_weight": 1.5,
"bend_penalty": 50.0,
"sbend_penalty": 150.0,
},
context_kwargs={
"bend_radii": [10.0],
"sbend_radii": [10.0],
},
pathfinder_kwargs={"base_congestion_penalty": 1000.0},
)
netlist = {
"horizontal": (Port(10, 50, 0), Port(90, 50, 0)),
"vertical_up": (Port(45, 10, 90), Port(45, 90, 90)),
"vertical_down": (Port(55, 90, 270), Port(55, 10, 270)),
}
widths = {net_id: 2.0 for net_id in netlist}
t0 = perf_counter()
results = pathfinder.route_all(netlist, widths)
t1 = perf_counter()
return _summarize(results, t1 - t0)
def run_example_03() -> ScenarioOutcome:
engine, _, _, _, pathfinder = _build_router(bounds=(0, -50, 100, 50), context_kwargs={"bend_radii": [10.0]})
t0 = perf_counter()
results_a = pathfinder.route_all({"netA": (Port(10, 0, 0), Port(90, 0, 0))}, {"netA": 2.0})
engine.lock_net("netA")
results_b = pathfinder.route_all({"netB": (Port(50, -20, 90), Port(50, 20, 90))}, {"netB": 2.0})
t1 = perf_counter()
return _summarize({**results_a, **results_b}, t1 - t0)
def run_example_04() -> ScenarioOutcome:
_, _, _, _, pathfinder = _build_router(
bounds=(0, 0, 100, 100),
evaluator_kwargs={
"unit_length_cost": 1.0,
"bend_penalty": 10.0,
"sbend_penalty": 20.0,
},
context_kwargs={
"node_limit": 50000,
"bend_radii": [10.0, 30.0],
"sbend_offsets": [5.0],
"bend_penalty": 10.0,
"sbend_penalty": 20.0,
},
)
netlist = {
"sbend_only": (Port(10, 50, 0), Port(60, 55, 0)),
"multi_radii": (Port(10, 10, 0), Port(90, 90, 0)),
}
widths = {"sbend_only": 2.0, "multi_radii": 2.0}
t0 = perf_counter()
results = pathfinder.route_all(netlist, widths)
t1 = perf_counter()
return _summarize(results, t1 - t0)
def run_example_05() -> ScenarioOutcome:
_, _, _, _, pathfinder = _build_router(
bounds=(0, 0, 200, 200),
evaluator_kwargs={"bend_penalty": 50.0},
context_kwargs={"bend_radii": [20.0]},
)
netlist = {
"u_turn": (Port(50, 50, 0), Port(50, 70, 180)),
"loop": (Port(100, 100, 90), Port(100, 80, 270)),
"zig_zag": (Port(20, 150, 0), Port(180, 150, 0)),
}
widths = {net_id: 2.0 for net_id in netlist}
t0 = perf_counter()
results = pathfinder.route_all(netlist, widths)
t1 = perf_counter()
return _summarize(results, t1 - t0)
def run_example_06() -> ScenarioOutcome:
bounds = (-20, -20, 170, 170)
obstacles = [
box(40, 110, 60, 130),
box(40, 60, 60, 80),
box(40, 10, 60, 30),
]
engine = CollisionEngine(clearance=2.0)
for obstacle in obstacles:
engine.add_static_obstacle(obstacle)
danger_map = DangerMap(bounds=bounds)
danger_map.precompute(obstacles)
evaluator = CostEvaluator(engine, danger_map, bend_penalty=50.0, sbend_penalty=150.0)
contexts = [
AStarContext(evaluator, bend_radii=[10.0], bend_collision_type="arc"),
AStarContext(evaluator, bend_radii=[10.0], bend_collision_type="bbox"),
AStarContext(evaluator, bend_radii=[10.0], bend_collision_type="clipped_bbox", bend_clip_margin=1.0),
]
netlists = [
{"arc_model": (Port(10, 120, 0), Port(90, 140, 90))},
{"bbox_model": (Port(10, 70, 0), Port(90, 90, 90))},
{"clipped_model": (Port(10, 20, 0), Port(90, 40, 90))},
]
widths = [
{"arc_model": 2.0},
{"bbox_model": 2.0},
{"clipped_model": 2.0},
]
t0 = perf_counter()
combined_results: dict[str, RoutingResult] = {}
for context, netlist, net_widths in zip(contexts, netlists, widths, strict=True):
pathfinder = PathFinder(context, use_tiered_strategy=False)
combined_results.update(pathfinder.route_all(netlist, net_widths))
t1 = perf_counter()
return _summarize(combined_results, t1 - t0)
def run_example_07() -> ScenarioOutcome:
bounds = (0, 0, 1000, 1000)
obstacles = [
box(450, 0, 550, 400),
box(450, 600, 550, 1000),
]
_, evaluator, _, metrics, pathfinder = _build_router(
bounds=bounds,
clearance=6.0,
obstacles=obstacles,
evaluator_kwargs={
"greedy_h_weight": 1.5,
"unit_length_cost": 0.1,
"bend_penalty": 100.0,
"sbend_penalty": 400.0,
"congestion_penalty": 100.0,
},
context_kwargs={
"node_limit": 2000000,
"bend_radii": [50.0],
"sbend_radii": [50.0],
},
pathfinder_kwargs={
"max_iterations": 15,
"base_congestion_penalty": 100.0,
"congestion_multiplier": 1.4,
},
)
num_nets = 10
start_x = 50
start_y_base = 500 - (num_nets * 10.0) / 2.0
end_x = 950
end_y_base = 100
end_y_pitch = 800.0 / (num_nets - 1)
netlist = {}
for index in range(num_nets):
sy = int(round(start_y_base + index * 10.0))
ey = int(round(end_y_base + index * end_y_pitch))
netlist[f"net_{index:02d}"] = (Port(start_x, sy, 0), Port(end_x, ey, 0))
def iteration_callback(idx: int, current_results: dict[str, RoutingResult]) -> None:
new_greedy = max(1.1, 1.5 - ((idx + 1) / 10.0) * 0.4)
evaluator.greedy_h_weight = new_greedy
metrics.reset_per_route()
t0 = perf_counter()
results = pathfinder.route_all(
netlist,
dict.fromkeys(netlist, 2.0),
store_expanded=True,
iteration_callback=iteration_callback,
shuffle_nets=True,
seed=42,
)
t1 = perf_counter()
return _summarize(results, t1 - t0)
def run_example_08() -> ScenarioOutcome:
bounds = (0, 0, 150, 150)
engine = CollisionEngine(clearance=2.0)
danger_map = DangerMap(bounds=bounds)
danger_map.precompute([])
evaluator = CostEvaluator(engine, danger_map, bend_penalty=50.0, sbend_penalty=150.0)
metrics = AStarMetrics()
netlist = {"custom_bend": (Port(20, 20, 0), Port(100, 100, 90))}
widths = {"custom_bend": 2.0}
context_std = AStarContext(evaluator, bend_radii=[10.0], sbend_radii=[])
context_custom = AStarContext(
evaluator,
bend_radii=[10.0],
bend_collision_type=Polygon([(-10, -10), (10, -10), (10, 10), (-10, 10)]),
sbend_radii=[],
)
t0 = perf_counter()
results_std = PathFinder(context_std, metrics).route_all(netlist, widths)
results_custom = PathFinder(context_custom, AStarMetrics(), use_tiered_strategy=False).route_all(
{"custom_model": netlist["custom_bend"]},
{"custom_model": 2.0},
)
t1 = perf_counter()
return _summarize({**results_std, **results_custom}, t1 - t0)
def run_example_09() -> ScenarioOutcome:
obstacles = [
box(35, 35, 45, 65),
box(55, 35, 65, 65),
]
_, _, _, _, pathfinder = _build_router(
bounds=(0, 0, 100, 100),
obstacles=obstacles,
evaluator_kwargs={"bend_penalty": 50.0, "sbend_penalty": 150.0},
context_kwargs={"node_limit": 3, "bend_radii": [10.0]},
pathfinder_kwargs={"warm_start": None},
)
netlist = {"budget_limited_net": (Port(10, 50, 0), Port(85, 60, 180))}
t0 = perf_counter()
results = pathfinder.route_all(netlist, {"budget_limited_net": 2.0})
t1 = perf_counter()
return _summarize(results, t1 - t0)
SCENARIOS: tuple[ScenarioDefinition, ...] = (
ScenarioDefinition("example_01_simple_route", run_example_01),
ScenarioDefinition("example_02_congestion_resolution", run_example_02),
ScenarioDefinition("example_03_locked_paths", run_example_03),
ScenarioDefinition("example_04_sbends_and_radii", run_example_04),
ScenarioDefinition("example_05_orientation_stress", run_example_05),
ScenarioDefinition("example_06_bend_collision_models", run_example_06),
ScenarioDefinition("example_07_large_scale_routing", run_example_07),
ScenarioDefinition("example_08_custom_bend_geometry", run_example_08),
ScenarioDefinition("example_09_unroutable_best_effort", run_example_09),
)

View file

@ -1,6 +1,8 @@
import pytest
from shapely.geometry import Polygon
import inire.router.astar as astar_module
from inire.geometry.components import SBend, Straight
from inire.geometry.collision import CollisionEngine
from inire.geometry.primitives import Port
from inire.router.astar import AStarContext, route_astar
@ -19,7 +21,7 @@ def basic_evaluator() -> CostEvaluator:
def test_astar_straight(basic_evaluator: CostEvaluator) -> None:
context = AStarContext(basic_evaluator, snap_size=1.0)
context = AStarContext(basic_evaluator)
start = Port(0, 0, 0)
target = Port(50, 0, 0)
path = route_astar(start, target, net_width=2.0, context=context)
@ -35,7 +37,7 @@ def test_astar_straight(basic_evaluator: CostEvaluator) -> None:
def test_astar_bend(basic_evaluator: CostEvaluator) -> None:
context = AStarContext(basic_evaluator, snap_size=1.0, bend_radii=[10.0])
context = AStarContext(basic_evaluator, bend_radii=[10.0])
start = Port(0, 0, 0)
# 20um right, 20um up. Needs a 10um bend and a 10um bend.
target = Port(20, 20, 0)
@ -56,7 +58,7 @@ def test_astar_obstacle(basic_evaluator: CostEvaluator) -> None:
basic_evaluator.collision_engine.add_static_obstacle(obstacle)
basic_evaluator.danger_map.precompute([obstacle])
context = AStarContext(basic_evaluator, snap_size=1.0, bend_radii=[10.0], node_limit=1000000)
context = AStarContext(basic_evaluator, bend_radii=[10.0], node_limit=1000000)
start = Port(0, 0, 0)
target = Port(60, 0, 0)
path = route_astar(start, target, net_width=2.0, context=context)
@ -70,20 +72,218 @@ def test_astar_obstacle(basic_evaluator: CostEvaluator) -> None:
assert validation["total_length"] > 50.0
def test_astar_snap_to_target_lookahead(basic_evaluator: CostEvaluator) -> None:
context = AStarContext(basic_evaluator, snap_size=1.0)
# Target is NOT on 1um grid
def test_astar_uses_integerized_ports(basic_evaluator: CostEvaluator) -> None:
context = AStarContext(basic_evaluator)
start = Port(0, 0, 0)
target = Port(10.1, 0, 0)
path = route_astar(start, target, net_width=2.0, context=context)
assert path is not None
result = RoutingResult(net_id="test", path=path, is_valid=True, collisions=0)
# Under the new Enforce Grid policy, the router snaps the target internally to 10.0.
# We validate against the snapped target.
from inire.geometry.components import snap_search_grid
target_snapped = Port(snap_search_grid(target.x, 1.0), snap_search_grid(target.y, 1.0), target.orientation, snap=False)
validation = validate_routing_result(result, [], clearance=2.0, expected_start=start, expected_end=target_snapped)
assert target.x == 10
validation = validate_routing_result(result, [], clearance=2.0, expected_start=start, expected_end=target)
assert validation["is_valid"], f"Validation failed: {validation.get('reason')}"
def test_expand_moves_only_shortens_consecutive_straights(
basic_evaluator: CostEvaluator,
monkeypatch: pytest.MonkeyPatch,
) -> None:
context = AStarContext(basic_evaluator, min_straight_length=5.0, max_straight_length=100.0)
prev_result = Straight.generate(Port(0, 0, 0), 20.0, width=2.0, dilation=1.0)
current = astar_module.AStarNode(
prev_result.end_port,
g_cost=prev_result.length,
h_cost=0.0,
component_result=prev_result,
)
emitted: list[tuple[str, tuple]] = []
def fake_process_move(*args, **kwargs) -> None:
emitted.append((args[9], args[10]))
monkeypatch.setattr(astar_module, "process_move", fake_process_move)
astar_module.expand_moves(
current,
Port(80, 0, 0),
net_width=2.0,
net_id="test",
open_set=[],
closed_set={},
context=context,
metrics=astar_module.AStarMetrics(),
congestion_cache={},
)
straight_lengths = [params[0] for move_class, params in emitted if move_class == "S"]
assert straight_lengths
assert all(length < prev_result.length for length in straight_lengths)
def test_expand_moves_does_not_chain_sbends(
basic_evaluator: CostEvaluator,
monkeypatch: pytest.MonkeyPatch,
) -> None:
context = AStarContext(basic_evaluator, sbend_radii=[10.0], sbend_offsets=[5.0], max_straight_length=100.0)
prev_result = SBend.generate(Port(0, 0, 0), 5.0, 10.0, width=2.0, dilation=1.0)
current = astar_module.AStarNode(
prev_result.end_port,
g_cost=prev_result.length,
h_cost=0.0,
component_result=prev_result,
)
emitted: list[str] = []
def fake_process_move(*args, **kwargs) -> None:
emitted.append(args[9])
monkeypatch.setattr(astar_module, "process_move", fake_process_move)
astar_module.expand_moves(
current,
Port(60, 10, 0),
net_width=2.0,
net_id="test",
open_set=[],
closed_set={},
context=context,
metrics=astar_module.AStarMetrics(),
congestion_cache={},
)
assert "SB" not in emitted
assert emitted
def test_expand_moves_adds_sbend_aligned_straight_stop_points(
basic_evaluator: CostEvaluator,
monkeypatch: pytest.MonkeyPatch,
) -> None:
context = AStarContext(
basic_evaluator,
bend_radii=[10.0],
sbend_radii=[10.0],
max_straight_length=150.0,
)
current = astar_module.AStarNode(Port(0, 0, 0), g_cost=0.0, h_cost=0.0)
emitted: list[tuple[str, tuple]] = []
def fake_process_move(*args, **kwargs) -> None:
emitted.append((args[9], args[10]))
monkeypatch.setattr(astar_module, "process_move", fake_process_move)
astar_module.expand_moves(
current,
Port(100, 10, 0),
net_width=2.0,
net_id="test",
open_set=[],
closed_set={},
context=context,
metrics=astar_module.AStarMetrics(),
congestion_cache={},
)
straight_lengths = {params[0] for move_class, params in emitted if move_class == "S"}
sbend_span = astar_module._sbend_forward_span(10.0, 10.0)
assert sbend_span is not None
assert int(round(100.0 - sbend_span)) in straight_lengths
assert int(round(100.0 - 2.0 * sbend_span)) in straight_lengths
def test_expand_moves_adds_exact_corner_visibility_stop_points(
basic_evaluator: CostEvaluator,
monkeypatch: pytest.MonkeyPatch,
) -> None:
context = AStarContext(
basic_evaluator,
bend_radii=[10.0],
max_straight_length=150.0,
visibility_guidance="exact_corner",
)
current = astar_module.AStarNode(Port(0, 0, 0), g_cost=0.0, h_cost=0.0)
monkeypatch.setattr(
astar_module.VisibilityManager,
"get_corner_visibility",
lambda self, origin, max_dist=0.0: [(40.0, 10.0, 41.23), (75.0, -15.0, 76.48)],
)
emitted: list[tuple[str, tuple]] = []
def fake_process_move(*args, **kwargs) -> None:
emitted.append((args[9], args[10]))
monkeypatch.setattr(astar_module, "process_move", fake_process_move)
astar_module.expand_moves(
current,
Port(120, 20, 0),
net_width=2.0,
net_id="test",
open_set=[],
closed_set={},
context=context,
metrics=astar_module.AStarMetrics(),
congestion_cache={},
)
straight_lengths = {params[0] for move_class, params in emitted if move_class == "S"}
assert 40 in straight_lengths
assert 75 in straight_lengths
def test_expand_moves_adds_tangent_corner_visibility_stop_points(
basic_evaluator: CostEvaluator,
monkeypatch: pytest.MonkeyPatch,
) -> None:
class DummyCornerIndex:
def intersection(self, bounds: tuple[float, float, float, float]) -> list[int]:
return [0, 1]
context = AStarContext(
basic_evaluator,
bend_radii=[10.0],
sbend_radii=[],
max_straight_length=150.0,
visibility_guidance="tangent_corner",
)
current = astar_module.AStarNode(Port(0, 0, 0), g_cost=0.0, h_cost=0.0)
monkeypatch.setattr(astar_module.VisibilityManager, "_ensure_current", lambda self: None)
context.visibility_manager.corners = [(50.0, 10.0), (80.0, -10.0)]
context.visibility_manager.corner_index = DummyCornerIndex()
monkeypatch.setattr(
type(context.cost_evaluator.collision_engine),
"ray_cast",
lambda self, origin, angle_deg, max_dist=2000.0, net_width=None: max_dist,
)
emitted: list[tuple[str, tuple]] = []
def fake_process_move(*args, **kwargs) -> None:
emitted.append((args[9], args[10]))
monkeypatch.setattr(astar_module, "process_move", fake_process_move)
astar_module.expand_moves(
current,
Port(120, 20, 0),
net_width=2.0,
net_id="test",
open_set=[],
closed_set={},
context=context,
metrics=astar_module.AStarMetrics(),
congestion_cache={},
)
straight_lengths = {params[0] for move_class, params in emitted if move_class == "S"}
assert 40 in straight_lengths
assert 70 in straight_lengths

View file

@ -0,0 +1,92 @@
import pytest
import numpy
from shapely.geometry import Polygon
from inire.geometry.collision import CollisionEngine
from inire.geometry.primitives import Port
from inire.geometry.components import Straight
from inire.router.cost import CostEvaluator
from inire.router.danger_map import DangerMap
from inire.router.astar import AStarContext
from inire.router.pathfinder import PathFinder, RoutingResult
def test_clearance_thresholds():
"""
Check that clearance is correctly calculated:
two paths slightly beyond, exactly at, and slightly violating.
"""
# Clearance = 2.0, Width = 2.0
# Required Centerline-to-Centerline = (2+2)/2 + 2.0 = 4.0
ce = CollisionEngine(clearance=2.0)
# Net 1: Centerline at y=0
p1 = Port(0, 0, 0)
res1 = Straight.generate(p1, 50.0, width=2.0, dilation=1.0)
ce.add_path("net1", res1.geometry, dilated_geometry=res1.dilated_geometry)
# Net 2: Parallel to Net 1
# 1. Beyond minimum spacing: y=5. Gap = 5 - 2 = 3 > 2. OK.
p2_ok = Port(0, 5, 0)
res2_ok = Straight.generate(p2_ok, 50.0, width=2.0, dilation=1.0)
is_v, count = ce.verify_path("net2", [res2_ok])
assert is_v, f"Gap 3 should be valid, but got {count} collisions"
# 2. Exactly at: y=4.0. Gap = 4.0 - 2.0 = 2.0. OK.
p2_exact = Port(0, 4, 0)
res2_exact = Straight.generate(p2_exact, 50.0, width=2.0, dilation=1.0)
is_v, count = ce.verify_path("net2", [res2_exact])
assert is_v, f"Gap exactly 2.0 should be valid, but got {count} collisions"
# 3. Slightly violating: y=3.999. Gap = 3.999 - 2.0 = 1.999 < 2.0. FAIL.
p2_fail = Port(0, 3, 0)
res2_fail = Straight.generate(p2_fail, 50.0, width=2.0, dilation=1.0)
is_v, count = ce.verify_path("net2", [res2_fail])
assert not is_v, "Gap 1.999 should be invalid"
assert count > 0
def test_verify_all_nets_cases():
"""
Validate that verify_all_nets catches some common cases and doesn't flag reasonable non-failing cases.
"""
engine = CollisionEngine(clearance=2.0)
danger_map = DangerMap(bounds=(0, 0, 100, 100))
danger_map.precompute([])
evaluator = CostEvaluator(collision_engine=engine, danger_map=danger_map)
context = AStarContext(cost_evaluator=evaluator)
pf = PathFinder(context, warm_start=None, max_iterations=1)
# Case 1: Parallel paths exactly at clearance (Should be VALID)
netlist_parallel_ok = {
"net1": (Port(0, 50, 0), Port(100, 50, 0)),
"net2": (Port(0, 54, 0), Port(100, 54, 0)),
}
net_widths = {"net1": 2.0, "net2": 2.0}
results = pf.route_all(netlist_parallel_ok, net_widths)
assert results["net1"].is_valid, f"Exactly at clearance should be valid, collisions={results['net1'].collisions}"
assert results["net2"].is_valid
# Case 2: Parallel paths slightly within clearance (Should be INVALID)
netlist_parallel_fail = {
"net3": (Port(0, 20, 0), Port(100, 20, 0)),
"net4": (Port(0, 23, 0), Port(100, 23, 0)),
}
# Reset engine
engine.remove_path("net1")
engine.remove_path("net2")
results_p = pf.route_all(netlist_parallel_fail, net_widths)
# verify_all_nets should flag both as invalid because they cross-collide
assert not results_p["net3"].is_valid
assert not results_p["net4"].is_valid
# Case 3: Crossing paths (Should be INVALID)
netlist_cross = {
"net5": (Port(0, 75, 0), Port(100, 75, 0)),
"net6": (Port(50, 0, 90), Port(50, 100, 90)),
}
engine.remove_path("net3")
engine.remove_path("net4")
results_c = pf.route_all(netlist_cross, net_widths)
assert not results_c["net5"].is_valid
assert not results_c["net6"].is_valid

View file

@ -2,6 +2,7 @@ from shapely.geometry import Polygon
from inire.geometry.collision import CollisionEngine
from inire.geometry.primitives import Port
from inire.geometry.components import Straight
def test_collision_detection() -> None:
@ -38,7 +39,7 @@ def test_safety_zone() -> None:
engine.add_static_obstacle(obstacle)
# Port exactly on the boundary
start_port = Port(10.0, 12.0, 0)
start_port = Port(10, 12, 0)
# Move starting from this port that overlaps the obstacle by 1nm
# (Inside the 2nm safety zone)
@ -59,3 +60,46 @@ def test_configurable_max_net_width() -> None:
# Dilated test_poly bounds: (14, 19, 17, 26).
# obstacle: (20, 20, 25, 25). No physical collision.
assert not engine.is_collision(test_poly, net_width=2.0)
def test_ray_cast_width_clearance() -> None:
# Clearance = 2.0um, Width = 2.0um.
# Centerline to obstacle edge must be >= W/2 + C = 1.0 + 2.0 = 3.0um.
engine = CollisionEngine(clearance=2.0)
# Obstacle at x=10 to 20
obstacle = Polygon([(10, 0), (20, 0), (20, 100), (10, 100)])
engine.add_static_obstacle(obstacle)
# 1. Parallel move at x=6. Gap = 10 - 6 = 4.0. Clearly OK.
start_ok = Port(6, 50, 90)
reach_ok = engine.ray_cast(start_ok, 90, max_dist=10.0, net_width=2.0)
assert reach_ok >= 10.0
# 2. Parallel move at x=8. Gap = 10 - 8 = 2.0. COLLISION.
start_fail = Port(8, 50, 90)
reach_fail = engine.ray_cast(start_fail, 90, max_dist=10.0, net_width=2.0)
assert reach_fail < 10.0
def test_check_move_static_clearance() -> None:
engine = CollisionEngine(clearance=2.0)
obstacle = Polygon([(10, 0), (20, 0), (20, 100), (10, 100)])
engine.add_static_obstacle(obstacle)
# Straight move of length 10 at x=8 (Width 2.0)
# Gap = 10 - 8 = 2.0 < 3.0. COLLISION.
start = Port(8, 0, 90)
res = Straight.generate(start, 10.0, width=2.0, dilation=1.0) # dilation = C/2
assert engine.check_move_static(res, start_port=start, net_width=2.0)
# Move at x=7. Gap = 3.0 == minimum. OK.
start_ok = Port(7, 0, 90)
res_ok = Straight.generate(start_ok, 10.0, width=2.0, dilation=1.0)
assert not engine.check_move_static(res_ok, start_port=start_ok, net_width=2.0)
# 3. Same exact-boundary case.
start_exact = Port(7, 0, 90)
res_exact = Straight.generate(start_exact, 10.0, width=2.0, dilation=1.0)
assert not engine.check_move_static(res_exact, start_port=start_exact, net_width=2.0)

View file

@ -8,7 +8,7 @@ def test_straight_generation() -> None:
start = Port(0, 0, 0)
length = 10.0
width = 2.0
result = Straight.generate(start, length, width, snap_size=1.0)
result = Straight.generate(start, length, width)
assert result.end_port.x == 10.0
assert result.end_port.y == 0.0
@ -29,13 +29,13 @@ def test_bend90_generation() -> None:
width = 2.0
# CW bend
result_cw = Bend90.generate(start, radius, width, direction="CW", snap_size=1.0)
result_cw = Bend90.generate(start, radius, width, direction="CW")
assert result_cw.end_port.x == 10.0
assert result_cw.end_port.y == -10.0
assert result_cw.end_port.orientation == 270.0
# CCW bend
result_ccw = Bend90.generate(start, radius, width, direction="CCW", snap_size=1.0)
result_ccw = Bend90.generate(start, radius, width, direction="CCW")
assert result_ccw.end_port.x == 10.0
assert result_ccw.end_port.y == 10.0
assert result_ccw.end_port.orientation == 90.0
@ -47,7 +47,7 @@ def test_sbend_generation() -> None:
radius = 10.0
width = 2.0
result = SBend.generate(start, offset, radius, width, snap_size=1.0)
result = SBend.generate(start, offset, radius, width)
assert result.end_port.y == 5.0
assert result.end_port.orientation == 0.0
assert len(result.geometry) == 2 # Optimization: returns individual arcs
@ -57,13 +57,27 @@ def test_sbend_generation() -> None:
SBend.generate(start, 25.0, 10.0, 2.0)
def test_sbend_generation_negative_offset_keeps_second_arc_below_centerline() -> None:
start = Port(0, 0, 0)
offset = -5.0
radius = 10.0
width = 2.0
result = SBend.generate(start, offset, radius, width)
assert result.end_port.y == -5.0
second_arc_minx, second_arc_miny, second_arc_maxx, second_arc_maxy = result.geometry[1].bounds
assert second_arc_maxy <= width / 2.0 + 1e-6
assert second_arc_miny < -width / 2.0
def test_bend_collision_models() -> None:
start = Port(0, 0, 0)
radius = 10.0
width = 2.0
# 1. BBox model
res_bbox = Bend90.generate(start, radius, width, direction="CCW", collision_type="bbox", snap_size=1.0)
res_bbox = Bend90.generate(start, radius, width, direction="CCW", collision_type="bbox")
# Arc CCW R=10 from (0,0,0) ends at (10,10,90).
# Waveguide width is 2.0, so bbox will be slightly larger than (0,0,10,10)
minx, miny, maxx, maxy = res_bbox.geometry[0].bounds
@ -73,7 +87,7 @@ def test_bend_collision_models() -> None:
assert maxy >= 10.0 - 1e-6
# 2. Clipped BBox model
res_clipped = Bend90.generate(start, radius, width, direction="CCW", collision_type="clipped_bbox", clip_margin=1.0, snap_size=1.0)
res_clipped = Bend90.generate(start, radius, width, direction="CCW", collision_type="clipped_bbox", clip_margin=1.0)
# Area should be less than full bbox
assert res_clipped.geometry[0].area < res_bbox.geometry[0].area
@ -84,11 +98,11 @@ def test_sbend_collision_models() -> None:
radius = 10.0
width = 2.0
res_bbox = SBend.generate(start, offset, radius, width, collision_type="bbox", snap_size=1.0)
res_bbox = SBend.generate(start, offset, radius, width, collision_type="bbox")
# Geometry should be a list of individual bbox polygons for each arc
assert len(res_bbox.geometry) == 2
res_arc = SBend.generate(start, offset, radius, width, collision_type="arc", snap_size=1.0)
res_arc = SBend.generate(start, offset, radius, width, collision_type="arc")
area_bbox = sum(p.area for p in res_bbox.geometry)
area_arc = sum(p.area for p in res_arc.geometry)
assert area_bbox > area_arc
@ -101,8 +115,7 @@ def test_sbend_continuity() -> None:
radius = 20.0
width = 1.0
# We use snap_size=1.0 so that (10-offset) = 6.0 is EXACTLY hit.
res = SBend.generate(start, offset, radius, width, snap_size=1.0)
res = SBend.generate(start, offset, radius, width)
# Target orientation should be same as start
assert abs(res.end_port.orientation - 90.0) < 1e-6
@ -142,7 +155,7 @@ def test_component_transform_invariance() -> None:
radius = 10.0
width = 2.0
res0 = Bend90.generate(start0, radius, width, direction="CCW", snap_size=1.0)
res0 = Bend90.generate(start0, radius, width, direction="CCW")
# Transform: Translate (10, 10) then Rotate 90
dx, dy = 10.0, 5.0
@ -153,7 +166,7 @@ def test_component_transform_invariance() -> None:
# 2. Generate at transformed start
start_transformed = rotate_port(translate_port(start0, dx, dy), angle)
res_transformed = Bend90.generate(start_transformed, radius, width, direction="CCW", snap_size=1.0)
res_transformed = Bend90.generate(start_transformed, radius, width, direction="CCW")
assert abs(res_transformed.end_port.x - p_end_transformed.x) < 1e-6
assert abs(res_transformed.end_port.y - p_end_transformed.y) < 1e-6

View file

@ -19,7 +19,7 @@ def basic_evaluator() -> CostEvaluator:
def test_astar_sbend(basic_evaluator: CostEvaluator) -> None:
context = AStarContext(basic_evaluator, snap_size=1.0, sbend_offsets=[2.0, 5.0])
context = AStarContext(basic_evaluator, sbend_offsets=[2.0, 5.0])
# Start at (0,0), target at (50, 2) -> 2um lateral offset
# This matches one of our discretized SBend offsets.
start = Port(0, 0, 0)
@ -39,7 +39,7 @@ def test_astar_sbend(basic_evaluator: CostEvaluator) -> None:
def test_pathfinder_negotiated_congestion_resolution(basic_evaluator: CostEvaluator) -> None:
context = AStarContext(basic_evaluator, snap_size=1.0, bend_radii=[5.0, 10.0])
context = AStarContext(basic_evaluator, bend_radii=[5.0, 10.0])
# Increase base penalty to force detour immediately
pf = PathFinder(context, max_iterations=10, base_congestion_penalty=1000.0)
@ -59,5 +59,10 @@ def test_pathfinder_negotiated_congestion_resolution(basic_evaluator: CostEvalua
results = pf.route_all(netlist, net_widths)
assert len(results) == 2
assert results["net1"].reached_target
assert results["net2"].reached_target
assert results["net1"].is_valid
assert results["net2"].is_valid
assert results["net1"].collisions == 0
assert results["net2"].collisions == 0

View file

@ -1,3 +1,4 @@
from shapely.geometry import Polygon
from inire.geometry.collision import CollisionEngine
from inire.geometry.primitives import Port
from inire.router.cost import CostEvaluator
@ -37,3 +38,30 @@ def test_cost_calculation() -> None:
# Side check: 2*bp = 20.
# Total = 1.1 * (20 + 40) = 66.0
assert h_away >= h_90
def test_danger_map_kd_tree_and_cache() -> None:
# Test that KD-Tree based danger map works and uses cache
bounds = (0, 0, 1000, 1000)
dm = DangerMap(bounds, resolution=1.0, safety_threshold=10.0)
# Square obstacle at (100, 100) to (110, 110)
obstacle = Polygon([(100, 100), (110, 100), (110, 110), (100, 110)])
dm.precompute([obstacle])
# 1. High cost near boundary
cost_near = dm.get_cost(100.5, 100.5)
assert cost_near > 1.0
# 2. Zero cost far away
cost_far = dm.get_cost(500, 500)
assert cost_far == 0.0
# 3. Check cache usage (internal detail check)
# We can check if calling it again is fast or just verify it returns same result
cost_near_2 = dm.get_cost(100.5, 100.5)
assert cost_near_2 == cost_near
# 4. Out of bounds
assert dm.get_cost(-1, -1) >= 1e12

View file

@ -0,0 +1,63 @@
from __future__ import annotations
import os
import statistics
import pytest
from inire.tests.example_scenarios import SCENARIOS, ScenarioDefinition, ScenarioOutcome
RUN_PERFORMANCE = os.environ.get("INIRE_RUN_PERFORMANCE") == "1"
PERFORMANCE_REPEATS = 3
REGRESSION_FACTOR = 1.5
# Baselines are measured from the current code path without plotting.
BASELINE_SECONDS = {
"example_01_simple_route": 0.0035,
"example_02_congestion_resolution": 0.2666,
"example_03_locked_paths": 0.2304,
"example_04_sbends_and_radii": 1.8734,
"example_05_orientation_stress": 0.5630,
"example_06_bend_collision_models": 5.2382,
"example_07_large_scale_routing": 1.2081,
"example_08_custom_bend_geometry": 4.2111,
"example_09_unroutable_best_effort": 0.0056,
}
EXPECTED_OUTCOMES = {
"example_01_simple_route": {"total_results": 1, "valid_results": 1, "reached_targets": 1},
"example_02_congestion_resolution": {"total_results": 3, "valid_results": 3, "reached_targets": 3},
"example_03_locked_paths": {"total_results": 2, "valid_results": 2, "reached_targets": 2},
"example_04_sbends_and_radii": {"total_results": 2, "valid_results": 2, "reached_targets": 2},
"example_05_orientation_stress": {"total_results": 3, "valid_results": 3, "reached_targets": 3},
"example_06_bend_collision_models": {"total_results": 3, "valid_results": 3, "reached_targets": 3},
"example_07_large_scale_routing": {"total_results": 10, "valid_results": 10, "reached_targets": 10},
"example_08_custom_bend_geometry": {"total_results": 2, "valid_results": 1, "reached_targets": 2},
"example_09_unroutable_best_effort": {"total_results": 1, "valid_results": 0, "reached_targets": 0},
}
def _assert_expected_outcome(name: str, outcome: ScenarioOutcome) -> None:
expected = EXPECTED_OUTCOMES[name]
assert outcome.total_results == expected["total_results"]
assert outcome.valid_results == expected["valid_results"]
assert outcome.reached_targets == expected["reached_targets"]
@pytest.mark.performance
@pytest.mark.skipif(not RUN_PERFORMANCE, reason="set INIRE_RUN_PERFORMANCE=1 to run runtime regression checks")
@pytest.mark.parametrize("scenario", SCENARIOS, ids=[scenario.name for scenario in SCENARIOS])
def test_example_like_runtime_regression(scenario: ScenarioDefinition) -> None:
timings = []
for _ in range(PERFORMANCE_REPEATS):
outcome = scenario.run()
_assert_expected_outcome(scenario.name, outcome)
timings.append(outcome.duration_s)
median_runtime = statistics.median(timings)
assert median_runtime <= BASELINE_SECONDS[scenario.name] * REGRESSION_FACTOR, (
f"{scenario.name} median runtime {median_runtime:.4f}s exceeded "
f"{REGRESSION_FACTOR:.1f}x baseline {BASELINE_SECONDS[scenario.name]:.4f}s "
f"from timings {timings!r}"
)

View file

@ -2,15 +2,13 @@ from typing import Any
import pytest
from hypothesis import given, settings, strategies as st
from shapely.geometry import Polygon
from shapely.geometry import Point, Polygon
from inire.geometry.collision import CollisionEngine
from inire.geometry.primitives import Port
from inire.router.astar import AStarContext, route_astar
from inire.router.cost import CostEvaluator
from inire.router.danger_map import DangerMap
from inire.router.pathfinder import RoutingResult
from inire.utils.validation import validate_routing_result
@st.composite
@ -30,9 +28,17 @@ def random_port(draw: Any) -> Port:
return Port(x, y, orientation)
def _port_has_required_clearance(port: Port, obstacles: list[Polygon], clearance: float, net_width: float) -> bool:
point = Point(float(port.x), float(port.y))
required_gap = (net_width / 2.0) + clearance
return all(point.distance(obstacle) >= required_gap for obstacle in obstacles)
@settings(max_examples=3, deadline=None)
@given(obstacles=st.lists(random_obstacle(), min_size=0, max_size=3), start=random_port(), target=random_port())
def test_fuzz_astar_no_crash(obstacles: list[Polygon], start: Port, target: Port) -> None:
net_width = 2.0
clearance = 2.0
engine = CollisionEngine(clearance=2.0)
for obs in obstacles:
engine.add_static_obstacle(obs)
@ -48,17 +54,14 @@ def test_fuzz_astar_no_crash(obstacles: list[Polygon], start: Port, target: Port
try:
path = route_astar(start, target, net_width=2.0, context=context)
# Analytic Correctness: if path is returned, verify it's collision-free
if path:
result = RoutingResult(net_id="default", path=path, is_valid=True, collisions=0)
validation = validate_routing_result(
result,
obstacles,
clearance=2.0,
expected_start=start,
expected_end=target,
)
assert validation["is_valid"], f"Validation failed: {validation.get('reason')}"
# This is a crash-smoke test rather than a full correctness proof.
# If a full path is returned, it should at least terminate at the requested target.
endpoints_are_clear = (
_port_has_required_clearance(start, obstacles, clearance, net_width)
and _port_has_required_clearance(target, obstacles, clearance, net_width)
)
if path and endpoints_are_clear:
assert path[-1].end_port == target
except Exception as e:
# Unexpected exceptions are failures

View file

@ -33,3 +33,86 @@ def test_pathfinder_parallel(basic_evaluator: CostEvaluator) -> None:
assert results["net2"].is_valid
assert results["net1"].collisions == 0
assert results["net2"].collisions == 0
def test_pathfinder_crossing_detection(basic_evaluator: CostEvaluator) -> None:
context = AStarContext(basic_evaluator)
# Force a crossing by setting low iterations and low penalty
pf = PathFinder(context, max_iterations=1, base_congestion_penalty=1.0, warm_start=None)
# Net 1: (0, 25) -> (100, 25) Horizontal
# Net 2: (50, 0) -> (50, 50) Vertical
netlist = {
"net1": (Port(0, 25, 0), Port(100, 25, 0)),
"net2": (Port(50, 0, 90), Port(50, 50, 90)),
}
net_widths = {"net1": 2.0, "net2": 2.0}
results = pf.route_all(netlist, net_widths)
# Both should be invalid because they cross
assert not results["net1"].is_valid
assert not results["net2"].is_valid
assert results["net1"].collisions > 0
assert results["net2"].collisions > 0
def test_pathfinder_refine_paths_reduces_locked_detour_bends() -> None:
bounds = (0, -50, 100, 50)
def build_pathfinder(*, refine_paths: bool) -> tuple[CollisionEngine, PathFinder]:
engine = CollisionEngine(clearance=2.0)
danger_map = DangerMap(bounds=bounds)
danger_map.precompute([])
evaluator = CostEvaluator(engine, danger_map, bend_penalty=250.0, sbend_penalty=500.0)
context = AStarContext(evaluator, bend_radii=[10.0])
return engine, PathFinder(context, refine_paths=refine_paths)
base_engine, base_pf = build_pathfinder(refine_paths=False)
base_pf.route_all({"netA": (Port(10, 0, 0), Port(90, 0, 0))}, {"netA": 2.0})
base_engine.lock_net("netA")
base_result = base_pf.route_all({"netB": (Port(50, -20, 90), Port(50, 20, 90))}, {"netB": 2.0})["netB"]
refined_engine, refined_pf = build_pathfinder(refine_paths=True)
refined_pf.route_all({"netA": (Port(10, 0, 0), Port(90, 0, 0))}, {"netA": 2.0})
refined_engine.lock_net("netA")
refined_result = refined_pf.route_all({"netB": (Port(50, -20, 90), Port(50, 20, 90))}, {"netB": 2.0})["netB"]
base_bends = sum(1 for comp in base_result.path if comp.move_type == "Bend90")
refined_bends = sum(1 for comp in refined_result.path if comp.move_type == "Bend90")
assert base_result.is_valid
assert refined_result.is_valid
assert refined_bends < base_bends
assert refined_pf._path_cost(refined_result.path) < base_pf._path_cost(base_result.path)
def test_pathfinder_refine_paths_simplifies_triple_crossing_detours() -> None:
bounds = (0, 0, 100, 100)
netlist = {
"horizontal": (Port(10, 50, 0), Port(90, 50, 0)),
"vertical_up": (Port(45, 10, 90), Port(45, 90, 90)),
"vertical_down": (Port(55, 90, 270), Port(55, 10, 270)),
}
net_widths = {net_id: 2.0 for net_id in netlist}
def build_pathfinder(*, refine_paths: bool) -> PathFinder:
engine = CollisionEngine(clearance=2.0)
danger_map = DangerMap(bounds=bounds)
danger_map.precompute([])
evaluator = CostEvaluator(engine, danger_map, greedy_h_weight=1.5, bend_penalty=250.0, sbend_penalty=500.0)
context = AStarContext(evaluator, bend_radii=[10.0], sbend_radii=[10.0])
return PathFinder(context, base_congestion_penalty=1000.0, refine_paths=refine_paths)
base_results = build_pathfinder(refine_paths=False).route_all(netlist, net_widths)
refined_results = build_pathfinder(refine_paths=True).route_all(netlist, net_widths)
for net_id in ("vertical_up", "vertical_down"):
base_result = base_results[net_id]
refined_result = refined_results[net_id]
base_bends = sum(1 for comp in base_result.path if comp.move_type == "Bend90")
refined_bends = sum(1 for comp in refined_result.path if comp.move_type == "Bend90")
assert base_result.is_valid
assert refined_result.is_valid
assert refined_bends < base_bends

View file

@ -15,8 +15,8 @@ def port_strategy(draw: Any) -> Port:
def test_port_snapping() -> None:
p = Port(0.123456, 0.654321, 90)
assert p.x == 0.123
assert p.y == 0.654
assert p.x == 0
assert p.y == 1
@given(p=port_strategy())
@ -38,14 +38,13 @@ def test_port_transform_invariants(p: Port) -> None:
)
def test_translate_snapping(p: Port, dx: float, dy: float) -> None:
p_trans = translate_port(p, dx, dy)
# Check that snapped result is indeed multiple of GRID_SNAP_UM (0.001 um = 1nm)
assert abs(p_trans.x * 1000 - round(p_trans.x * 1000)) < 1e-6
assert abs(p_trans.y * 1000 - round(p_trans.y * 1000)) < 1e-6
assert isinstance(p_trans.x, int)
assert isinstance(p_trans.y, int)
def test_orientation_normalization() -> None:
p = Port(0, 0, 360)
assert p.orientation == 0.0
assert p.orientation == 0
p2 = Port(0, 0, -90)
assert p2.orientation == 270.0
assert p2.orientation == 270

View file

@ -3,64 +3,49 @@ from inire.geometry.primitives import Port
from inire.router.astar import route_astar, AStarContext
from inire.router.cost import CostEvaluator
from inire.geometry.collision import CollisionEngine
from inire.geometry.components import snap_search_grid
class TestVariableGrid(unittest.TestCase):
class TestIntegerPorts(unittest.TestCase):
def setUp(self):
self.ce = CollisionEngine(clearance=2.0)
self.cost = CostEvaluator(self.ce)
def test_grid_1_0(self):
""" Test routing with a 1.0um grid. """
context = AStarContext(self.cost, snap_size=1.0)
start = Port(0.0, 0.0, 0.0)
# 12.3 should snap to 12.0 on a 1.0um grid
target = Port(12.3, 0.0, 0.0, snap=False)
path = route_astar(start, target, net_width=1.0, context=context)
self.assertIsNotNone(path)
last_port = path[-1].end_port
self.assertEqual(last_port.x, 12.0)
# Verify component relative grid coordinates
# rel_gx = round(x / snap)
# For x=12.0, snap=1.0 -> rel_gx=12
self.assertEqual(path[-1].rel_gx, 12)
def test_route_reaches_integer_target(self):
context = AStarContext(self.cost)
start = Port(0, 0, 0)
target = Port(12, 0, 0)
def test_grid_2_5(self):
""" Test routing with a 2.5um grid. """
context = AStarContext(self.cost, snap_size=2.5)
start = Port(0.0, 0.0, 0.0)
# 7.5 is a multiple of 2.5, should be reached exactly
target = Port(7.5, 0.0, 0.0, snap=False)
path = route_astar(start, target, net_width=1.0, context=context)
self.assertIsNotNone(path)
last_port = path[-1].end_port
self.assertEqual(last_port.x, 7.5)
# rel_gx = 7.5 / 2.5 = 3
self.assertEqual(path[-1].rel_gx, 3)
def test_grid_10_0(self):
""" Test routing with a large 10.0um grid. """
context = AStarContext(self.cost, snap_size=10.0)
start = Port(0.0, 0.0, 0.0)
# 15.0 should snap to 20.0 (ties usually round to even or nearest,
# but 15.0 is exactly between 10 and 20.
# snap_search_grid uses round(val/snap)*snap. round(1.5) is 2 in Python 3.
target = Port(15.0, 0.0, 0.0, snap=False)
path = route_astar(start, target, net_width=1.0, context=context)
self.assertIsNotNone(path)
last_port = path[-1].end_port
self.assertEqual(last_port.x, 20.0)
# rel_gx = 20.0 / 10.0 = 2
self.assertEqual(path[-1].rel_gx, 2)
self.assertEqual(last_port.x, 12)
self.assertEqual(last_port.y, 0)
self.assertEqual(last_port.r, 0)
def test_port_constructor_rounds_to_integer_lattice(self):
context = AStarContext(self.cost)
start = Port(0.0, 0.0, 0.0)
target = Port(12.3, 0.0, 0.0)
path = route_astar(start, target, net_width=1.0, context=context)
self.assertIsNotNone(path)
self.assertEqual(target.x, 12)
last_port = path[-1].end_port
self.assertEqual(last_port.x, 12)
def test_half_step_inputs_use_integerized_targets(self):
context = AStarContext(self.cost)
start = Port(0.0, 0.0, 0.0)
target = Port(7.5, 0.0, 0.0)
path = route_astar(start, target, net_width=1.0, context=context)
self.assertIsNotNone(path)
self.assertEqual(target.x, 8)
last_port = path[-1].end_port
self.assertEqual(last_port.x, 8)
if __name__ == '__main__':
unittest.main()

View file

@ -12,12 +12,12 @@ if TYPE_CHECKING:
def validate_routing_result(
result: RoutingResult,
static_obstacles: list[Polygon],
clearance: float,
expected_start: Port | None = None,
expected_end: Port | None = None,
) -> dict[str, Any]:
result: RoutingResult,
static_obstacles: list[Polygon],
clearance: float,
expected_start: Port | None = None,
expected_end: Port | None = None,
) -> dict[str, Any]:
"""
Perform a high-precision validation of a routed path.
@ -47,11 +47,11 @@ def validate_routing_result(
# Boundary check
if expected_end:
last_port = result.path[-1].end_port
dist_to_end = numpy.sqrt((last_port.x - expected_end.x)**2 + (last_port.y - expected_end.y)**2)
dist_to_end = numpy.sqrt(((last_port[:2] - expected_end[:2])**2).sum())
if dist_to_end > 0.005:
connectivity_errors.append(f"Final port position mismatch: {dist_to_end*1000:.2f}nm")
if abs(last_port.orientation - expected_end.orientation) > 0.1:
connectivity_errors.append(f"Final port orientation mismatch: {last_port.orientation} vs {expected_end.orientation}")
if abs(last_port[2] - expected_end[2]) > 0.1:
connectivity_errors.append(f"Final port orientation mismatch: {last_port[2]} vs {expected_end[2]}")
# 2. Geometry Buffering
dilation_half = clearance / 2.0

View file

@ -69,7 +69,7 @@ def plot_routing_results(
# 2. Plot "Actual" Geometry (The high-fidelity shape used for fabrication)
# Use comp.actual_geometry if it exists (should be the arc)
actual_geoms_to_plot = comp.actual_geometry if comp.actual_geometry is not None else comp.geometry
for poly in actual_geoms_to_plot:
if isinstance(poly, MultiPolygon):
geoms = list(poly.geoms)
@ -87,7 +87,7 @@ def plot_routing_results(
# 3. Plot subtle port orientation arrow
p = comp.end_port
rad = numpy.radians(p.orientation)
ax.quiver(p.x, p.y, numpy.cos(rad), numpy.sin(rad), color="black",
ax.quiver(p.x, p.y, numpy.cos(rad), numpy.sin(rad), color="black",
scale=40, width=0.002, alpha=0.2, pivot="tail", zorder=4)
if not res.path and not res.is_valid:
@ -99,15 +99,15 @@ def plot_routing_results(
if netlist:
for net_id, (start_p, target_p) in netlist.items():
for p in [start_p, target_p]:
rad = numpy.radians(p.orientation)
ax.quiver(p.x, p.y, numpy.cos(rad), numpy.sin(rad), color="black",
rad = numpy.radians(p[2])
ax.quiver(*p[:2], numpy.cos(rad), numpy.sin(rad), color="black",
scale=25, width=0.004, pivot="tail", zorder=6)
ax.set_xlim(bounds[0], bounds[2])
ax.set_ylim(bounds[1], bounds[3])
ax.set_aspect("equal")
ax.set_title("Inire Routing Results (Dashed: Collision Proxy, Solid: Actual Geometry)")
# Legend handling for many nets
if len(results) < 25:
handles, labels = ax.get_legend_handles_labels()
@ -117,10 +117,11 @@ def plot_routing_results(
plt.grid(True, which='both', linestyle=':', alpha=0.5)
return fig, ax
def plot_danger_map(
danger_map: DangerMap,
ax: Axes | None = None,
resolution: float | None = None
) -> tuple[Figure, Axes]:
"""
Plot the pre-computed danger map as a heatmap.
@ -130,10 +131,30 @@ def plot_danger_map(
else:
fig = ax.get_figure()
# Generate a temporary grid for visualization
res = resolution if resolution is not None else max(1.0, (danger_map.maxx - danger_map.minx) / 200.0)
x_coords = numpy.arange(danger_map.minx + res/2, danger_map.maxx, res)
y_coords = numpy.arange(danger_map.miny + res/2, danger_map.maxy, res)
xv, yv = numpy.meshgrid(x_coords, y_coords, indexing='ij')
if danger_map.tree is not None:
points = numpy.stack([xv.ravel(), yv.ravel()], axis=1)
dists, _ = danger_map.tree.query(points, distance_upper_bound=danger_map.safety_threshold)
# Apply cost function
safe_dists = numpy.maximum(dists, 0.1)
grid_flat = numpy.where(
dists < danger_map.safety_threshold,
danger_map.k / (safe_dists**2),
0.0
)
grid = grid_flat.reshape(xv.shape)
else:
grid = numpy.zeros(xv.shape)
# Need to transpose because grid is [x, y] and imshow expects [row, col] (y, x)
# Also origin='lower' to match coordinates
im = ax.imshow(
danger_map.grid.T,
grid.T,
origin='lower',
extent=[danger_map.minx, danger_map.maxx, danger_map.miny, danger_map.maxy],
cmap='YlOrRd',
@ -192,27 +213,27 @@ def plot_expansion_density(
return fig, ax
x, y, _ = zip(*nodes)
# Create 2D histogram
h, xedges, yedges = numpy.histogram2d(
x, y,
bins=bins,
x, y,
bins=bins,
range=[[bounds[0], bounds[2]], [bounds[1], bounds[3]]]
)
# Plot as image
im = ax.imshow(
h.T,
origin='lower',
h.T,
origin='lower',
extent=[bounds[0], bounds[2], bounds[1], bounds[3]],
cmap='plasma',
interpolation='nearest',
alpha=0.7
)
plt.colorbar(im, ax=ax, label='Expansion Count')
ax.set_title("Search Expansion Density")
ax.set_xlim(bounds[0], bounds[2])
ax.set_ylim(bounds[1], bounds[3])
return fig, ax

View file

@ -77,4 +77,6 @@ lint.ignore = [
[tool.pytest.ini_options]
addopts = "-rsXx"
testpaths = ["inire"]
markers = [
"performance: opt-in runtime regression checks against example-like routing scenarios",
]