Compare commits

..

No commits in common. "6a28dcf3123274e3475b3077a2a05168a38f81e8" and "457451d3b2975c5ff3e9f216f706b8c1d24c7935" have entirely different histories.

38 changed files with 1905 additions and 2623 deletions

View file

@ -19,7 +19,6 @@ The `AStarContext` stores the configuration and persistent state for the A* sear
| `sbend_penalty` | `float` | 500.0 | Flat cost added for every S-bend. | | `sbend_penalty` | `float` | 500.0 | Flat cost added for every S-bend. |
| `bend_collision_type` | `str` | `"arc"` | Collision model for bends: `"arc"`, `"bbox"`, or `"clipped_bbox"`. | | `bend_collision_type` | `str` | `"arc"` | Collision model for bends: `"arc"`, `"bbox"`, or `"clipped_bbox"`. |
| `bend_clip_margin` | `float` | 10.0 | Extra space (µm) around the waveguide for clipped models. | | `bend_clip_margin` | `float` | 10.0 | Extra space (µm) around the waveguide for clipped models. |
| `visibility_guidance` | `str` | `"tangent_corner"` | Visibility-driven straight candidate mode: `"off"`, `"exact_corner"`, or `"tangent_corner"`. |
## 2. AStarMetrics ## 2. AStarMetrics
@ -87,12 +86,6 @@ If the router produces many small bends instead of a long straight line:
2. Ensure `straight_lengths` includes larger values like `25.0` or `100.0`. 2. Ensure `straight_lengths` includes larger values like `25.0` or `100.0`.
3. Decrease `greedy_h_weight` closer to `1.0`. 3. Decrease `greedy_h_weight` closer to `1.0`.
### Visibility Guidance
The router can bias straight stop points using static obstacle corners.
- **`"tangent_corner"`**: Default. Proposes straight lengths that set up a clean tangent bend around nearby visible corners. This helps obstacle-dense layouts more than open space.
- **`"exact_corner"`**: Only uses precomputed corner-to-corner visibility when the current search state already lands on an obstacle corner.
- **`"off"`**: Disables visibility-derived straight candidates entirely.
### Handling Congestion ### Handling Congestion
In multi-net designs, if nets are overlapping: In multi-net designs, if nets are overlapping:
1. Increase `congestion_penalty` in `CostEvaluator`. 1. Increase `congestion_penalty` in `CostEvaluator`.

View file

@ -23,7 +23,7 @@ def main() -> None:
# 2. Configure Router # 2. Configure Router
evaluator = CostEvaluator(engine, danger_map) evaluator = CostEvaluator(engine, danger_map)
context = AStarContext(evaluator, bend_radii=[10.0]) context = AStarContext(evaluator, snap_size=1.0, bend_radii=[10.0])
metrics = AStarMetrics() metrics = AStarMetrics()
pf = PathFinder(context, metrics) pf = PathFinder(context, metrics)

View file

@ -17,8 +17,8 @@ def main() -> None:
danger_map.precompute([]) danger_map.precompute([])
# Configure a router with high congestion penalties # Configure a router with high congestion penalties
evaluator = CostEvaluator(engine, danger_map, greedy_h_weight=1.5, bend_penalty=250.0, sbend_penalty=500.0) evaluator = CostEvaluator(engine, danger_map, greedy_h_weight=1.5, bend_penalty=50.0, sbend_penalty=150.0)
context = AStarContext(evaluator, bend_radii=[10.0], sbend_radii=[10.0]) context = AStarContext(evaluator, snap_size=1.0, bend_radii=[10.0], sbend_radii=[10.0])
metrics = AStarMetrics() metrics = AStarMetrics()
pf = PathFinder(context, metrics, base_congestion_penalty=1000.0) pf = PathFinder(context, metrics, base_congestion_penalty=1000.0)

Binary file not shown.

Before

Width:  |  Height:  |  Size: 72 KiB

After

Width:  |  Height:  |  Size: 67 KiB

Before After
Before After

View file

@ -16,8 +16,8 @@ def main() -> None:
danger_map = DangerMap(bounds=bounds) danger_map = DangerMap(bounds=bounds)
danger_map.precompute([]) danger_map.precompute([])
evaluator = CostEvaluator(engine, danger_map, bend_penalty=250.0, sbend_penalty=500.0) evaluator = CostEvaluator(engine, danger_map)
context = AStarContext(evaluator, bend_radii=[10.0]) context = AStarContext(evaluator, snap_size=1.0, bend_radii=[10.0])
metrics = AStarMetrics() metrics = AStarMetrics()
pf = PathFinder(context, metrics) pf = PathFinder(context, metrics)

View file

@ -28,6 +28,7 @@ def main() -> None:
context = AStarContext( context = AStarContext(
evaluator, evaluator,
node_limit=50000, node_limit=50000,
snap_size=1.0,
bend_radii=[10.0, 30.0], bend_radii=[10.0, 30.0],
sbend_offsets=[5.0], # Use a simpler offset sbend_offsets=[5.0], # Use a simpler offset
bend_penalty=10.0, bend_penalty=10.0,

Binary file not shown.

Before

Width:  |  Height:  |  Size: 92 KiB

After

Width:  |  Height:  |  Size: 101 KiB

Before After
Before After

View file

@ -17,7 +17,7 @@ def main() -> None:
danger_map.precompute([]) danger_map.precompute([])
evaluator = CostEvaluator(engine, danger_map, bend_penalty=50.0) evaluator = CostEvaluator(engine, danger_map, bend_penalty=50.0)
context = AStarContext(evaluator, bend_radii=[20.0]) context = AStarContext(evaluator, snap_size=5.0, bend_radii=[20.0])
metrics = AStarMetrics() metrics = AStarMetrics()
pf = PathFinder(context, metrics) pf = PathFinder(context, metrics)

Binary file not shown.

Before

Width:  |  Height:  |  Size: 84 KiB

After

Width:  |  Height:  |  Size: 80 KiB

Before After
Before After

View file

@ -33,15 +33,15 @@ def main() -> None:
evaluator = CostEvaluator(engine, danger_map, bend_penalty=50.0, sbend_penalty=150.0) evaluator = CostEvaluator(engine, danger_map, bend_penalty=50.0, sbend_penalty=150.0)
# Scenario 1: Standard 'arc' model (High fidelity) # Scenario 1: Standard 'arc' model (High fidelity)
context_arc = AStarContext(evaluator, bend_radii=[10.0], bend_collision_type="arc") context_arc = AStarContext(evaluator, snap_size=1.0, bend_radii=[10.0], bend_collision_type="arc")
netlist_arc = {"arc_model": (Port(10, 120, 0), Port(90, 140, 90))} netlist_arc = {"arc_model": (Port(10, 120, 0), Port(90, 140, 90))}
# Scenario 2: 'bbox' model (Conservative axis-aligned box) # Scenario 2: 'bbox' model (Conservative axis-aligned box)
context_bbox = AStarContext(evaluator, bend_radii=[10.0], bend_collision_type="bbox") context_bbox = AStarContext(evaluator, snap_size=1.0, bend_radii=[10.0], bend_collision_type="bbox")
netlist_bbox = {"bbox_model": (Port(10, 70, 0), Port(90, 90, 90))} netlist_bbox = {"bbox_model": (Port(10, 70, 0), Port(90, 90, 90))}
# Scenario 3: 'clipped_bbox' model (Balanced) # Scenario 3: 'clipped_bbox' model (Balanced)
context_clipped = AStarContext(evaluator, bend_radii=[10.0], bend_collision_type="clipped_bbox", bend_clip_margin=1.0) context_clipped = AStarContext(evaluator, snap_size=1.0, bend_radii=[10.0], bend_collision_type="clipped_bbox", bend_clip_margin=1.0)
netlist_clipped = {"clipped_model": (Port(10, 20, 0), Port(90, 40, 90))} netlist_clipped = {"clipped_model": (Port(10, 20, 0), Port(90, 40, 90))}
# 2. Route each scenario # 2. Route each scenario

View file

@ -10,7 +10,7 @@ from inire.utils.visualization import plot_routing_results, plot_danger_map, plo
from shapely.geometry import box from shapely.geometry import box
def main() -> None: def main() -> None:
print("Running Example 07: Fan-Out (10 Nets, 50um Radius)...") print("Running Example 07: Fan-Out (10 Nets, 50um Radius, 5um Grid)...")
# 1. Setup Environment # 1. Setup Environment
bounds = (0, 0, 1000, 1000) bounds = (0, 0, 1000, 1000)
@ -29,7 +29,7 @@ def main() -> None:
evaluator = CostEvaluator(engine, danger_map, greedy_h_weight=1.5, unit_length_cost=0.1, bend_penalty=100.0, sbend_penalty=400.0, congestion_penalty=100.0) evaluator = CostEvaluator(engine, danger_map, greedy_h_weight=1.5, unit_length_cost=0.1, bend_penalty=100.0, sbend_penalty=400.0, congestion_penalty=100.0)
context = AStarContext(evaluator, node_limit=2000000, bend_radii=[50.0], sbend_radii=[50.0]) context = AStarContext(evaluator, node_limit=2000000, snap_size=5.0, bend_radii=[50.0], sbend_radii=[50.0])
metrics = AStarMetrics() metrics = AStarMetrics()
pf = PathFinder(context, metrics, max_iterations=15, base_congestion_penalty=100.0, congestion_multiplier=1.4) pf = PathFinder(context, metrics, max_iterations=15, base_congestion_penalty=100.0, congestion_multiplier=1.4)
@ -44,8 +44,8 @@ def main() -> None:
end_y_pitch = 800.0 / (num_nets - 1) end_y_pitch = 800.0 / (num_nets - 1)
for i in range(num_nets): for i in range(num_nets):
sy = int(round(start_y_base + i * 10.0)) sy = round((start_y_base + i * 10.0) / 5.0) * 5.0
ey = int(round(end_y_base + i * end_y_pitch)) ey = round((end_y_base + i * end_y_pitch) / 5.0) * 5.0
netlist[f"net_{i:02d}"] = (Port(start_x, sy, 0), Port(end_x, ey, 0)) netlist[f"net_{i:02d}"] = (Port(start_x, sy, 0), Port(end_x, ey, 0))
net_widths = {nid: 2.0 for nid in netlist} net_widths = {nid: 2.0 for nid in netlist}
@ -60,7 +60,38 @@ def main() -> None:
total_collisions = sum(r.collisions for r in current_results.values()) total_collisions = sum(r.collisions for r in current_results.values())
total_nodes = metrics.nodes_expanded total_nodes = metrics.nodes_expanded
# Identify Hotspots
hotspots = {}
overlap_matrix = {} # (net_a, net_b) -> count
for nid, res in current_results.items():
if not res.path:
continue
for comp in res.path:
for poly in comp.geometry:
# Check what it overlaps with
overlaps = engine.dynamic_index.intersection(poly.bounds)
for other_obj_id in overlaps:
if other_obj_id in engine.dynamic_geometries:
other_nid, other_poly = engine.dynamic_geometries[other_obj_id]
if other_nid != nid:
if poly.intersects(other_poly):
# Record hotspot
cx, cy = poly.centroid.x, poly.centroid.y
grid_key = (int(cx/20)*20, int(cy/20)*20)
hotspots[grid_key] = hotspots.get(grid_key, 0) + 1
# Record pair
pair = tuple(sorted((nid, other_nid)))
overlap_matrix[pair] = overlap_matrix.get(pair, 0) + 1
print(f" Iteration {idx} finished. Successes: {successes}/{len(netlist)}, Collisions: {total_collisions}") print(f" Iteration {idx} finished. Successes: {successes}/{len(netlist)}, Collisions: {total_collisions}")
if overlap_matrix:
top_pairs = sorted(overlap_matrix.items(), key=lambda x: x[1], reverse=True)[:3]
print(f" Top Conflicts: {top_pairs}")
if hotspots:
top_hotspots = sorted(hotspots.items(), key=lambda x: x[1], reverse=True)[:3]
print(f" Top Hotspots: {top_hotspots}")
# Adaptive Greediness: Decay from 1.5 to 1.1 over 10 iterations # Adaptive Greediness: Decay from 1.5 to 1.1 over 10 iterations
new_greedy = max(1.1, 1.5 - ((idx + 1) / 10.0) * 0.4) new_greedy = max(1.1, 1.5 - ((idx + 1) / 10.0) * 0.4)
@ -73,12 +104,46 @@ def main() -> None:
'Congestion': total_collisions, 'Congestion': total_collisions,
'Nodes': total_nodes 'Nodes': total_nodes
}) })
# Save plots only for certain iterations to save time
# if idx % 20 == 0 or idx == pf.max_iterations - 1:
if True:
# Save a plot of this iteration's result
fig, ax = plot_routing_results(current_results, obstacles, bounds, netlist=netlist)
plot_danger_map(danger_map, ax=ax)
# Overlay failures: show where they stopped
for nid, res in current_results.items():
if not res.is_valid and res.path:
last_p = res.path[-1].end_port
target_p = netlist[nid][1]
dist = abs(last_p.x - target_p.x) + abs(last_p.y - target_p.y)
ax.scatter(last_p.x, last_p.y, color='red', marker='x', s=100)
ax.text(last_p.x, last_p.y, f" {nid} (rem: {dist:.0f}um)", color='red', fontsize=8)
fig.savefig(f"examples/07_iteration_{idx:02d}.png")
import matplotlib.pyplot as plt
plt.close(fig)
# Plot Expansion Density if data is available
if pf.accumulated_expanded_nodes:
fig_d, ax_d = plot_expansion_density(pf.accumulated_expanded_nodes, bounds)
fig_d.savefig(f"examples/07_iteration_{idx:02d}_density.png")
plt.close(fig_d)
metrics.reset_per_route() metrics.reset_per_route()
import cProfile, pstats
profiler = cProfile.Profile()
profiler.enable()
t0 = time.perf_counter() t0 = time.perf_counter()
results = pf.route_all(netlist, net_widths, store_expanded=True, iteration_callback=iteration_callback, shuffle_nets=True, seed=42) results = pf.route_all(netlist, net_widths, store_expanded=True, iteration_callback=iteration_callback, shuffle_nets=True, seed=42)
t1 = time.perf_counter() t1 = time.perf_counter()
profiler.disable()
# Final stats
stats = pstats.Stats(profiler).sort_stats('tottime')
stats.print_stats(20)
print(f"Routing took {t1-t0:.4f}s") print(f"Routing took {t1-t0:.4f}s")
# 4. Check Results # 4. Check Results
@ -92,15 +157,28 @@ def main() -> None:
print(f"\nFinal: Routed {success_count}/{len(netlist)} nets successfully.") print(f"\nFinal: Routed {success_count}/{len(netlist)} nets successfully.")
for nid, res in results.items(): for nid, res in results.items():
target_p = netlist[nid][1]
if not res.is_valid: if not res.is_valid:
print(f" FAILED: {nid}, collisions={res.collisions}") last_p = res.path[-1].end_port if res.path else netlist[nid][0]
dist = abs(last_p.x - target_p.x) + abs(last_p.y - target_p.y)
print(f" FAILED: {nid} (Stopped {dist:.1f}um from target)")
else: else:
print(f" {nid}: SUCCESS") types = [move.move_type for move in res.path]
from collections import Counter
counts = Counter(types)
print(f" {nid}: {len(res.path)} segments, {dict(counts)}")
# 5. Visualize # 5. Visualize
fig, ax = plot_routing_results(results, obstacles, bounds, netlist=netlist) fig, ax = plot_routing_results(results, obstacles, bounds, netlist=netlist)
# Overlay Danger Map
plot_danger_map(danger_map, ax=ax) plot_danger_map(danger_map, ax=ax)
# Overlay Expanded Nodes from last routed net (as an example)
if metrics.last_expanded_nodes:
print(f"Plotting {len(metrics.last_expanded_nodes)} expanded nodes for the last net...")
plot_expanded_nodes(metrics.last_expanded_nodes, ax=ax, color='blue', alpha=0.1)
fig.savefig("examples/07_large_scale_routing.png") fig.savefig("examples/07_large_scale_routing.png")
print("Saved plot to examples/07_large_scale_routing.png") print("Saved plot to examples/07_large_scale_routing.png")

Binary file not shown.

Before

Width:  |  Height:  |  Size: 75 KiB

After

Width:  |  Height:  |  Size: 90 KiB

Before After
Before After

View file

@ -19,7 +19,7 @@ def main() -> None:
danger_map.precompute([]) danger_map.precompute([])
evaluator = CostEvaluator(engine, danger_map, bend_penalty=50.0, sbend_penalty=150.0) evaluator = CostEvaluator(engine, danger_map, bend_penalty=50.0, sbend_penalty=150.0)
context = AStarContext(evaluator, bend_radii=[10.0], sbend_radii=[]) context = AStarContext(evaluator, snap_size=1.0, bend_radii=[10.0], sbend_radii=[])
metrics = AStarMetrics() metrics = AStarMetrics()
pf = PathFinder(context, metrics) pf = PathFinder(context, metrics)
@ -40,7 +40,7 @@ def main() -> None:
print("Routing with custom collision model...") print("Routing with custom collision model...")
# Override bend_collision_type with a literal Polygon # Override bend_collision_type with a literal Polygon
context_custom = AStarContext(evaluator, bend_radii=[10.0], bend_collision_type=custom_poly, sbend_radii=[]) context_custom = AStarContext(evaluator, snap_size=1.0, bend_radii=[10.0], bend_collision_type=custom_poly, sbend_radii=[])
metrics_custom = AStarMetrics() metrics_custom = AStarMetrics()
results_custom = PathFinder(context_custom, metrics_custom, use_tiered_strategy=False).route_all( results_custom = PathFinder(context_custom, metrics_custom, use_tiered_strategy=False).route_all(
{"custom_model": netlist["custom_bend"]}, {"custom_model": 2.0} {"custom_model": netlist["custom_bend"]}, {"custom_model": 2.0}

View file

@ -8,49 +8,51 @@ from inire.utils.visualization import plot_routing_results
from shapely.geometry import box from shapely.geometry import box
def main() -> None: def main() -> None:
print("Running Example 09: Best-Effort Under Tight Search Budget...") print("Running Example 09: Best-Effort (Unroutable Net)...")
# 1. Setup Environment # 1. Setup Environment
bounds = (0, 0, 100, 100) bounds = (0, 0, 100, 100)
engine = CollisionEngine(clearance=2.0) engine = CollisionEngine(clearance=2.0)
# A small obstacle cluster keeps the partial route visually interesting. # Create a 'cage' that completely blocks the target
obstacles = [ cage = [
box(35, 35, 45, 65), box(70, 30, 75, 70), # Left wall
box(55, 35, 65, 65), box(70, 70, 95, 75), # Top wall
box(70, 25, 95, 30), # Bottom wall
] ]
for obs in obstacles: for obs in cage:
engine.add_static_obstacle(obs) engine.add_static_obstacle(obs)
danger_map = DangerMap(bounds=bounds) danger_map = DangerMap(bounds=bounds)
danger_map.precompute(obstacles) danger_map.precompute(cage)
evaluator = CostEvaluator(engine, danger_map, bend_penalty=50.0, sbend_penalty=150.0) evaluator = CostEvaluator(engine, danger_map, bend_penalty=50.0, sbend_penalty=150.0)
# Keep the search budget intentionally tiny so the router returns a partial path. # Use a low node limit to fail faster
context = AStarContext(evaluator, node_limit=3, bend_radii=[10.0]) context = AStarContext(evaluator, node_limit=2000, snap_size=1.0, bend_radii=[10.0])
metrics = AStarMetrics() metrics = AStarMetrics()
pf = PathFinder(context, metrics, warm_start=None) # Enable partial path return (handled internally by PathFinder calling route_astar with return_partial=True)
pf = PathFinder(context, metrics)
# 2. Define Netlist: reaching the target requires additional turns the search budget cannot afford. # 2. Define Netlist: start outside, target inside the cage
netlist = { netlist = {
"budget_limited_net": (Port(10, 50, 0), Port(85, 60, 180)), "trapped_net": (Port(10, 50, 0), Port(85, 50, 0)),
} }
net_widths = {"budget_limited_net": 2.0} net_widths = {"trapped_net": 2.0}
# 3. Route # 3. Route
print("Routing with a deliberately tiny node budget (should return a partial path)...") print("Routing net into a cage (should fail and return partial)...")
results = pf.route_all(netlist, net_widths) results = pf.route_all(netlist, net_widths)
# 4. Check Results # 4. Check Results
res = results["budget_limited_net"] res = results["trapped_net"]
if not res.reached_target: if not res.is_valid:
print(f"Target not reached as expected. Partial path length: {len(res.path)} segments.") print(f"Net failed to route as expected. Partial path length: {len(res.path)} segments.")
else: else:
print("The route unexpectedly reached the target. Increase difficulty or reduce the node budget further.") print("Wait, it found a way in? Check the cage geometry!")
# 5. Visualize # 5. Visualize
fig, ax = plot_routing_results(results, obstacles, bounds, netlist=netlist) fig, ax = plot_routing_results(results, cage, bounds, netlist=netlist)
fig.savefig("examples/09_unroutable_best_effort.png") fig.savefig("examples/09_unroutable_best_effort.png")
print("Saved plot to examples/09_unroutable_best_effort.png") print("Saved plot to examples/09_unroutable_best_effort.png")

View file

@ -23,13 +23,12 @@ class CollisionEngine:
'clearance', 'max_net_width', 'safety_zone_radius', 'clearance', 'max_net_width', 'safety_zone_radius',
'static_index', 'static_geometries', 'static_dilated', 'static_prepared', 'static_index', 'static_geometries', 'static_dilated', 'static_prepared',
'static_is_rect', 'static_tree', 'static_obj_ids', 'static_safe_cache', 'static_is_rect', 'static_tree', 'static_obj_ids', 'static_safe_cache',
'static_grid', 'grid_cell_size', '_static_id_counter', '_net_specific_trees', 'static_grid', 'grid_cell_size', '_static_id_counter',
'_net_specific_is_rect', '_net_specific_bounds',
'dynamic_index', 'dynamic_geometries', 'dynamic_dilated', 'dynamic_prepared', 'dynamic_index', 'dynamic_geometries', 'dynamic_dilated', 'dynamic_prepared',
'dynamic_tree', 'dynamic_obj_ids', 'dynamic_grid', '_dynamic_id_counter', 'dynamic_tree', 'dynamic_obj_ids', 'dynamic_grid', '_dynamic_id_counter',
'metrics', '_dynamic_tree_dirty', '_dynamic_net_ids_array', '_inv_grid_cell_size', 'metrics', '_dynamic_tree_dirty', '_dynamic_net_ids_array', '_inv_grid_cell_size',
'_static_bounds_array', '_static_is_rect_array', '_locked_nets', '_static_bounds_array', '_static_is_rect_array', '_locked_nets',
'_static_raw_tree', '_static_raw_obj_ids', '_dynamic_bounds_array', '_static_version' '_static_raw_tree', '_static_raw_obj_ids', '_dynamic_bounds_array'
) )
def __init__( def __init__(
@ -54,10 +53,6 @@ class CollisionEngine:
self._static_is_rect_array: numpy.ndarray | None = None self._static_is_rect_array: numpy.ndarray | None = None
self._static_raw_tree: STRtree | None = None self._static_raw_tree: STRtree | None = None
self._static_raw_obj_ids: list[int] = [] self._static_raw_obj_ids: list[int] = []
self._net_specific_trees: dict[tuple[float, float], STRtree] = {}
self._net_specific_is_rect: dict[tuple[float, float], numpy.ndarray] = {}
self._net_specific_bounds: dict[tuple[float, float], numpy.ndarray] = {}
self._static_version = 0
self.static_safe_cache: set[tuple] = set() self.static_safe_cache: set[tuple] = set()
self.static_grid: dict[tuple[int, int], list[int]] = {} self.static_grid: dict[tuple[int, int], list[int]] = {}
@ -101,21 +96,22 @@ class CollisionEngine:
f" Congestion: {m['congestion_tree_queries']} checks\n" f" Congestion: {m['congestion_tree_queries']} checks\n"
f" Safety Zone: {m['safety_zone_checks']} full intersections performed") f" Safety Zone: {m['safety_zone_checks']} full intersections performed")
def add_static_obstacle(self, polygon: Polygon, dilated_geometry: Polygon | None = None) -> int: def add_static_obstacle(self, polygon: Polygon) -> int:
obj_id = self._static_id_counter obj_id = self._static_id_counter
self._static_id_counter += 1 self._static_id_counter += 1
# Preserve existing dilation if provided, else use default C/2 # Consistent with Wi/2 + C/2 separation:
if dilated_geometry is not None: # Buffer static obstacles by half clearance.
dilated = dilated_geometry # Checkers must also buffer waveguide by Wi/2 + C/2.
else:
dilated = polygon.buffer(self.clearance / 2.0, join_style=2) dilated = polygon.buffer(self.clearance / 2.0, join_style=2)
self.static_geometries[obj_id] = polygon self.static_geometries[obj_id] = polygon
self.static_dilated[obj_id] = dilated self.static_dilated[obj_id] = dilated
self.static_prepared[obj_id] = prep(dilated) self.static_prepared[obj_id] = prep(dilated)
self.static_index.insert(obj_id, dilated.bounds) self.static_index.insert(obj_id, dilated.bounds)
self._invalidate_static_caches() self.static_tree = None
self._static_raw_tree = None
self.static_grid = {}
b = dilated.bounds b = dilated.bounds
area = (b[2] - b[0]) * (b[3] - b[1]) area = (b[2] - b[0]) * (b[3] - b[1])
self.static_is_rect[obj_id] = (abs(dilated.area - area) < 1e-4) self.static_is_rect[obj_id] = (abs(dilated.area - area) < 1e-4)
@ -135,21 +131,10 @@ class CollisionEngine:
del self.static_dilated[obj_id] del self.static_dilated[obj_id]
del self.static_prepared[obj_id] del self.static_prepared[obj_id]
del self.static_is_rect[obj_id] del self.static_is_rect[obj_id]
self._invalidate_static_caches()
def _invalidate_static_caches(self) -> None:
self.static_tree = None self.static_tree = None
self._static_bounds_array = None
self._static_is_rect_array = None
self.static_obj_ids = []
self._static_raw_tree = None self._static_raw_tree = None
self._static_raw_obj_ids = []
self.static_grid = {} self.static_grid = {}
self._net_specific_trees.clear()
self._net_specific_is_rect.clear()
self._net_specific_bounds.clear()
self.static_safe_cache.clear()
self._static_version += 1
def _ensure_static_tree(self) -> None: def _ensure_static_tree(self) -> None:
if self.static_tree is None and self.static_dilated: if self.static_tree is None and self.static_dilated:
@ -159,37 +144,6 @@ class CollisionEngine:
self._static_bounds_array = numpy.array([g.bounds for g in geoms]) self._static_bounds_array = numpy.array([g.bounds for g in geoms])
self._static_is_rect_array = numpy.array([self.static_is_rect[i] for i in self.static_obj_ids]) self._static_is_rect_array = numpy.array([self.static_is_rect[i] for i in self.static_obj_ids])
def _ensure_net_static_tree(self, net_width: float) -> STRtree:
"""
Lazily generate a tree where obstacles are dilated by (net_width/2 + clearance).
"""
key = (round(net_width, 4), round(self.clearance, 4))
if key in self._net_specific_trees:
return self._net_specific_trees[key]
# Physical separation must be >= clearance.
# Centerline to raw obstacle edge must be >= net_width/2 + clearance.
total_dilation = net_width / 2.0 + self.clearance
geoms = []
is_rect_list = []
bounds_list = []
for obj_id in sorted(self.static_geometries.keys()):
poly = self.static_geometries[obj_id]
dilated = poly.buffer(total_dilation, join_style=2)
geoms.append(dilated)
b = dilated.bounds
bounds_list.append(b)
area = (b[2] - b[0]) * (b[3] - b[1])
is_rect_list.append(abs(dilated.area - area) < 1e-4)
tree = STRtree(geoms)
self._net_specific_trees[key] = tree
self._net_specific_is_rect[key] = numpy.array(is_rect_list, dtype=bool)
self._net_specific_bounds[key] = numpy.array(bounds_list)
return tree
def _ensure_static_raw_tree(self) -> None: def _ensure_static_raw_tree(self) -> None:
if self._static_raw_tree is None and self.static_geometries: if self._static_raw_tree is None and self.static_geometries:
self._static_raw_obj_ids = sorted(self.static_geometries.keys()) self._static_raw_obj_ids = sorted(self.static_geometries.keys())
@ -251,9 +205,7 @@ class CollisionEngine:
to_move = [obj_id for obj_id, (nid, _) in self.dynamic_geometries.items() if nid == net_id] to_move = [obj_id for obj_id, (nid, _) in self.dynamic_geometries.items() if nid == net_id]
for obj_id in to_move: for obj_id in to_move:
poly = self.dynamic_geometries[obj_id][1] poly = self.dynamic_geometries[obj_id][1]
dilated = self.dynamic_dilated[obj_id] self.add_static_obstacle(poly)
# Preserve dilation for perfect consistency
self.add_static_obstacle(poly, dilated_geometry=dilated)
# Remove from dynamic index (without triggering the locked-net guard) # Remove from dynamic index (without triggering the locked-net guard)
self.dynamic_tree = None self.dynamic_tree = None
@ -267,9 +219,9 @@ class CollisionEngine:
def unlock_net(self, net_id: str) -> None: def unlock_net(self, net_id: str) -> None:
self._locked_nets.discard(net_id) self._locked_nets.discard(net_id)
def check_move_straight_static(self, start_port: Port, length: float, net_width: float) -> bool: def check_move_straight_static(self, start_port: Port, length: float) -> bool:
self.metrics['static_straight_fast'] += 1 self.metrics['static_straight_fast'] += 1
reach = self.ray_cast(start_port, start_port.orientation, max_dist=length + 0.01, net_width=net_width) reach = self.ray_cast(start_port, start_port.orientation, max_dist=length + 0.01)
return reach < length - 0.001 return reach < length - 0.001
def _is_in_safety_zone_fast(self, idx: int, start_port: Port | None, end_port: Port | None) -> bool: def _is_in_safety_zone_fast(self, idx: int, start_port: Port | None, end_port: Port | None) -> bool:
@ -284,19 +236,19 @@ class CollisionEngine:
b[1]-sz <= end_port.y <= b[3]+sz): return True b[1]-sz <= end_port.y <= b[3]+sz): return True
return False return False
def check_move_static(self, result: ComponentResult, start_port: Port | None = None, end_port: Port | None = None, net_width: float | None = None) -> bool: def check_move_static(self, result: ComponentResult, start_port: Port | None = None, end_port: Port | None = None) -> bool:
if not self.static_dilated: return False if not self.static_dilated: return False
self.metrics['static_tree_queries'] += 1 self.metrics['static_tree_queries'] += 1
self._ensure_static_tree() self._ensure_static_tree()
# 1. Fast total bounds check (Use dilated bounds to ensure clearance is caught) # 1. Fast total bounds check
tb = result.total_dilated_bounds if result.total_dilated_bounds else result.total_bounds tb = result.total_bounds
hits = self.static_tree.query(box(*tb)) hits = self.static_tree.query(box(*tb))
if hits.size == 0: return False if hits.size == 0: return False
# 2. Per-hit check # 2. Per-hit check
s_bounds = self._static_bounds_array s_bounds = self._static_bounds_array
move_poly_bounds = result.dilated_bounds if result.dilated_bounds else result.bounds move_poly_bounds = result.bounds
for hit_idx in hits: for hit_idx in hits:
obs_b = s_bounds[hit_idx] obs_b = s_bounds[hit_idx]
@ -314,6 +266,9 @@ class CollisionEngine:
if self._is_in_safety_zone_fast(hit_idx, start_port, end_port): if self._is_in_safety_zone_fast(hit_idx, start_port, end_port):
# If near port, we must use the high-precision check # If near port, we must use the high-precision check
obj_id = self.static_obj_ids[hit_idx] obj_id = self.static_obj_ids[hit_idx]
# Triggers lazy evaluation of geometry only if needed
poly_move = result.geometry[0] # Simplification: assume 1 poly for now or loop
# Actually, better loop over move polygons for high-fidelity
collision_found = False collision_found = False
for p_move in result.geometry: for p_move in result.geometry:
if not self._is_in_safety_zone(p_move, obj_id, start_port, end_port): if not self._is_in_safety_zone(p_move, obj_id, start_port, end_port):
@ -322,14 +277,13 @@ class CollisionEngine:
return True return True
# Not in safety zone and AABBs overlap - check real intersection # Not in safety zone and AABBs overlap - check real intersection
# This is the most common path for real collisions or near misses
obj_id = self.static_obj_ids[hit_idx] obj_id = self.static_obj_ids[hit_idx]
# Use dilated geometry (Wi/2 + C/2) against static_dilated (C/2) to get Wi/2 + C. raw_obstacle = self.static_geometries[obj_id]
# Touching means gap is exactly C. Intersection without touches means gap < C.
test_geoms = result.dilated_geometry if result.dilated_geometry else result.geometry test_geoms = result.dilated_geometry if result.dilated_geometry else result.geometry
static_obs_dilated = self.static_dilated[obj_id]
for i, p_test in enumerate(test_geoms): for i, p_test in enumerate(test_geoms):
if p_test.intersects(static_obs_dilated) and not p_test.touches(static_obs_dilated): if p_test.intersects(raw_obstacle):
return True return True
return False return False
@ -385,11 +339,11 @@ class CollisionEngine:
possible_total = (tb[0] < d_bounds[:, 2]) & (tb[2] > d_bounds[:, 0]) & \ possible_total = (tb[0] < d_bounds[:, 2]) & (tb[2] > d_bounds[:, 0]) & \
(tb[1] < d_bounds[:, 3]) & (tb[3] > d_bounds[:, 1]) (tb[1] < d_bounds[:, 3]) & (tb[3] > d_bounds[:, 1])
valid_hits_mask = (self._dynamic_net_ids_array != net_id) valid_hits = (self._dynamic_net_ids_array != net_id)
if not numpy.any(possible_total & valid_hits_mask): if not numpy.any(possible_total & valid_hits):
return 0 return 0
# 2. Per-polygon check using query # 2. Per-polygon AABB check using query on geometries (LAZY triggering)
geoms_to_test = result.dilated_geometry if result.dilated_geometry else result.geometry geoms_to_test = result.dilated_geometry if result.dilated_geometry else result.geometry
res_indices, tree_indices = self.dynamic_tree.query(geoms_to_test, predicate='intersects') res_indices, tree_indices = self.dynamic_tree.query(geoms_to_test, predicate='intersects')
@ -397,35 +351,8 @@ class CollisionEngine:
return 0 return 0
hit_net_ids = numpy.take(self._dynamic_net_ids_array, tree_indices) hit_net_ids = numpy.take(self._dynamic_net_ids_array, tree_indices)
valid_geoms_hits = (hit_net_ids != net_id)
# Group by other net_id to minimize 'touches' calls return int(numpy.sum(valid_geoms_hits))
unique_other_nets = numpy.unique(hit_net_ids[hit_net_ids != net_id])
if unique_other_nets.size == 0:
return 0
tree_geoms = self.dynamic_tree.geometries
real_hits_count = 0
for other_nid in unique_other_nets:
other_mask = (hit_net_ids == other_nid)
sub_tree_indices = tree_indices[other_mask]
sub_res_indices = res_indices[other_mask]
# Check if ANY hit for THIS other net is a real collision
found_real = False
for j in range(len(sub_tree_indices)):
p_test = geoms_to_test[sub_res_indices[j]]
p_tree = tree_geoms[sub_tree_indices[j]]
if not p_test.touches(p_tree):
# Add small area tolerance for numerical precision
if p_test.intersection(p_tree).area > 1e-7:
found_real = True
break
if found_real:
real_hits_count += 1
return real_hits_count
def _is_in_safety_zone(self, geometry: Polygon, obj_id: int, start_port: Port | None, end_port: Port | None) -> bool: def _is_in_safety_zone(self, geometry: Polygon, obj_id: int, start_port: Port | None, end_port: Port | None) -> bool:
""" """
@ -465,21 +392,17 @@ class CollisionEngine:
self._ensure_static_tree() self._ensure_static_tree()
if self.static_tree is None: return False if self.static_tree is None: return False
# Separation needed: Centerline-to-WallEdge >= Wi/2 + C. # Separation needed: (Wi + C)/2.
# static_tree has obstacles buffered by C/2. # static_dilated is buffered by C/2.
# geometry is physical waveguide (Wi/2 from centerline). # So we need geometry buffered by Wi/2.
# So we buffer geometry by C/2 to get Wi/2 + C/2. if dilated_geometry:
# Intersection means separation < (Wi/2 + C/2) + C/2 = Wi/2 + C.
if dilated_geometry is not None:
test_geom = dilated_geometry test_geom = dilated_geometry
else: else:
dist = self.clearance / 2.0 dist = (net_width / 2.0) if net_width is not None else 0.0
test_geom = geometry.buffer(dist + 1e-7, join_style=2) if dist > 0 else geometry test_geom = geometry.buffer(dist + 1e-7, join_style=2) if dist >= 0 else geometry
hits = self.static_tree.query(test_geom, predicate='intersects') hits = self.static_tree.query(test_geom, predicate='intersects')
tree_geoms = self.static_tree.geometries
for hit_idx in hits: for hit_idx in hits:
if test_geom.touches(tree_geoms[hit_idx]): continue
obj_id = self.static_obj_ids[hit_idx] obj_id = self.static_obj_ids[hit_idx]
if self._is_in_safety_zone(geometry, obj_id, start_port, end_port): continue if self._is_in_safety_zone(geometry, obj_id, start_port, end_port): continue
return True return True
@ -489,166 +412,60 @@ class CollisionEngine:
if self.dynamic_tree is None: return 0 if self.dynamic_tree is None: return 0
test_poly = dilated_geometry if dilated_geometry else geometry.buffer(self.clearance / 2.0) test_poly = dilated_geometry if dilated_geometry else geometry.buffer(self.clearance / 2.0)
hits = self.dynamic_tree.query(test_poly, predicate='intersects') hits = self.dynamic_tree.query(test_poly, predicate='intersects')
tree_geoms = self.dynamic_tree.geometries count = 0
hit_net_ids = []
for hit_idx in hits: for hit_idx in hits:
if test_poly.touches(tree_geoms[hit_idx]): continue
obj_id = self.dynamic_obj_ids[hit_idx] obj_id = self.dynamic_obj_ids[hit_idx]
other_id = self.dynamic_geometries[obj_id][0] if self.dynamic_geometries[obj_id][0] != net_id: count += 1
if other_id != net_id: return count
hit_net_ids.append(other_id)
return len(numpy.unique(hit_net_ids)) if hit_net_ids else 0
def is_collision(self, geometry: Polygon, net_id: str = 'default', net_width: float | None = None, start_port: Port | None = None, end_port: Port | None = None) -> bool: def is_collision(self, geometry: Polygon, net_id: str = 'default', net_width: float | None = None, start_port: Port | None = None, end_port: Port | None = None) -> bool:
""" Unified entry point for static collision checks. """ """ Unified entry point for static collision checks. """
result = self.check_collision(geometry, net_id, buffer_mode='static', start_port=start_port, end_port=end_port, net_width=net_width) result = self.check_collision(geometry, net_id, buffer_mode='static', start_port=start_port, end_port=end_port, net_width=net_width)
return bool(result) return bool(result)
def verify_path(self, net_id: str, components: list[ComponentResult]) -> tuple[bool, int]: def ray_cast(self, origin: Port, angle_deg: float, max_dist: float = 2000.0) -> float:
"""
Non-approximated, full-polygon intersection check of a path against all
static obstacles and other nets.
"""
collision_count = 0
# 1. Check against static obstacles
self._ensure_static_raw_tree()
if self._static_raw_tree is not None:
raw_geoms = self._static_raw_tree.geometries
for comp in components:
# Use ACTUAL geometry, not dilated/proxy
actual_geoms = comp.actual_geometry if comp.actual_geometry is not None else comp.geometry
for p_actual in actual_geoms:
# Physical separation must be >= clearance.
p_verify = p_actual.buffer(self.clearance, join_style=2)
hits = self._static_raw_tree.query(p_verify, predicate='intersects')
for hit_idx in hits:
p_obs = raw_geoms[hit_idx]
# If they ONLY touch, gap is exactly clearance. Valid.
if p_verify.touches(p_obs): continue
obj_id = self._static_raw_obj_ids[hit_idx]
if not self._is_in_safety_zone(p_actual, obj_id, None, None):
collision_count += 1
# 2. Check against other nets
self._ensure_dynamic_tree()
if self.dynamic_tree is not None:
tree_geoms = self.dynamic_tree.geometries
for comp in components:
# Robust fallback chain to ensure crossings are caught even with zero clearance
d_geoms = comp.dilated_actual_geometry or comp.dilated_geometry or comp.actual_geometry or comp.geometry
if not d_geoms: continue
# Ensure d_geoms is a list/array for STRtree.query
if not isinstance(d_geoms, (list, tuple, numpy.ndarray)):
d_geoms = [d_geoms]
res_indices, tree_indices = self.dynamic_tree.query(d_geoms, predicate='intersects')
if tree_indices.size > 0:
hit_net_ids = numpy.take(self._dynamic_net_ids_array, tree_indices)
net_id_str = str(net_id)
comp_hits = []
for i in range(len(tree_indices)):
if hit_net_ids[i] == net_id_str: continue
p_new = d_geoms[res_indices[i]]
p_tree = tree_geoms[tree_indices[i]]
if not p_new.touches(p_tree):
# Numerical tolerance for area overlap
if p_new.intersection(p_tree).area > 1e-7:
comp_hits.append(hit_net_ids[i])
if comp_hits:
collision_count += len(numpy.unique(comp_hits))
return (collision_count == 0), collision_count
def ray_cast(self, origin: Port, angle_deg: float, max_dist: float = 2000.0, net_width: float | None = None) -> float:
rad = numpy.radians(angle_deg) rad = numpy.radians(angle_deg)
cos_v, sin_v = numpy.cos(rad), numpy.sin(rad) cos_v, sin_v = numpy.cos(rad), numpy.sin(rad)
dx, dy = max_dist * cos_v, max_dist * sin_v dx, dy = max_dist * cos_v, max_dist * sin_v
min_x, max_x = sorted([origin.x, origin.x + dx]) min_x, max_x = sorted([origin.x, origin.x + dx])
min_y, max_y = sorted([origin.y, origin.y + dy]) min_y, max_y = sorted([origin.y, origin.y + dy])
key = None
if net_width is not None:
tree = self._ensure_net_static_tree(net_width)
key = (round(net_width, 4), round(self.clearance, 4))
is_rect_arr = self._net_specific_is_rect[key]
bounds_arr = self._net_specific_bounds[key]
else:
self._ensure_static_tree() self._ensure_static_tree()
tree = self.static_tree if self.static_tree is None: return max_dist
is_rect_arr = self._static_is_rect_array candidates = self.static_tree.query(box(min_x, min_y, max_x, max_y))
bounds_arr = self._static_bounds_array
if tree is None: return max_dist
candidates = tree.query(box(min_x, min_y, max_x, max_y))
if candidates.size == 0: return max_dist if candidates.size == 0: return max_dist
min_dist = max_dist min_dist = max_dist
inv_dx = 1.0 / dx if abs(dx) > 1e-12 else 1e30 inv_dx = 1.0 / dx if abs(dx) > 1e-12 else 1e30
inv_dy = 1.0 / dy if abs(dy) > 1e-12 else 1e30 inv_dy = 1.0 / dy if abs(dy) > 1e-12 else 1e30
b_arr = self._static_bounds_array[candidates]
tree_geoms = tree.geometries dist_sq = (b_arr[:, 0] - origin.x)**2 + (b_arr[:, 1] - origin.y)**2
ray_line = None
# Fast AABB-based pre-sort
candidates_bounds = bounds_arr[candidates]
# Distance to AABB min corner as heuristic
dist_sq = (candidates_bounds[:, 0] - origin.x)**2 + (candidates_bounds[:, 1] - origin.y)**2
sorted_indices = numpy.argsort(dist_sq) sorted_indices = numpy.argsort(dist_sq)
ray_line = None
for idx in sorted_indices: for i in sorted_indices:
c = candidates[idx] c = candidates[i]; b = self._static_bounds_array[c]
b = bounds_arr[c] if abs(dx) < 1e-12:
# Fast axis-aligned ray-AABB intersection
# (Standard Slab method)
if abs(dx) < 1e-12: # Vertical ray
if origin.x < b[0] or origin.x > b[2]: tx_min, tx_max = 1e30, -1e30 if origin.x < b[0] or origin.x > b[2]: tx_min, tx_max = 1e30, -1e30
else: tx_min, tx_max = -1e30, 1e30 else: tx_min, tx_max = -1e30, 1e30
else: else:
t1, t2 = (b[0] - origin.x) * inv_dx, (b[2] - origin.x) * inv_dx t1, t2 = (b[0] - origin.x) * inv_dx, (b[2] - origin.x) * inv_dx
tx_min, tx_max = min(t1, t2), max(t1, t2) tx_min, tx_max = min(t1, t2), max(t1, t2)
if abs(dy) < 1e-12:
if abs(dy) < 1e-12: # Horizontal ray
if origin.y < b[1] or origin.y > b[3]: ty_min, ty_max = 1e30, -1e30 if origin.y < b[1] or origin.y > b[3]: ty_min, ty_max = 1e30, -1e30
else: ty_min, ty_max = -1e30, 1e30 else: ty_min, ty_max = -1e30, 1e30
else: else:
t1, t2 = (b[1] - origin.y) * inv_dy, (b[3] - origin.y) * inv_dy t1, t2 = (b[1] - origin.y) * inv_dy, (b[3] - origin.y) * inv_dy
ty_min, ty_max = min(t1, t2), max(t1, t2) ty_min, ty_max = min(t1, t2), max(t1, t2)
t_min, t_max = max(tx_min, ty_min), min(tx_max, ty_max) t_min, t_max = max(tx_min, ty_min), min(tx_max, ty_max)
if t_max < 0 or t_min > t_max or t_min > 1.0 or t_min >= min_dist / max_dist: continue
# Intersection conditions if self._static_is_rect_array[c]:
if t_max < 0 or t_min > t_max or t_min > 1.0: continue min_dist = max(0.0, t_min * max_dist); continue
if ray_line is None: ray_line = LineString([(origin.x, origin.y), (origin.x + dx, origin.y + dy)])
# If hit is further than current min_dist, skip obj_id = self.static_obj_ids[c]
if t_min * max_dist >= min_dist: continue if self.static_prepared[obj_id].intersects(ray_line):
intersection = ray_line.intersection(self.static_dilated[obj_id])
# HIGH PRECISION CHECK
if is_rect_arr[c]:
# Rectangles are perfectly described by their AABB
min_dist = max(0.0, t_min * max_dist)
continue
# Fallback to full geometry check for non-rectangles (arcs, etc.)
if ray_line is None:
ray_line = LineString([(origin.x, origin.y), (origin.x + dx, origin.y + dy)])
obs_dilated = tree_geoms[c]
if obs_dilated.intersects(ray_line):
intersection = ray_line.intersection(obs_dilated)
if intersection.is_empty: continue if intersection.is_empty: continue
def get_dist(geom): def get_dist(geom):
if hasattr(geom, 'geoms'): return min(get_dist(g) for g in geom.geoms) if hasattr(geom, 'geoms'): return min(get_dist(g) for g in geom.geoms)
return numpy.sqrt((geom.coords[0][0] - origin.x)**2 + (geom.coords[0][1] - origin.y)**2) return numpy.sqrt((geom.coords[0][0] - origin.x)**2 + (geom.coords[0][1] - origin.y)**2)
d = get_dist(intersection) d = get_dist(intersection)
if d < min_dist: min_dist = d if d < min_dist: min_dist = d
return min_dist return min_dist

View file

@ -1,105 +1,326 @@
from __future__ import annotations from __future__ import annotations
from typing import Literal import math
from typing import Literal, cast, Any
import numpy import numpy
from shapely.affinity import translate as shapely_translate import shapely
from shapely.geometry import Polygon, box from shapely.geometry import Polygon, box, MultiPolygon
from shapely.ops import unary_union
from shapely.affinity import translate
from inire.constants import TOLERANCE_ANGULAR, TOLERANCE_LINEAR from inire.constants import DEFAULT_SEARCH_GRID_SNAP_UM, TOLERANCE_LINEAR, TOLERANCE_ANGULAR
from .primitives import Port, rotation_matrix2 from .primitives import Port
def _normalize_length(value: float) -> float: def snap_search_grid(value: float, snap_size: float = DEFAULT_SEARCH_GRID_SNAP_UM) -> float:
return float(value) """
Snap a coordinate to the nearest search grid unit.
"""
return round(value / snap_size) * snap_size
class ComponentResult: class ComponentResult:
"""
Standard container for generated move geometry and state.
Supports Lazy Evaluation for translation to improve performance.
"""
__slots__ = ( __slots__ = (
"geometry", '_geometry', '_dilated_geometry', '_proxy_geometry', '_actual_geometry', '_dilated_actual_geometry',
"dilated_geometry", 'end_port', 'length', 'move_type', '_bounds', '_dilated_bounds',
"proxy_geometry", '_total_bounds', '_total_dilated_bounds', '_bounds_cached', '_total_geom_list', '_offsets', '_coords_cache',
"actual_geometry", '_base_result', '_offset', 'rel_gx', 'rel_gy', 'rel_go'
"dilated_actual_geometry",
"end_port",
"length",
"move_type",
"_bounds",
"_total_bounds",
"_dilated_bounds",
"_total_dilated_bounds",
) )
def __init__( def __init__(
self, self,
geometry: list[Polygon], geometry: list[Polygon] | None = None,
end_port: Port, end_port: Port | None = None,
length: float, length: float = 0.0,
move_type: str,
dilated_geometry: list[Polygon] | None = None, dilated_geometry: list[Polygon] | None = None,
proxy_geometry: list[Polygon] | None = None, proxy_geometry: list[Polygon] | None = None,
actual_geometry: list[Polygon] | None = None, actual_geometry: list[Polygon] | None = None,
dilated_actual_geometry: list[Polygon] | None = None, dilated_actual_geometry: list[Polygon] | None = None,
skip_bounds: bool = False,
move_type: str = 'Unknown',
_total_geom_list: list[Polygon] | None = None,
_offsets: list[int] | None = None,
_coords_cache: numpy.ndarray | None = None,
_base_result: ComponentResult | None = None,
_offset: tuple[float, float] | None = None,
snap_size: float = DEFAULT_SEARCH_GRID_SNAP_UM,
rel_gx: int | None = None,
rel_gy: int | None = None,
rel_go: int | None = None
) -> None: ) -> None:
self.geometry = geometry
self.dilated_geometry = dilated_geometry
self.proxy_geometry = proxy_geometry
self.actual_geometry = actual_geometry
self.dilated_actual_geometry = dilated_actual_geometry
self.end_port = end_port self.end_port = end_port
self.length = float(length) self.length = length
self.move_type = move_type self.move_type = move_type
self._bounds = [poly.bounds for poly in self.geometry] self._base_result = _base_result
self._total_bounds = _combine_bounds(self._bounds) self._offset = _offset
self._bounds_cached = False
if self.dilated_geometry is None: if rel_gx is not None:
self.rel_gx = rel_gx
self.rel_gy = rel_gy
self.rel_go = rel_go
elif end_port:
inv_snap = 1.0 / snap_size
self.rel_gx = int(round(end_port.x * inv_snap))
self.rel_gy = int(round(end_port.y * inv_snap))
self.rel_go = int(round(end_port.orientation))
else:
self.rel_gx = 0; self.rel_gy = 0; self.rel_go = 0
if _base_result is not None:
# Lazy Mode
self._geometry = None
self._dilated_geometry = None
self._proxy_geometry = None
self._actual_geometry = None
self._dilated_actual_geometry = None
self._bounds = None
self._dilated_bounds = None
self._total_bounds = None
self._total_dilated_bounds = None
else:
# Eager Mode (Base Component)
self._geometry = geometry
self._dilated_geometry = dilated_geometry
self._proxy_geometry = proxy_geometry
self._actual_geometry = actual_geometry
self._dilated_actual_geometry = dilated_actual_geometry
# These are mostly legacy/unused but kept for slot safety
self._total_geom_list = _total_geom_list
self._offsets = _offsets
self._coords_cache = _coords_cache
if not skip_bounds and geometry:
# Use plain tuples for bounds to avoid NumPy overhead
self._bounds = [p.bounds for p in geometry]
b0 = self._bounds[0]
minx, miny, maxx, maxy = b0
for i in range(1, len(self._bounds)):
b = self._bounds[i]
if b[0] < minx: minx = b[0]
if b[1] < miny: miny = b[1]
if b[2] > maxx: maxx = b[2]
if b[3] > maxy: maxy = b[3]
self._total_bounds = (minx, miny, maxx, maxy)
if dilated_geometry is not None:
self._dilated_bounds = [p.bounds for p in dilated_geometry]
b0 = self._dilated_bounds[0]
minx, miny, maxx, maxy = b0
for i in range(1, len(self._dilated_bounds)):
b = self._dilated_bounds[i]
if b[0] < minx: minx = b[0]
if b[1] < miny: miny = b[1]
if b[2] > maxx: maxx = b[2]
if b[3] > maxy: maxy = b[3]
self._total_dilated_bounds = (minx, miny, maxx, maxy)
else:
self._dilated_bounds = None self._dilated_bounds = None
self._total_dilated_bounds = None self._total_dilated_bounds = None
else: else:
self._dilated_bounds = [poly.bounds for poly in self.dilated_geometry] self._bounds = None
self._total_dilated_bounds = _combine_bounds(self._dilated_bounds) self._total_bounds = None
self._dilated_bounds = None
self._total_dilated_bounds = None
self._bounds_cached = True
def _ensure_evaluated(self, attr_name: str) -> None:
if self._base_result is None:
return
# Check if specific attribute is already translated
internal_name = f'_{attr_name}'
if getattr(self, internal_name) is not None:
return
# Perform Translation for the specific attribute only
base_geoms = getattr(self._base_result, internal_name)
if base_geoms is None:
return
dx, dy = self._offset
# Use shapely.affinity.translate (imported at top level)
translated_geoms = [translate(p, dx, dy) for p in base_geoms]
setattr(self, internal_name, translated_geoms)
@property
def geometry(self) -> list[Polygon]:
self._ensure_evaluated('geometry')
return self._geometry
@property
def dilated_geometry(self) -> list[Polygon] | None:
self._ensure_evaluated('dilated_geometry')
return self._dilated_geometry
@property
def proxy_geometry(self) -> list[Polygon] | None:
self._ensure_evaluated('proxy_geometry')
return self._proxy_geometry
@property
def actual_geometry(self) -> list[Polygon] | None:
self._ensure_evaluated('actual_geometry')
return self._actual_geometry
@property
def dilated_actual_geometry(self) -> list[Polygon] | None:
self._ensure_evaluated('dilated_actual_geometry')
return self._dilated_actual_geometry
@property @property
def bounds(self) -> list[tuple[float, float, float, float]]: def bounds(self) -> list[tuple[float, float, float, float]]:
if not self._bounds_cached:
self._ensure_bounds_evaluated()
return self._bounds return self._bounds
@property @property
def total_bounds(self) -> tuple[float, float, float, float]: def total_bounds(self) -> tuple[float, float, float, float]:
if not self._bounds_cached:
self._ensure_bounds_evaluated()
return self._total_bounds return self._total_bounds
@property @property
def dilated_bounds(self) -> list[tuple[float, float, float, float]] | None: def dilated_bounds(self) -> list[tuple[float, float, float, float]] | None:
if not self._bounds_cached:
self._ensure_bounds_evaluated()
return self._dilated_bounds return self._dilated_bounds
@property @property
def total_dilated_bounds(self) -> tuple[float, float, float, float] | None: def total_dilated_bounds(self) -> tuple[float, float, float, float] | None:
if not self._bounds_cached:
self._ensure_bounds_evaluated()
return self._total_dilated_bounds return self._total_dilated_bounds
def translate(self, dx: int | float, dy: int | float) -> ComponentResult: def _ensure_bounds_evaluated(self) -> None:
if self._bounds_cached: return
base = self._base_result
if base is not None:
dx, dy = self._offset
# Direct tuple creation is much faster than NumPy for single AABBs
if base._bounds is not None:
self._bounds = [(b[0]+dx, b[1]+dy, b[2]+dx, b[3]+dy) for b in base._bounds]
if base._total_bounds is not None:
b = base._total_bounds
self._total_bounds = (b[0]+dx, b[1]+dy, b[2]+dx, b[3]+dy)
if base._dilated_bounds is not None:
self._dilated_bounds = [(b[0]+dx, b[1]+dy, b[2]+dx, b[3]+dy) for b in base._dilated_bounds]
if base._total_dilated_bounds is not None:
b = base._total_dilated_bounds
self._total_dilated_bounds = (b[0]+dx, b[1]+dy, b[2]+dx, b[3]+dy)
self._bounds_cached = True
def translate(self, dx: float, dy: float, rel_gx: int | None = None, rel_gy: int | None = None, rel_go: int | None = None) -> ComponentResult:
"""
Create a new ComponentResult translated by (dx, dy).
"""
new_port = Port(self.end_port.x + dx, self.end_port.y + dy, self.end_port.orientation, snap=False)
# LAZY TRANSLATE
if self._base_result:
base = self._base_result
current_offset = self._offset
new_offset = (current_offset[0] + dx, current_offset[1] + dy)
else:
base = self
new_offset = (dx, dy)
return ComponentResult( return ComponentResult(
geometry=[shapely_translate(poly, dx, dy) for poly in self.geometry], end_port=new_port,
end_port=self.end_port + [dx, dy, 0],
length=self.length, length=self.length,
move_type=self.move_type, move_type=self.move_type,
dilated_geometry=None if self.dilated_geometry is None else [shapely_translate(poly, dx, dy) for poly in self.dilated_geometry], _base_result=base,
proxy_geometry=None if self.proxy_geometry is None else [shapely_translate(poly, dx, dy) for poly in self.proxy_geometry], _offset=new_offset,
actual_geometry=None if self.actual_geometry is None else [shapely_translate(poly, dx, dy) for poly in self.actual_geometry], rel_gx=rel_gx,
dilated_actual_geometry=None if self.dilated_actual_geometry is None else [shapely_translate(poly, dx, dy) for poly in self.dilated_actual_geometry], rel_gy=rel_gy,
rel_go=rel_go
) )
def _combine_bounds(bounds_list: list[tuple[float, float, float, float]]) -> tuple[float, float, float, float]: class Straight:
arr = numpy.asarray(bounds_list, dtype=numpy.float64) """
return ( Move generator for straight waveguide segments.
float(arr[:, 0].min()), """
float(arr[:, 1].min()), @staticmethod
float(arr[:, 2].max()), def generate(
float(arr[:, 3].max()), start_port: Port,
length: float,
width: float,
snap_to_grid: bool = True,
dilation: float = 0.0,
snap_size: float = DEFAULT_SEARCH_GRID_SNAP_UM,
) -> ComponentResult:
"""
Generate a straight waveguide segment.
"""
rad = numpy.radians(start_port.orientation)
cos_val = numpy.cos(rad)
sin_val = numpy.sin(rad)
ex = start_port.x + length * cos_val
ey = start_port.y + length * sin_val
if snap_to_grid:
ex = snap_search_grid(ex, snap_size)
ey = snap_search_grid(ey, snap_size)
end_port = Port(ex, ey, start_port.orientation)
actual_length = numpy.sqrt((end_port.x - start_port.x)**2 + (end_port.y - start_port.y)**2)
# Create polygons using vectorized points
half_w = width / 2.0
pts_raw = numpy.array([
[0, half_w],
[actual_length, half_w],
[actual_length, -half_w],
[0, -half_w]
])
# Rotation matrix (standard 2D rotation)
rot_matrix = numpy.array([[cos_val, -sin_val], [sin_val, cos_val]])
# Transform points: P' = R * P + T
poly_points = (pts_raw @ rot_matrix.T) + [start_port.x, start_port.y]
geom = [Polygon(poly_points)]
dilated_geom = None
if dilation > 0:
# Direct calculation of dilated rectangle instead of expensive buffer()
half_w_dil = half_w + dilation
pts_dil = numpy.array([
[-dilation, half_w_dil],
[actual_length + dilation, half_w_dil],
[actual_length + dilation, -half_w_dil],
[-dilation, -half_w_dil]
])
poly_points_dil = (pts_dil @ rot_matrix.T) + [start_port.x, start_port.y]
dilated_geom = [Polygon(poly_points_dil)]
# Pre-calculate grid indices for faster ComponentResult init
inv_snap = 1.0 / snap_size
rgx = int(round(ex * inv_snap))
rgy = int(round(ey * inv_snap))
rgo = int(round(start_port.orientation))
# For straight segments, geom IS the actual geometry
return ComponentResult(
geometry=geom, end_port=end_port, length=actual_length,
dilated_geometry=dilated_geom, actual_geometry=geom,
dilated_actual_geometry=dilated_geom, move_type='Straight',
snap_size=snap_size, rel_gx=rgx, rel_gy=rgy, rel_go=rgo
) )
def _get_num_segments(radius: float, angle_deg: float, sagitta: float = 0.01) -> int: def _get_num_segments(radius: float, angle_deg: float, sagitta: float = 0.01) -> int:
"""
Calculate number of segments for an arc to maintain a maximum sagitta.
"""
if radius <= 0: if radius <= 0:
return 1 return 1
ratio = max(0.0, min(1.0, 1.0 - sagitta / radius)) ratio = max(0.0, min(1.0, 1.0 - sagitta / radius))
@ -111,35 +332,86 @@ def _get_num_segments(radius: float, angle_deg: float, sagitta: float = 0.01) ->
def _get_arc_polygons( def _get_arc_polygons(
cxy: tuple[float, float], cx: float,
cy: float,
radius: float, radius: float,
width: float, width: float,
ts: tuple[float, float], t_start: float,
t_end: float,
sagitta: float = 0.01, sagitta: float = 0.01,
dilation: float = 0.0, dilation: float = 0.0,
) -> list[Polygon]: ) -> list[Polygon]:
t_start, t_end = numpy.radians(ts[0]), numpy.radians(ts[1]) """
num_segments = _get_num_segments(radius, abs(ts[1] - ts[0]), sagitta) Helper to generate arc-shaped polygons using vectorized NumPy operations.
"""
num_segments = _get_num_segments(radius, float(numpy.degrees(abs(t_end - t_start))), sagitta)
angles = numpy.linspace(t_start, t_end, num_segments + 1) angles = numpy.linspace(t_start, t_end, num_segments + 1)
cx, cy = cxy
inner_radius = radius - width / 2.0 - dilation
outer_radius = radius + width / 2.0 + dilation
cos_a = numpy.cos(angles) cos_a = numpy.cos(angles)
sin_a = numpy.sin(angles) sin_a = numpy.sin(angles)
inner_points = numpy.column_stack((cx + inner_radius * cos_a, cy + inner_radius * sin_a)) inner_radius = radius - width / 2.0 - dilation
outer_points = numpy.column_stack((cx + outer_radius * cos_a[::-1], cy + outer_radius * sin_a[::-1])) outer_radius = radius + width / 2.0 + dilation
return [Polygon(numpy.concatenate((inner_points, outer_points), axis=0))]
inner_points = numpy.stack([cx + inner_radius * cos_a, cy + inner_radius * sin_a], axis=1)
outer_points = numpy.stack([cx + outer_radius * cos_a[::-1], cy + outer_radius * sin_a[::-1]], axis=1)
# Concatenate inner and outer points to form the polygon ring
poly_points = numpy.concatenate([inner_points, outer_points])
return [Polygon(poly_points)]
def _clip_bbox(cxy: tuple[float, float], radius: float, width: float, ts: tuple[float, float], clip_margin: float) -> Polygon: def _clip_bbox(
arc_poly = _get_arc_polygons(cxy, radius, width, ts)[0] cx: float,
minx, miny, maxx, maxy = arc_poly.bounds cy: float,
bbox_poly = box(minx, miny, maxx, maxy) radius: float,
shrink = min(clip_margin, max(radius, width)) width: float,
return bbox_poly.buffer(-shrink, join_style=2) if shrink > 0 else bbox_poly t_start: float,
t_end: float,
) -> Polygon:
"""
Generates a rotationally invariant bounding polygon for an arc.
"""
sweep = abs(t_end - t_start)
if sweep > 2 * numpy.pi:
sweep = sweep % (2 * numpy.pi)
mid_angle = (t_start + t_end) / 2.0
# Handle wrap-around for mid_angle
if abs(t_end - t_start) > numpy.pi:
mid_angle += numpy.pi
r_out = radius + width / 2.0
r_in = max(0.0, radius - width / 2.0)
half_sweep = sweep / 2.0
# Define vertices in local space (center at 0,0, symmetry axis along +X)
cos_hs = numpy.cos(half_sweep)
cos_hs2 = numpy.cos(half_sweep / 2.0)
# Distance to peak from center: r_out / cos(hs/2)
peak_r = r_out / cos_hs2
local_verts = [
[r_in * numpy.cos(-half_sweep), r_in * numpy.sin(-half_sweep)],
[r_out * numpy.cos(-half_sweep), r_out * numpy.sin(-half_sweep)],
[peak_r * numpy.cos(-half_sweep/2), peak_r * numpy.sin(-half_sweep/2)],
[peak_r * numpy.cos(half_sweep/2), peak_r * numpy.sin(half_sweep/2)],
[r_out * numpy.cos(half_sweep), r_out * numpy.sin(half_sweep)],
[r_in * numpy.cos(half_sweep), r_in * numpy.sin(half_sweep)],
[r_in, 0.0]
]
# Rotate and translate to world space
cos_m = numpy.cos(mid_angle)
sin_m = numpy.sin(mid_angle)
rot = numpy.array([[cos_m, -sin_m], [sin_m, cos_m]])
world_verts = (numpy.array(local_verts) @ rot.T) + [cx, cy]
return Polygon(world_verts)
def _apply_collision_model( def _apply_collision_model(
@ -147,57 +419,39 @@ def _apply_collision_model(
collision_type: Literal["arc", "bbox", "clipped_bbox"] | Polygon, collision_type: Literal["arc", "bbox", "clipped_bbox"] | Polygon,
radius: float, radius: float,
width: float, width: float,
cxy: tuple[float, float], cx: float = 0.0,
clip_margin: float, cy: float = 0.0,
ts: tuple[float, float], clip_margin: float = 10.0,
) -> list[Polygon]: t_start: float | None = None,
t_end: float | None = None,
) -> list[Polygon]:
"""
Applies the specified collision model to an arc geometry.
"""
if isinstance(collision_type, Polygon): if isinstance(collision_type, Polygon):
return [shapely_translate(collision_type, cxy[0], cxy[1])] # Translate the custom polygon to the bend center (cx, cy)
return [shapely.transform(collision_type, lambda x: x + [cx, cy])]
if collision_type == "arc": if collision_type == "arc":
return [arc_poly] return [arc_poly]
if collision_type == "clipped_bbox":
clipped = _clip_bbox(cxy, radius, width, ts, clip_margin)
return [clipped if not clipped.is_empty else box(*arc_poly.bounds)]
return [box(*arc_poly.bounds)]
if collision_type == "clipped_bbox" and t_start is not None and t_end is not None:
return [_clip_bbox(cx, cy, radius, width, t_start, t_end)]
class Straight: # Bounding box of the high-fidelity arc (fallback for bbox or missing angles)
@staticmethod minx, miny, maxx, maxy = arc_poly.bounds
def generate( bbox_poly = box(minx, miny, maxx, maxy)
start_port: Port,
length: float,
width: float,
dilation: float = 0.0,
) -> ComponentResult:
rot2 = rotation_matrix2(start_port.r)
length_f = _normalize_length(length)
disp = rot2 @ numpy.array((length_f, 0.0))
end_port = Port(start_port.x + disp[0], start_port.y + disp[1], start_port.r)
half_w = width / 2.0 if collision_type == "bbox":
pts = numpy.array(((0.0, half_w), (length_f, half_w), (length_f, -half_w), (0.0, -half_w))) return [bbox_poly]
poly_points = (pts @ rot2.T) + numpy.array((start_port.x, start_port.y))
geometry = [Polygon(poly_points)]
dilated_geometry = None return [arc_poly]
if dilation > 0:
half_w_d = half_w + dilation
pts_d = numpy.array(((-dilation, half_w_d), (length_f + dilation, half_w_d), (length_f + dilation, -half_w_d), (-dilation, -half_w_d)))
poly_points_d = (pts_d @ rot2.T) + numpy.array((start_port.x, start_port.y))
dilated_geometry = [Polygon(poly_points_d)]
return ComponentResult(
geometry=geometry,
end_port=end_port,
length=abs(length_f),
move_type="Straight",
dilated_geometry=dilated_geometry,
actual_geometry=geometry,
dilated_actual_geometry=dilated_geometry,
)
class Bend90: class Bend90:
"""
Move generator for 90-degree waveguide bends.
"""
@staticmethod @staticmethod
def generate( def generate(
start_port: Port, start_port: Port,
@ -208,62 +462,103 @@ class Bend90:
collision_type: Literal["arc", "bbox", "clipped_bbox"] | Polygon = "arc", collision_type: Literal["arc", "bbox", "clipped_bbox"] | Polygon = "arc",
clip_margin: float = 10.0, clip_margin: float = 10.0,
dilation: float = 0.0, dilation: float = 0.0,
snap_to_grid: bool = True,
snap_size: float = DEFAULT_SEARCH_GRID_SNAP_UM,
) -> ComponentResult: ) -> ComponentResult:
rot2 = rotation_matrix2(start_port.r) """
sign = 1 if direction == "CCW" else -1 Generate a 90-degree bend.
"""
rad_start = numpy.radians(start_port.orientation)
center_local = numpy.array((0.0, sign * radius)) # Center of the arc
end_local = numpy.array((radius, sign * radius)) if direction == "CCW":
center_xy = (rot2 @ center_local) + numpy.array((start_port.x, start_port.y)) cx = start_port.x + radius * numpy.cos(rad_start + numpy.pi / 2)
end_xy = (rot2 @ end_local) + numpy.array((start_port.x, start_port.y)) cy = start_port.y + radius * numpy.sin(rad_start + numpy.pi / 2)
end_port = Port(end_xy[0], end_xy[1], start_port.r + sign * 90) t_start = rad_start - numpy.pi / 2
t_end = t_start + numpy.pi / 2
new_ori = (start_port.orientation + 90) % 360
else:
cx = start_port.x + radius * numpy.cos(rad_start - numpy.pi / 2)
cy = start_port.y + radius * numpy.sin(rad_start - numpy.pi / 2)
t_start = rad_start + numpy.pi / 2
t_end = t_start - numpy.pi / 2
new_ori = (start_port.orientation - 90) % 360
start_theta = start_port.r - sign * 90 # Snap the end point to the grid
end_theta = start_port.r ex_raw = cx + radius * numpy.cos(t_end)
ts = (float(start_theta), float(end_theta)) ey_raw = cy + radius * numpy.sin(t_end)
arc_polys = _get_arc_polygons((float(center_xy[0]), float(center_xy[1])), radius, width, ts, sagitta) if snap_to_grid:
ex = snap_search_grid(ex_raw, snap_size)
ey = snap_search_grid(ey_raw, snap_size)
else:
ex, ey = ex_raw, ey_raw
# Slightly adjust radius and t_end to hit snapped point exactly
dx, dy = ex - cx, ey - cy
actual_radius = numpy.sqrt(dx**2 + dy**2)
t_end_snapped = numpy.arctan2(dy, dx)
# Ensure directionality and approx 90 degree sweep
if direction == "CCW":
while t_end_snapped <= t_start:
t_end_snapped += 2 * numpy.pi
while t_end_snapped > t_start + numpy.pi:
t_end_snapped -= 2 * numpy.pi
else:
while t_end_snapped >= t_start:
t_end_snapped -= 2 * numpy.pi
while t_end_snapped < t_start - numpy.pi:
t_end_snapped += 2 * numpy.pi
t_end = t_end_snapped
end_port = Port(ex, ey, new_ori)
arc_polys = _get_arc_polygons(cx, cy, actual_radius, width, t_start, t_end, sagitta)
collision_polys = _apply_collision_model( collision_polys = _apply_collision_model(
arc_polys[0], arc_polys[0], collision_type, actual_radius, width, cx, cy, clip_margin, t_start, t_end
collision_type,
radius,
width,
(float(center_xy[0]), float(center_xy[1])),
clip_margin,
ts,
) )
proxy_geometry = None proxy_geom = None
if collision_type == "arc": if collision_type == "arc":
proxy_geometry = _apply_collision_model( # Auto-generate a clipped_bbox proxy for tiered collision checks
arc_polys[0], proxy_geom = _apply_collision_model(
"clipped_bbox", arc_polys[0], "clipped_bbox", actual_radius, width, cx, cy, clip_margin, t_start, t_end
radius,
width,
(float(center_xy[0]), float(center_xy[1])),
clip_margin,
ts,
) )
dilated_actual_geometry = None dilated_geom = None
dilated_geometry = None dilated_actual_geom = None
if dilation > 0: if dilation > 0:
dilated_actual_geometry = _get_arc_polygons((float(center_xy[0]), float(center_xy[1])), radius, width, ts, sagitta, dilation=dilation) dilated_actual_geom = _get_arc_polygons(cx, cy, actual_radius, width, t_start, t_end, sagitta, dilation=dilation)
dilated_geometry = dilated_actual_geometry if collision_type == "arc" else [poly.buffer(dilation) for poly in collision_polys] if collision_type == "arc":
dilated_geom = dilated_actual_geom
else:
dilated_geom = [p.buffer(dilation) for p in collision_polys]
# Pre-calculate grid indices for faster ComponentResult init
inv_snap = 1.0 / snap_size
rgx = int(round(ex * inv_snap))
rgy = int(round(ey * inv_snap))
rgo = int(round(new_ori))
return ComponentResult( return ComponentResult(
geometry=collision_polys, geometry=collision_polys,
end_port=end_port, end_port=end_port,
length=abs(radius) * numpy.pi / 2.0, length=actual_radius * numpy.abs(t_end - t_start),
move_type="Bend90", dilated_geometry=dilated_geom,
dilated_geometry=dilated_geometry, proxy_geometry=proxy_geom,
proxy_geometry=proxy_geometry,
actual_geometry=arc_polys, actual_geometry=arc_polys,
dilated_actual_geometry=dilated_actual_geometry, dilated_actual_geometry=dilated_actual_geom,
move_type='Bend90',
snap_size=snap_size,
rel_gx=rgx, rel_gy=rgy, rel_go=rgo
) )
class SBend: class SBend:
"""
Move generator for parametric S-bends.
"""
@staticmethod @staticmethod
def generate( def generate(
start_port: Port, start_port: Port,
@ -274,60 +569,113 @@ class SBend:
collision_type: Literal["arc", "bbox", "clipped_bbox"] | Polygon = "arc", collision_type: Literal["arc", "bbox", "clipped_bbox"] | Polygon = "arc",
clip_margin: float = 10.0, clip_margin: float = 10.0,
dilation: float = 0.0, dilation: float = 0.0,
snap_to_grid: bool = True,
snap_size: float = DEFAULT_SEARCH_GRID_SNAP_UM,
) -> ComponentResult: ) -> ComponentResult:
"""
Generate a parametric S-bend (two tangent arcs).
"""
if abs(offset) >= 2 * radius: if abs(offset) >= 2 * radius:
raise ValueError(f"SBend offset {offset} must be less than 2*radius {2 * radius}") raise ValueError(f"SBend offset {offset} must be less than 2*radius {2 * radius}")
sign = 1 if offset >= 0 else -1 theta_init = numpy.arccos(1 - abs(offset) / (2 * radius))
theta = numpy.arccos(1.0 - abs(offset) / (2.0 * radius)) dx_init = 2 * radius * numpy.sin(theta_init)
dx = 2.0 * radius * numpy.sin(theta) rad_start = numpy.radians(start_port.orientation)
theta_deg = float(numpy.degrees(theta))
rot2 = rotation_matrix2(start_port.r) # Target point
end_local = numpy.array((dx, offset)) ex_raw = start_port.x + dx_init * numpy.cos(rad_start) - offset * numpy.sin(rad_start)
end_xy = (rot2 @ end_local) + numpy.array((start_port.x, start_port.y)) ey_raw = start_port.y + dx_init * numpy.sin(rad_start) + offset * numpy.cos(rad_start)
end_port = Port(end_xy[0], end_xy[1], start_port.r)
c1_local = numpy.array((0.0, sign * radius)) if snap_to_grid:
c2_local = numpy.array((dx, offset - sign * radius)) ex = snap_search_grid(ex_raw, snap_size)
c1_xy = (rot2 @ c1_local) + numpy.array((start_port.x, start_port.y)) ey = snap_search_grid(ey_raw, snap_size)
c2_xy = (rot2 @ c2_local) + numpy.array((start_port.x, start_port.y)) else:
ex, ey = ex_raw, ey_raw
ts1 = (float(start_port.r - sign * 90), float(start_port.r - sign * 90 + sign * theta_deg)) end_port = Port(ex, ey, start_port.orientation)
second_base = start_port.r + (90 if sign > 0 else 270)
ts2 = (float(second_base + sign * theta_deg), float(second_base))
arc1 = _get_arc_polygons((float(c1_xy[0]), float(c1_xy[1])), radius, width, ts1, sagitta)[0] # Solve for theta and radius that hit (ex, ey) exactly
arc2 = _get_arc_polygons((float(c2_xy[0]), float(c2_xy[1])), radius, width, ts2, sagitta)[0] local_dx = (ex - start_port.x) * numpy.cos(rad_start) + (ey - start_port.y) * numpy.sin(rad_start)
actual_geometry = [arc1, arc2] local_dy = -(ex - start_port.x) * numpy.sin(rad_start) + (ey - start_port.y) * numpy.cos(rad_start)
geometry = [
_apply_collision_model(arc1, collision_type, radius, width, (float(c1_xy[0]), float(c1_xy[1])), clip_margin, ts1)[0],
_apply_collision_model(arc2, collision_type, radius, width, (float(c2_xy[0]), float(c2_xy[1])), clip_margin, ts2)[0],
]
proxy_geometry = None # tan(theta / 2) = local_dy / local_dx
theta = 2 * numpy.arctan2(abs(local_dy), local_dx)
if abs(theta) < TOLERANCE_ANGULAR:
# De-generate to straight
actual_len = numpy.sqrt(local_dx**2 + local_dy**2)
return Straight.generate(start_port, actual_len, width, snap_to_grid=False, dilation=dilation, snap_size=snap_size)
denom = (2 * (1 - numpy.cos(theta)))
if abs(denom) < TOLERANCE_LINEAR:
raise ValueError("SBend calculation failed: radius denominator zero")
actual_radius = abs(local_dy) / denom
# Safety Check: Reject SBends with tiny radii that would cause self-overlap
if actual_radius < width:
raise ValueError(f"SBend actual_radius {actual_radius:.3f} is too small (width={width})")
# Limit radius to prevent giant arcs
if actual_radius > 100000.0:
actual_len = numpy.sqrt(local_dx**2 + local_dy**2)
return Straight.generate(start_port, actual_len, width, snap_to_grid=False, dilation=dilation, snap_size=snap_size)
direction = 1 if local_dy > 0 else -1
c1_angle = rad_start + direction * numpy.pi / 2
cx1 = start_port.x + actual_radius * numpy.cos(c1_angle)
cy1 = start_port.y + actual_radius * numpy.sin(c1_angle)
ts1, te1 = c1_angle + numpy.pi, c1_angle + numpy.pi + direction * theta
c2_angle = rad_start - direction * numpy.pi / 2
cx2 = ex + actual_radius * numpy.cos(c2_angle)
cy2 = ey + actual_radius * numpy.sin(c2_angle)
te2 = c2_angle + numpy.pi
ts2 = te2 + direction * theta
arc1 = _get_arc_polygons(cx1, cy1, actual_radius, width, ts1, te1, sagitta)[0]
arc2 = _get_arc_polygons(cx2, cy2, actual_radius, width, ts2, te2, sagitta)[0]
arc_polys = [arc1, arc2]
# Use the provided collision model for primary geometry
col1 = _apply_collision_model(arc1, collision_type, actual_radius, width, cx1, cy1, clip_margin, ts1, te1)[0]
col2 = _apply_collision_model(arc2, collision_type, actual_radius, width, cx2, cy2, clip_margin, ts2, te2)[0]
collision_polys = [col1, col2]
proxy_geom = None
if collision_type == "arc": if collision_type == "arc":
proxy_geometry = [ # Auto-generate proxies
_apply_collision_model(arc1, "clipped_bbox", radius, width, (float(c1_xy[0]), float(c1_xy[1])), clip_margin, ts1)[0], p1 = _apply_collision_model(arc1, "clipped_bbox", actual_radius, width, cx1, cy1, clip_margin, ts1, te1)[0]
_apply_collision_model(arc2, "clipped_bbox", radius, width, (float(c2_xy[0]), float(c2_xy[1])), clip_margin, ts2)[0], p2 = _apply_collision_model(arc2, "clipped_bbox", actual_radius, width, cx2, cy2, clip_margin, ts2, te2)[0]
] proxy_geom = [p1, p2]
dilated_actual_geometry = None dilated_geom = None
dilated_geometry = None dilated_actual_geom = None
if dilation > 0: if dilation > 0:
dilated_actual_geometry = [ d1 = _get_arc_polygons(cx1, cy1, actual_radius, width, ts1, te1, sagitta, dilation=dilation)[0]
_get_arc_polygons((float(c1_xy[0]), float(c1_xy[1])), radius, width, ts1, sagitta, dilation=dilation)[0], d2 = _get_arc_polygons(cx2, cy2, actual_radius, width, ts2, te2, sagitta, dilation=dilation)[0]
_get_arc_polygons((float(c2_xy[0]), float(c2_xy[1])), radius, width, ts2, sagitta, dilation=dilation)[0], dilated_actual_geom = [d1, d2]
]
dilated_geometry = dilated_actual_geometry if collision_type == "arc" else [poly.buffer(dilation) for poly in geometry] if collision_type == "arc":
dilated_geom = dilated_actual_geom
else:
dilated_geom = [p.buffer(dilation) for p in collision_polys]
# Pre-calculate grid indices for faster ComponentResult init
inv_snap = 1.0 / snap_size
rgx = int(round(ex * inv_snap))
rgy = int(round(ey * inv_snap))
rgo = int(round(start_port.orientation))
return ComponentResult( return ComponentResult(
geometry=geometry, geometry=collision_polys,
end_port=end_port, end_port=end_port,
length=2.0 * radius * theta, length=2 * actual_radius * theta,
move_type="SBend", dilated_geometry=dilated_geom,
dilated_geometry=dilated_geometry, proxy_geometry=proxy_geom,
proxy_geometry=proxy_geometry, actual_geometry=arc_polys,
actual_geometry=actual_geometry, dilated_actual_geometry=dilated_actual_geom,
dilated_actual_geometry=dilated_actual_geometry, move_type='SBend',
snap_size=snap_size,
rel_gx=rgx, rel_gy=rgy, rel_go=rgo
) )

View file

@ -1,160 +1,77 @@
from __future__ import annotations from __future__ import annotations
from collections.abc import Iterator
from typing import Self
import numpy import numpy
from numpy.typing import ArrayLike, NDArray
def _normalize_angle(angle_deg: int | float) -> int: # 1nm snap (0.001 µm)
angle = int(round(angle_deg)) % 360 GRID_SNAP_UM = 0.001
if angle % 90 != 0:
raise ValueError(f"Port angle must be Manhattan (multiple of 90), got {angle_deg!r}")
return angle
def _as_int32_triplet(value: ArrayLike) -> NDArray[numpy.int32]: def snap_nm(value: float) -> float:
arr = numpy.asarray(value, dtype=numpy.int32) """
if arr.shape != (3,): Snap a coordinate to the nearest 1nm (0.001 um).
raise ValueError(f"Port array must have shape (3,), got {arr.shape}") """
arr = arr.copy() return round(value * 1000) / 1000
arr[2] = _normalize_angle(int(arr[2]))
return arr
from inire.constants import TOLERANCE_LINEAR
class Port: class Port:
""" """
Port represented as an ndarray-backed (x, y, r) triple with int32 storage. A port defined by (x, y, orientation) in micrometers.
""" """
__slots__ = ('x', 'y', 'orientation')
__slots__ = ("_xyr",) def __init__(
self,
def __init__(self, x: int | float, y: int | float, r: int | float) -> None: x: float,
self._xyr = numpy.array( y: float,
(int(round(x)), int(round(y)), _normalize_angle(r)), orientation: float,
dtype=numpy.int32, snap: bool = True
) ) -> None:
if snap:
@classmethod self.x = round(x * 1000) / 1000
def from_array(cls, xyr: ArrayLike) -> Self: self.y = round(y * 1000) / 1000
obj = cls.__new__(cls) # Faster orientation normalization for common cases
obj._xyr = _as_int32_triplet(xyr) if 0 <= orientation < 360:
return obj self.orientation = float(orientation)
else:
@property self.orientation = float(orientation % 360)
def x(self) -> int: else:
return int(self._xyr[0]) self.x = x
self.y = y
@x.setter self.orientation = float(orientation)
def x(self, val: int | float) -> None:
self._xyr[0] = int(round(val))
@property
def y(self) -> int:
return int(self._xyr[1])
@y.setter
def y(self, val: int | float) -> None:
self._xyr[1] = int(round(val))
@property
def r(self) -> int:
return int(self._xyr[2])
@r.setter
def r(self, val: int | float) -> None:
self._xyr[2] = _normalize_angle(val)
@property
def orientation(self) -> int:
return self.r
@orientation.setter
def orientation(self, val: int | float) -> None:
self.r = val
@property
def xyr(self) -> NDArray[numpy.int32]:
return self._xyr
@xyr.setter
def xyr(self, val: ArrayLike) -> None:
self._xyr = _as_int32_triplet(val)
def __repr__(self) -> str: def __repr__(self) -> str:
return f"Port(x={self.x}, y={self.y}, r={self.r})" return f'Port(x={self.x}, y={self.y}, orientation={self.orientation})'
def __iter__(self) -> Iterator[int]:
return iter((self.x, self.y, self.r))
def __len__(self) -> int:
return 3
def __getitem__(self, item: int | slice) -> int | NDArray[numpy.int32]:
return self._xyr[item]
def __array__(self, dtype: numpy.dtype | None = None) -> NDArray[numpy.int32]:
return numpy.asarray(self._xyr, dtype=dtype)
def __eq__(self, other: object) -> bool: def __eq__(self, other: object) -> bool:
if not isinstance(other, Port): if not isinstance(other, Port):
return False return False
return bool(numpy.array_equal(self._xyr, other._xyr)) return (abs(self.x - other.x) < TOLERANCE_LINEAR and
abs(self.y - other.y) < TOLERANCE_LINEAR and
abs(self.orientation - other.orientation) < TOLERANCE_LINEAR)
def __hash__(self) -> int: def __hash__(self) -> int:
return hash(self.as_tuple()) return hash((round(self.x, 6), round(self.y, 6), round(self.orientation, 6)))
def copy(self) -> Self:
return type(self).from_array(self._xyr.copy())
def as_tuple(self) -> tuple[int, int, int]:
return (self.x, self.y, self.r)
def translate(self, dxy: ArrayLike) -> Self:
dxy_arr = numpy.asarray(dxy, dtype=numpy.int32)
if dxy_arr.shape == (2,):
return type(self)(self.x + int(dxy_arr[0]), self.y + int(dxy_arr[1]), self.r)
if dxy_arr.shape == (3,):
return type(self)(self.x + int(dxy_arr[0]), self.y + int(dxy_arr[1]), self.r + int(dxy_arr[2]))
raise ValueError(f"Translation must have shape (2,) or (3,), got {dxy_arr.shape}")
def __add__(self, other: ArrayLike) -> Self:
return self.translate(other)
def __sub__(self, other: ArrayLike | Self) -> NDArray[numpy.int32]:
if isinstance(other, Port):
return self._xyr - other._xyr
return self._xyr - numpy.asarray(other, dtype=numpy.int32)
ROT2_0 = numpy.array(((1, 0), (0, 1)), dtype=numpy.int32) def translate_port(port: Port, dx: float, dy: float) -> Port:
ROT2_90 = numpy.array(((0, -1), (1, 0)), dtype=numpy.int32) """
ROT2_180 = numpy.array(((-1, 0), (0, -1)), dtype=numpy.int32) Translate a port by (dx, dy).
ROT2_270 = numpy.array(((0, 1), (-1, 0)), dtype=numpy.int32) """
return Port(port.x + dx, port.y + dy, port.orientation)
def rotation_matrix2(rotation_deg: int) -> NDArray[numpy.int32]: def rotate_port(port: Port, angle: float, origin: tuple[float, float] = (0, 0)) -> Port:
quadrant = (_normalize_angle(rotation_deg) // 90) % 4 """
return (ROT2_0, ROT2_90, ROT2_180, ROT2_270)[quadrant] Rotate a port by a multiple of 90 degrees around an origin.
"""
ox, oy = origin
px, py = port.x, port.y
rad = numpy.radians(angle)
qx = snap_nm(ox + numpy.cos(rad) * (px - ox) - numpy.sin(rad) * (py - oy))
qy = snap_nm(oy + numpy.sin(rad) * (px - ox) + numpy.cos(rad) * (py - oy))
def rotation_matrix3(rotation_deg: int) -> NDArray[numpy.int32]: return Port(qx, qy, port.orientation + angle)
rot2 = rotation_matrix2(rotation_deg)
rot3 = numpy.zeros((3, 3), dtype=numpy.int32)
rot3[:2, :2] = rot2
rot3[2, 2] = 1
return rot3
def translate_port(port: Port, dx: int | float, dy: int | float) -> Port:
return Port(port.x + dx, port.y + dy, port.r)
def rotate_port(port: Port, angle: int | float, origin: tuple[int | float, int | float] = (0, 0)) -> Port:
angle_i = _normalize_angle(angle)
rot = rotation_matrix2(angle_i)
origin_xy = numpy.array((int(round(origin[0])), int(round(origin[1]))), dtype=numpy.int32)
rel = numpy.array((port.x, port.y), dtype=numpy.int32) - origin_xy
rotated = origin_xy + rot @ rel
return Port(int(rotated[0]), int(rotated[1]), port.r + angle_i)

View file

@ -2,16 +2,16 @@ from __future__ import annotations
import heapq import heapq
import logging import logging
import math from typing import TYPE_CHECKING, Literal, Any
from typing import TYPE_CHECKING, Any, Literal
import numpy
import shapely import shapely
from inire.constants import TOLERANCE_LINEAR from inire.geometry.components import Bend90, SBend, Straight, snap_search_grid
from inire.geometry.components import Bend90, SBend, Straight
from inire.geometry.primitives import Port from inire.geometry.primitives import Port
from inire.router.config import RouterConfig, VisibilityGuidanceMode from inire.router.config import RouterConfig
from inire.router.visibility import VisibilityManager from inire.router.visibility import VisibilityManager
from inire.constants import DEFAULT_SEARCH_GRID_SNAP_UM, TOLERANCE_LINEAR, TOLERANCE_ANGULAR
if TYPE_CHECKING: if TYPE_CHECKING:
from inire.geometry.components import ComponentResult from inire.geometry.components import ComponentResult
@ -21,7 +21,10 @@ logger = logging.getLogger(__name__)
class AStarNode: class AStarNode:
__slots__ = ("port", "g_cost", "h_cost", "fh_cost", "parent", "component_result") """
A node in the A* search tree.
"""
__slots__ = ('port', 'g_cost', 'h_cost', 'fh_cost', 'parent', 'component_result')
def __init__( def __init__(
self, self,
@ -43,20 +46,16 @@ class AStarNode:
class AStarMetrics: class AStarMetrics:
__slots__ = ( """
"total_nodes_expanded", Performance metrics and instrumentation for A* search.
"last_expanded_nodes", """
"nodes_expanded", __slots__ = ('total_nodes_expanded', 'last_expanded_nodes', 'nodes_expanded',
"moves_generated", 'moves_generated', 'moves_added', 'pruned_closed_set',
"moves_added", 'pruned_hard_collision', 'pruned_cost')
"pruned_closed_set",
"pruned_hard_collision",
"pruned_cost",
)
def __init__(self) -> None: def __init__(self) -> None:
self.total_nodes_expanded = 0 self.total_nodes_expanded = 0
self.last_expanded_nodes: list[tuple[int, int, int]] = [] self.last_expanded_nodes: list[tuple[float, float, float]] = []
self.nodes_expanded = 0 self.nodes_expanded = 0
self.moves_generated = 0 self.moves_generated = 0
self.moves_added = 0 self.moves_added = 0
@ -65,6 +64,7 @@ class AStarMetrics:
self.pruned_cost = 0 self.pruned_cost = 0
def reset_per_route(self) -> None: def reset_per_route(self) -> None:
""" Reset metrics that are specific to a single route() call. """
self.nodes_expanded = 0 self.nodes_expanded = 0
self.moves_generated = 0 self.moves_generated = 0
self.moves_added = 0 self.moves_added = 0
@ -73,73 +73,97 @@ class AStarMetrics:
self.pruned_cost = 0 self.pruned_cost = 0
self.last_expanded_nodes = [] self.last_expanded_nodes = []
def get_summary_dict(self) -> dict[str, int]:
""" Return a dictionary of current metrics. """
return {
'nodes_expanded': self.nodes_expanded,
'moves_generated': self.moves_generated,
'moves_added': self.moves_added,
'pruned_closed_set': self.pruned_closed_set,
'pruned_hard_collision': self.pruned_hard_collision,
'pruned_cost': self.pruned_cost
}
class AStarContext: class AStarContext:
__slots__ = ( """
"cost_evaluator", Persistent state for A* search, decoupled from search logic.
"config", """
"visibility_manager", __slots__ = ('cost_evaluator', 'config', 'visibility_manager',
"move_cache_rel", 'move_cache_rel', 'move_cache_abs', 'hard_collision_set', 'static_safe_cache', 'max_cache_size')
"move_cache_abs",
"hard_collision_set",
"static_safe_cache",
"max_cache_size",
)
def __init__( def __init__(
self, self,
cost_evaluator: CostEvaluator, cost_evaluator: CostEvaluator,
node_limit: int = 1000000, node_limit: int = 1000000,
snap_size: float = DEFAULT_SEARCH_GRID_SNAP_UM,
max_straight_length: float = 2000.0, max_straight_length: float = 2000.0,
min_straight_length: float = 5.0, min_straight_length: float = 5.0,
bend_radii: list[float] | None = None, bend_radii: list[float] | None = None,
sbend_radii: list[float] | None = None, sbend_radii: list[float] | None = None,
sbend_offsets: list[float] | None = None, sbend_offsets: list[float] | None = None,
bend_penalty: float = 250.0, bend_penalty: float = 250.0,
sbend_penalty: float | None = None, sbend_penalty: float = 500.0,
bend_collision_type: Literal["arc", "bbox", "clipped_bbox"] | Any = "arc", bend_collision_type: Literal["arc", "bbox", "clipped_bbox"] | Any = "arc",
bend_clip_margin: float = 10.0, bend_clip_margin: float = 10.0,
visibility_guidance: VisibilityGuidanceMode = "tangent_corner",
max_cache_size: int = 1000000, max_cache_size: int = 1000000,
) -> None: ) -> None:
actual_sbend_penalty = 2.0 * bend_penalty if sbend_penalty is None else sbend_penalty
self.cost_evaluator = cost_evaluator self.cost_evaluator = cost_evaluator
self.max_cache_size = max_cache_size self.max_cache_size = max_cache_size
# Use provided lists or defaults for the configuration
br = bend_radii if bend_radii is not None else [50.0, 100.0]
sr = sbend_radii if sbend_radii is not None else [5.0, 10.0, 50.0, 100.0]
self.config = RouterConfig( self.config = RouterConfig(
node_limit=node_limit, node_limit=node_limit,
snap_size=snap_size,
max_straight_length=max_straight_length, max_straight_length=max_straight_length,
min_straight_length=min_straight_length, min_straight_length=min_straight_length,
bend_radii=bend_radii if bend_radii is not None else [50.0, 100.0], bend_radii=br,
sbend_radii=sbend_radii if sbend_radii is not None else [5.0, 10.0, 50.0, 100.0], sbend_radii=sr,
sbend_offsets=sbend_offsets, sbend_offsets=sbend_offsets,
bend_penalty=bend_penalty, bend_penalty=bend_penalty,
sbend_penalty=actual_sbend_penalty, sbend_penalty=sbend_penalty,
bend_collision_type=bend_collision_type, bend_collision_type=bend_collision_type,
bend_clip_margin=bend_clip_margin, bend_clip_margin=bend_clip_margin
visibility_guidance=visibility_guidance,
) )
self.cost_evaluator.config = self.config self.cost_evaluator.config = self.config
self.cost_evaluator._refresh_cached_config()
self.visibility_manager = VisibilityManager(self.cost_evaluator.collision_engine) self.visibility_manager = VisibilityManager(self.cost_evaluator.collision_engine)
# Long-lived caches (shared across multiple route calls)
self.move_cache_rel: dict[tuple, ComponentResult] = {} self.move_cache_rel: dict[tuple, ComponentResult] = {}
self.move_cache_abs: dict[tuple, ComponentResult] = {} self.move_cache_abs: dict[tuple, ComponentResult] = {}
self.hard_collision_set: set[tuple] = set() self.hard_collision_set: set[tuple] = set()
self.static_safe_cache: set[tuple] = set() self.static_safe_cache: set[tuple] = set()
def clear_static_caches(self) -> None: def clear_static_caches(self) -> None:
""" Clear caches that depend on the state of static obstacles. """
self.hard_collision_set.clear() self.hard_collision_set.clear()
self.static_safe_cache.clear() self.static_safe_cache.clear()
self.visibility_manager.clear_cache()
def check_cache_eviction(self) -> None: def check_cache_eviction(self) -> None:
if len(self.move_cache_abs) <= self.max_cache_size * 1.2: """
return Trigger FIFO eviction of Absolute moves if cache exceeds max_cache_size.
We preserve Relative move templates.
"""
# Trigger eviction if 20% over limit to reduce frequency
if len(self.move_cache_abs) > self.max_cache_size * 1.2:
num_to_evict = int(len(self.move_cache_abs) * 0.25) num_to_evict = int(len(self.move_cache_abs) * 0.25)
for idx, key in enumerate(list(self.move_cache_abs.keys())): # Efficient FIFO eviction
if idx >= num_to_evict: keys_to_evict = []
break it = iter(self.move_cache_abs)
del self.move_cache_abs[key] for _ in range(num_to_evict):
try: keys_to_evict.append(next(it))
except StopIteration: break
for k in keys_to_evict:
del self.move_cache_abs[k]
# Decouple collision cache clearing - only clear if truly massive
if len(self.hard_collision_set) > 2000000:
self.hard_collision_set.clear()
self.static_safe_cache.clear()
def route_astar( def route_astar(
@ -148,38 +172,57 @@ def route_astar(
net_width: float, net_width: float,
context: AStarContext, context: AStarContext,
metrics: AStarMetrics | None = None, metrics: AStarMetrics | None = None,
net_id: str = "default", net_id: str = 'default',
bend_collision_type: Literal["arc", "bbox", "clipped_bbox"] | None = None, bend_collision_type: Literal['arc', 'bbox', 'clipped_bbox'] | None = None,
return_partial: bool = False, return_partial: bool = False,
store_expanded: bool = False, store_expanded: bool = False,
skip_congestion: bool = False, skip_congestion: bool = False,
max_cost: float | None = None, max_cost: float | None = None,
self_collision_check: bool = False, self_collision_check: bool = False,
node_limit: int | None = None, node_limit: int | None = None,
) -> list[ComponentResult] | None: ) -> list[ComponentResult] | None:
"""
Functional implementation of A* routing.
"""
if metrics is None: if metrics is None:
metrics = AStarMetrics() metrics = AStarMetrics()
metrics.reset_per_route() metrics.reset_per_route()
# Enforce Grid Alignment for start and target
snap = context.config.snap_size
start_snapped = Port(snap_search_grid(start.x, snap), snap_search_grid(start.y, snap), start.orientation, snap=False)
target_snapped = Port(snap_search_grid(target.x, snap), snap_search_grid(target.y, snap), target.orientation, snap=False)
# Per-route congestion cache (not shared across different routes)
congestion_cache: dict[tuple, int] = {}
if bend_collision_type is not None: if bend_collision_type is not None:
context.config.bend_collision_type = bend_collision_type context.config.bend_collision_type = bend_collision_type
context.cost_evaluator.set_target(target) context.cost_evaluator.set_target(target_snapped)
open_set: list[AStarNode] = []
closed_set: dict[tuple[int, int, int], float] = {}
congestion_cache: dict[tuple, int] = {}
start_node = AStarNode(start, 0.0, context.cost_evaluator.h_manhattan(start, target)) open_set: list[AStarNode] = []
inv_snap = 1.0 / snap
# (x_grid, y_grid, orientation_grid) -> min_g_cost
closed_set: dict[tuple[int, int, int], float] = {}
start_node = AStarNode(start_snapped, 0.0, context.cost_evaluator.h_manhattan(start_snapped, target_snapped))
heapq.heappush(open_set, start_node) heapq.heappush(open_set, start_node)
best_node = start_node best_node = start_node
effective_node_limit = node_limit if node_limit is not None else context.config.node_limit
nodes_expanded = 0 nodes_expanded = 0
effective_node_limit = node_limit if node_limit is not None else context.config.node_limit
while open_set: while open_set:
if nodes_expanded >= effective_node_limit: if nodes_expanded >= effective_node_limit:
return reconstruct_path(best_node) if return_partial else None return reconstruct_path(best_node) if return_partial else None
current = heapq.heappop(open_set) current = heapq.heappop(open_set)
# Cost Pruning (Fail Fast)
if max_cost is not None and current.fh_cost[0] > max_cost: if max_cost is not None and current.fh_cost[0] > max_cost:
metrics.pruned_cost += 1 metrics.pruned_cost += 1
continue continue
@ -187,147 +230,36 @@ def route_astar(
if current.h_cost < best_node.h_cost: if current.h_cost < best_node.h_cost:
best_node = current best_node = current
state = current.port.as_tuple() state = (int(round(current.port.x * inv_snap)), int(round(current.port.y * inv_snap)), int(round(current.port.orientation)))
if state in closed_set and closed_set[state] <= current.g_cost + TOLERANCE_LINEAR: if state in closed_set and closed_set[state] <= current.g_cost + TOLERANCE_LINEAR:
continue continue
closed_set[state] = current.g_cost closed_set[state] = current.g_cost
if store_expanded: if store_expanded:
metrics.last_expanded_nodes.append(state) metrics.last_expanded_nodes.append((current.port.x, current.port.y, current.port.orientation))
nodes_expanded += 1 nodes_expanded += 1
metrics.total_nodes_expanded += 1 metrics.total_nodes_expanded += 1
metrics.nodes_expanded += 1 metrics.nodes_expanded += 1
if current.port == target: # Check if we reached the target exactly
if (abs(current.port.x - target_snapped.x) < TOLERANCE_LINEAR and
abs(current.port.y - target_snapped.y) < TOLERANCE_LINEAR and
abs(current.port.orientation - target_snapped.orientation) < 0.1):
return reconstruct_path(current) return reconstruct_path(current)
# Expansion
expand_moves( expand_moves(
current, current, target_snapped, net_width, net_id, open_set, closed_set,
target, context, metrics, congestion_cache,
net_width, snap=snap, inv_snap=inv_snap, parent_state=state,
net_id, max_cost=max_cost, skip_congestion=skip_congestion,
open_set, self_collision_check=self_collision_check
closed_set,
context,
metrics,
congestion_cache,
max_cost=max_cost,
skip_congestion=skip_congestion,
self_collision_check=self_collision_check,
) )
return reconstruct_path(best_node) if return_partial else None return reconstruct_path(best_node) if return_partial else None
def _quantized_lengths(values: list[float], max_reach: float) -> list[int]:
out = {int(round(v)) for v in values if v > 0 and v <= max_reach + 0.01}
return sorted((v for v in out if v > 0), reverse=True)
def _sbend_forward_span(offset: float, radius: float) -> float | None:
abs_offset = abs(offset)
if abs_offset <= TOLERANCE_LINEAR or radius <= 0 or abs_offset >= 2.0 * radius:
return None
theta = __import__("math").acos(1.0 - abs_offset / (2.0 * radius))
return 2.0 * radius * __import__("math").sin(theta)
def _visible_straight_candidates(
current: Port,
context: AStarContext,
max_reach: float,
cos_v: float,
sin_v: float,
net_width: float,
) -> list[float]:
mode = context.config.visibility_guidance
if mode == "off":
return []
if mode == "exact_corner":
max_bend_radius = max(context.config.bend_radii, default=0.0)
visibility_reach = max_reach + max_bend_radius
visible_corners = sorted(
context.visibility_manager.get_corner_visibility(current, max_dist=visibility_reach),
key=lambda corner: corner[2],
)
if not visible_corners:
return []
candidates: set[int] = set()
for cx, cy, _ in visible_corners[:12]:
dx = cx - current.x
dy = cy - current.y
local_x = dx * cos_v + dy * sin_v
if local_x <= context.config.min_straight_length:
continue
candidates.add(int(round(local_x)))
return sorted(candidates, reverse=True)
if mode != "tangent_corner":
return []
visibility_manager = context.visibility_manager
visibility_manager._ensure_current()
max_bend_radius = max(context.config.bend_radii, default=0.0)
if max_bend_radius <= 0 or not visibility_manager.corners:
return []
reach = max_reach + max_bend_radius
bounds = (current.x - reach, current.y - reach, current.x + reach, current.y + reach)
candidate_ids = list(visibility_manager.corner_index.intersection(bounds))
if not candidate_ids:
return []
scored: list[tuple[float, float, float, float, float]] = []
for idx in candidate_ids:
cx, cy = visibility_manager.corners[idx]
dx = cx - current.x
dy = cy - current.y
local_x = dx * cos_v + dy * sin_v
local_y = -dx * sin_v + dy * cos_v
if local_x <= context.config.min_straight_length or local_x > reach + 0.01:
continue
nearest_radius = min(context.config.bend_radii, key=lambda radius: abs(abs(local_y) - radius))
tangent_error = abs(abs(local_y) - nearest_radius)
if tangent_error > 2.0:
continue
length = local_x - nearest_radius
if length <= context.config.min_straight_length or length > max_reach + 0.01:
continue
scored.append((tangent_error, math.hypot(dx, dy), length, dx, dy))
if not scored:
return []
collision_engine = context.cost_evaluator.collision_engine
candidates: set[int] = set()
for _, dist, length, dx, dy in sorted(scored)[:4]:
angle = math.degrees(math.atan2(dy, dx))
corner_reach = collision_engine.ray_cast(current, angle, max_dist=dist + 0.05, net_width=net_width)
if corner_reach < dist - 0.01:
continue
qlen = int(round(length))
if qlen > 0:
candidates.add(qlen)
return sorted(candidates, reverse=True)
def _previous_move_metadata(node: AStarNode) -> tuple[str | None, float | None]:
result = node.component_result
if result is None:
return None, None
move_type = result.move_type
if move_type == "Straight":
return move_type, result.length
return move_type, None
def expand_moves( def expand_moves(
current: AStarNode, current: AStarNode,
target: Port, target: Port,
@ -338,174 +270,131 @@ def expand_moves(
context: AStarContext, context: AStarContext,
metrics: AStarMetrics, metrics: AStarMetrics,
congestion_cache: dict[tuple, int], congestion_cache: dict[tuple, int],
snap: float = 1.0,
inv_snap: float | None = None,
parent_state: tuple[int, int, int] | None = None,
max_cost: float | None = None, max_cost: float | None = None,
skip_congestion: bool = False, skip_congestion: bool = False,
self_collision_check: bool = False, self_collision_check: bool = False,
) -> None: ) -> None:
"""
Extract moves and add valid successors to the open set.
"""
cp = current.port cp = current.port
prev_move_type, prev_straight_length = _previous_move_metadata(current) if inv_snap is None: inv_snap = 1.0 / snap
if parent_state is None:
parent_state = (int(round(cp.x * inv_snap)), int(round(cp.y * inv_snap)), int(round(cp.orientation)))
dx_t = target.x - cp.x dx_t = target.x - cp.x
dy_t = target.y - cp.y dy_t = target.y - cp.y
dist_sq = dx_t * dx_t + dy_t * dy_t dist_sq = dx_t*dx_t + dy_t*dy_t
if cp.r == 0: rad = numpy.radians(cp.orientation)
cos_v, sin_v = 1.0, 0.0 cos_v, sin_v = numpy.cos(rad), numpy.sin(rad)
elif cp.r == 90:
cos_v, sin_v = 0.0, 1.0
elif cp.r == 180:
cos_v, sin_v = -1.0, 0.0
else:
cos_v, sin_v = 0.0, -1.0
# 1. DIRECT JUMP TO TARGET
proj_t = dx_t * cos_v + dy_t * sin_v proj_t = dx_t * cos_v + dy_t * sin_v
perp_t = -dx_t * sin_v + dy_t * cos_v perp_t = -dx_t * sin_v + dy_t * cos_v
dx_local = proj_t
dy_local = perp_t
if proj_t > 0 and abs(perp_t) < 1e-6 and cp.r == target.r: # A. Straight Jump (Only if target aligns with grid state or direct jump is enabled)
max_reach = context.cost_evaluator.collision_engine.ray_cast(cp, cp.r, proj_t + 1.0, net_width=net_width) if proj_t > 0 and abs(perp_t) < 1e-3 and abs(cp.orientation - target.orientation) < 0.1:
if max_reach >= proj_t - 0.01 and ( max_reach = context.cost_evaluator.collision_engine.ray_cast(cp, cp.orientation, proj_t + 1.0)
prev_straight_length is None or proj_t < prev_straight_length - TOLERANCE_LINEAR if max_reach >= proj_t - 0.01:
):
process_move( process_move(
current, current, target, net_width, net_id, open_set, closed_set, context, metrics, congestion_cache,
target, f'S{proj_t}', 'S', (proj_t,), skip_congestion, inv_snap=inv_snap, snap_to_grid=False,
net_width, parent_state=parent_state, max_cost=max_cost, snap=snap, self_collision_check=self_collision_check
net_id,
open_set,
closed_set,
context,
metrics,
congestion_cache,
"S",
(int(round(proj_t)),),
skip_congestion,
max_cost=max_cost,
self_collision_check=self_collision_check,
) )
max_reach = context.cost_evaluator.collision_engine.ray_cast(cp, cp.r, context.config.max_straight_length, net_width=net_width) # 2. VISIBILITY JUMPS & MAX REACH
candidate_lengths = [ max_reach = context.cost_evaluator.collision_engine.ray_cast(cp, cp.orientation, context.config.max_straight_length)
context.config.min_straight_length,
max_reach,
max_reach / 2.0,
max_reach - 5.0,
]
axis_target_dist = abs(dx_t) if cp.r in (0, 180) else abs(dy_t) straight_lengths = set()
candidate_lengths.append(axis_target_dist) if max_reach > context.config.min_straight_length:
straight_lengths.add(snap_search_grid(max_reach, snap))
for radius in context.config.bend_radii: for radius in context.config.bend_radii:
candidate_lengths.extend((max_reach - radius, axis_target_dist - radius, axis_target_dist - 2.0 * radius)) if max_reach > radius + context.config.min_straight_length:
straight_lengths.add(snap_search_grid(max_reach - radius, snap))
candidate_lengths.extend( if max_reach > context.config.min_straight_length + 5.0:
_visible_straight_candidates( straight_lengths.add(snap_search_grid(max_reach - 5.0, snap))
cp,
context,
max_reach,
cos_v,
sin_v,
net_width,
)
)
if cp.r == target.r and dx_local > 0 and abs(dy_local) > TOLERANCE_LINEAR: straight_lengths.add(context.config.min_straight_length)
for radius in context.config.sbend_radii: if max_reach > context.config.min_straight_length * 4:
sbend_span = _sbend_forward_span(dy_local, radius) straight_lengths.add(snap_search_grid(max_reach / 2.0, snap))
if sbend_span is None:
continue
candidate_lengths.extend((dx_local - sbend_span, dx_local - 2.0 * sbend_span))
for length in _quantized_lengths(candidate_lengths, max_reach): if abs(cp.orientation % 180) < 0.1: # Horizontal
if length < context.config.min_straight_length: target_dist = abs(target.x - cp.x)
continue if target_dist <= max_reach and target_dist > context.config.min_straight_length:
if prev_straight_length is not None and length >= prev_straight_length - TOLERANCE_LINEAR: sl = snap_search_grid(target_dist, snap)
continue if sl > 0.1: straight_lengths.add(sl)
for radius in context.config.bend_radii:
for l in [target_dist - radius, target_dist - 2*radius]:
if l > context.config.min_straight_length:
s_l = snap_search_grid(l, snap)
if s_l <= max_reach and s_l > 0.1: straight_lengths.add(s_l)
else: # Vertical
target_dist = abs(target.y - cp.y)
if target_dist <= max_reach and target_dist > context.config.min_straight_length:
sl = snap_search_grid(target_dist, snap)
if sl > 0.1: straight_lengths.add(sl)
for radius in context.config.bend_radii:
for l in [target_dist - radius, target_dist - 2*radius]:
if l > context.config.min_straight_length:
s_l = snap_search_grid(l, snap)
if s_l <= max_reach and s_l > 0.1: straight_lengths.add(s_l)
for length in sorted(straight_lengths, reverse=True):
process_move( process_move(
current, current, target, net_width, net_id, open_set, closed_set, context, metrics, congestion_cache,
target, f'S{length}', 'S', (length,), skip_congestion, inv_snap=inv_snap, parent_state=parent_state,
net_width, max_cost=max_cost, snap=snap, self_collision_check=self_collision_check
net_id,
open_set,
closed_set,
context,
metrics,
congestion_cache,
"S",
(length,),
skip_congestion,
max_cost=max_cost,
self_collision_check=self_collision_check,
) )
angle_to_target = 0.0 # 3. BENDS & SBENDS
if dx_t != 0 or dy_t != 0: angle_to_target = numpy.degrees(numpy.arctan2(target.y - cp.y, target.x - cp.x))
angle_to_target = float((round((180.0 / 3.141592653589793) * __import__("math").atan2(dy_t, dx_t)) + 360.0) % 360.0) allow_backwards = (dist_sq < 150*150)
allow_backwards = dist_sq < 150 * 150
for radius in context.config.bend_radii: for radius in context.config.bend_radii:
for direction in ("CW", "CCW"): for direction in ['CW', 'CCW']:
if not allow_backwards: if not allow_backwards:
turn = 90 if direction == "CCW" else -90 turn = 90 if direction == 'CCW' else -90
new_ori = (cp.r + turn) % 360 new_ori = (cp.orientation + turn) % 360
new_diff = (angle_to_target - new_ori + 180.0) % 360.0 - 180.0 new_diff = (angle_to_target - new_ori + 180) % 360 - 180
if abs(new_diff) > 135.0: if abs(new_diff) > 135:
continue continue
process_move( process_move(
current, current, target, net_width, net_id, open_set, closed_set, context, metrics, congestion_cache,
target, f'B{radius}{direction}', 'B', (radius, direction), skip_congestion, inv_snap=inv_snap,
net_width, parent_state=parent_state, max_cost=max_cost, snap=snap, self_collision_check=self_collision_check
net_id,
open_set,
closed_set,
context,
metrics,
congestion_cache,
"B",
(radius, direction),
skip_congestion,
max_cost=max_cost,
self_collision_check=self_collision_check,
) )
max_sbend_r = max(context.config.sbend_radii) if context.config.sbend_radii else 0.0 # 4. SBENDS
if max_sbend_r <= 0 or prev_move_type == "SBend": max_sbend_r = max(context.config.sbend_radii) if context.config.sbend_radii else 0
return if max_sbend_r > 0:
user_offsets = context.config.sbend_offsets
offsets: set[float] = set(user_offsets) if user_offsets is not None else set()
dx_local = (target.x - cp.x) * cos_v + (target.y - cp.y) * sin_v
dy_local = -(target.x - cp.x) * sin_v + (target.y - cp.y) * cos_v
explicit_offsets = context.config.sbend_offsets if dx_local > 0 and abs(dy_local) < 2 * max_sbend_r:
offsets: set[int] = set(int(round(v)) for v in explicit_offsets or []) min_d = numpy.sqrt(max(0, 4 * (abs(dy_local)/2.0) * abs(dy_local) - dy_local**2))
if dx_local >= min_d: offsets.add(dy_local)
# S-bends preserve orientation, so the implicit search only makes sense if user_offsets is None:
# when the target is ahead in local coordinates and keeps the same for sign in [-1, 1]:
# orientation. Generating generic speculative offsets on the integer lattice # Adaptive sampling: scale steps by snap_size but ensure enough range
# explodes the search space without contributing useful moves. for i in [1, 2, 5, 13, 34, 89]:
if target.r == cp.r and 0 < dx_local <= 4 * max_sbend_r: o = sign * i * snap
if 0 < abs(dy_local) < 2 * max_sbend_r: if abs(o) < 2 * max_sbend_r: offsets.add(o)
offsets.add(int(round(dy_local)))
if not offsets:
return
for offset in sorted(offsets): for offset in sorted(offsets):
if offset == 0:
continue
for radius in context.config.sbend_radii: for radius in context.config.sbend_radii:
if abs(offset) >= 2 * radius: if abs(offset) >= 2 * radius: continue
continue
process_move( process_move(
current, current, target, net_width, net_id, open_set, closed_set, context, metrics, congestion_cache,
target, f'SB{offset}R{radius}', 'SB', (offset, radius), skip_congestion, inv_snap=inv_snap,
net_width, parent_state=parent_state, max_cost=max_cost, snap=snap, self_collision_check=self_collision_check
net_id,
open_set,
closed_set,
context,
metrics,
congestion_cache,
"SB",
(offset, radius),
skip_congestion,
max_cost=max_cost,
self_collision_check=self_collision_check,
) )
@ -519,90 +408,87 @@ def process_move(
context: AStarContext, context: AStarContext,
metrics: AStarMetrics, metrics: AStarMetrics,
congestion_cache: dict[tuple, int], congestion_cache: dict[tuple, int],
move_class: Literal["S", "B", "SB"], move_type: str,
move_class: Literal['S', 'B', 'SB'],
params: tuple, params: tuple,
skip_congestion: bool, skip_congestion: bool,
inv_snap: float | None = None,
snap_to_grid: bool = True,
parent_state: tuple[int, int, int] | None = None,
max_cost: float | None = None, max_cost: float | None = None,
snap: float = 1.0,
self_collision_check: bool = False, self_collision_check: bool = False,
) -> None: ) -> None:
"""
Generate or retrieve geometry and delegate to add_node.
"""
cp = parent.port cp = parent.port
if inv_snap is None: inv_snap = 1.0 / snap
base_ori = float(int(cp.orientation + 0.5))
if parent_state is None:
gx = int(round(cp.x * inv_snap))
gy = int(round(cp.y * inv_snap))
go = int(round(cp.orientation))
parent_state = (gx, gy, go)
else:
gx, gy, go = parent_state
coll_type = context.config.bend_collision_type coll_type = context.config.bend_collision_type
coll_key = id(coll_type) if isinstance(coll_type, shapely.geometry.Polygon) else coll_type coll_key = id(coll_type) if isinstance(coll_type, shapely.geometry.Polygon) else coll_type
self_dilation = context.cost_evaluator.collision_engine.clearance / 2.0
abs_key = ( abs_key = (parent_state, move_class, params, net_width, coll_key, snap_to_grid)
cp.as_tuple(),
move_class,
params,
net_width,
coll_key,
context.config.bend_clip_margin,
self_dilation,
)
if abs_key in context.move_cache_abs: if abs_key in context.move_cache_abs:
res = context.move_cache_abs[abs_key] res = context.move_cache_abs[abs_key]
else: move_radius = params[0] if move_class == 'B' else (params[1] if move_class == 'SB' else None)
context.check_cache_eviction() add_node(
base_port = Port(0, 0, cp.r) parent, res, target, net_width, net_id, open_set, closed_set, context, metrics, congestion_cache,
rel_key = ( move_type, move_radius=move_radius, snap=snap, skip_congestion=skip_congestion,
cp.r, inv_snap=inv_snap, parent_state=parent_state, max_cost=max_cost,
move_class, self_collision_check=self_collision_check
params,
net_width,
coll_key,
context.config.bend_clip_margin,
self_dilation,
) )
return
# Trigger periodic cache eviction check (only on Absolute cache misses)
context.check_cache_eviction()
# Template Cache Key (Relative to Port 0,0,Ori)
# We snap the parameters to ensure template re-use
snapped_params = params
if move_class == 'SB':
snapped_params = (snap_search_grid(params[0], snap), params[1])
self_dilation = context.cost_evaluator.collision_engine.clearance / 2.0
rel_key = (base_ori, move_class, snapped_params, net_width, coll_key, self_dilation, snap_to_grid)
cache_key = (gx, gy, go, move_type, net_width)
if cache_key in context.hard_collision_set:
return
if rel_key in context.move_cache_rel: if rel_key in context.move_cache_rel:
res_rel = context.move_cache_rel[rel_key] res_rel = context.move_cache_rel[rel_key]
else: else:
try: try:
if move_class == "S": p0 = Port(0, 0, base_ori)
res_rel = Straight.generate(base_port, params[0], net_width, dilation=self_dilation) if move_class == 'S':
elif move_class == "B": res_rel = Straight.generate(p0, params[0], net_width, dilation=self_dilation, snap_to_grid=snap_to_grid, snap_size=snap)
res_rel = Bend90.generate( elif move_class == 'B':
base_port, res_rel = Bend90.generate(p0, params[0], net_width, params[1], collision_type=context.config.bend_collision_type, clip_margin=context.config.bend_clip_margin, dilation=self_dilation, snap_to_grid=snap_to_grid, snap_size=snap)
params[0], elif move_class == 'SB':
net_width, res_rel = SBend.generate(p0, snapped_params[0], snapped_params[1], net_width, collision_type=context.config.bend_collision_type, clip_margin=context.config.bend_clip_margin, dilation=self_dilation, snap_to_grid=snap_to_grid, snap_size=snap)
params[1],
collision_type=context.config.bend_collision_type,
clip_margin=context.config.bend_clip_margin,
dilation=self_dilation,
)
else: else:
res_rel = SBend.generate(
base_port,
params[0],
params[1],
net_width,
collision_type=context.config.bend_collision_type,
clip_margin=context.config.bend_clip_margin,
dilation=self_dilation,
)
except ValueError:
return return
context.move_cache_rel[rel_key] = res_rel context.move_cache_rel[rel_key] = res_rel
res = res_rel.translate(cp.x, cp.y) except (ValueError, ZeroDivisionError):
context.move_cache_abs[abs_key] = res return
move_radius = params[0] if move_class == "B" else (params[1] if move_class == "SB" else None) res = res_rel.translate(cp.x, cp.y, rel_gx=res_rel.rel_gx + gx, rel_gy=res_rel.rel_gy + gy, rel_go=res_rel.rel_go)
context.move_cache_abs[abs_key] = res
move_radius = params[0] if move_class == 'B' else (params[1] if move_class == 'SB' else None)
add_node( add_node(
parent, parent, res, target, net_width, net_id, open_set, closed_set, context, metrics, congestion_cache,
res, move_type, move_radius=move_radius, snap=snap, skip_congestion=skip_congestion,
target, inv_snap=inv_snap, parent_state=parent_state, max_cost=max_cost,
net_width, self_collision_check=self_collision_check
net_id,
open_set,
closed_set,
context,
metrics,
congestion_cache,
move_class,
abs_key,
move_radius=move_radius,
skip_congestion=skip_congestion,
max_cost=max_cost,
self_collision_check=self_collision_check,
) )
@ -618,14 +504,22 @@ def add_node(
metrics: AStarMetrics, metrics: AStarMetrics,
congestion_cache: dict[tuple, int], congestion_cache: dict[tuple, int],
move_type: str, move_type: str,
cache_key: tuple,
move_radius: float | None = None, move_radius: float | None = None,
snap: float = 1.0,
skip_congestion: bool = False, skip_congestion: bool = False,
inv_snap: float | None = None,
parent_state: tuple[int, int, int] | None = None,
max_cost: float | None = None, max_cost: float | None = None,
self_collision_check: bool = False, self_collision_check: bool = False,
) -> None: ) -> None:
"""
Check collisions and costs, and add node to the open set.
"""
metrics.moves_generated += 1 metrics.moves_generated += 1
state = result.end_port.as_tuple() state = (result.rel_gx, result.rel_gy, result.rel_go)
# Early pruning using lower-bound total cost
# child.total_g >= parent.total_g + move_length
new_lower_bound_g = parent.g_cost + result.length new_lower_bound_g = parent.g_cost + result.length
if state in closed_set and closed_set[state] <= new_lower_bound_g + TOLERANCE_LINEAR: if state in closed_set and closed_set[state] <= new_lower_bound_g + TOLERANCE_LINEAR:
metrics.pruned_closed_set += 1 metrics.pruned_closed_set += 1
@ -633,22 +527,30 @@ def add_node(
parent_p = parent.port parent_p = parent.port
end_p = result.end_port end_p = result.end_port
if parent_state is None:
pgx, pgy, pgo = int(round(parent_p.x * inv_snap)), int(round(parent_p.y * inv_snap)), int(round(parent_p.orientation))
else:
pgx, pgy, pgo = parent_state
cache_key = (pgx, pgy, pgo, move_type, net_width)
if cache_key in context.hard_collision_set: if cache_key in context.hard_collision_set:
metrics.pruned_hard_collision += 1 metrics.pruned_hard_collision += 1
return return
is_static_safe = cache_key in context.static_safe_cache is_static_safe = (cache_key in context.static_safe_cache)
if not is_static_safe: if not is_static_safe:
ce = context.cost_evaluator.collision_engine ce = context.cost_evaluator.collision_engine
if move_type == "S": collision_found = False
collision_found = ce.check_move_straight_static(parent_p, result.length, net_width=net_width) if 'S' in move_type and 'SB' not in move_type:
collision_found = ce.check_move_straight_static(parent_p, result.length)
else: else:
collision_found = ce.check_move_static(result, start_port=parent_p, end_port=end_p, net_width=net_width) collision_found = ce.check_move_static(result, start_port=parent_p, end_port=end_p)
if collision_found: if collision_found:
context.hard_collision_set.add(cache_key) context.hard_collision_set.add(cache_key)
metrics.pruned_hard_collision += 1 metrics.pruned_hard_collision += 1
return return
else:
context.static_safe_cache.add(cache_key) context.static_safe_cache.add(cache_key)
total_overlaps = 0 total_overlaps = 0
@ -659,6 +561,7 @@ def add_node(
total_overlaps = context.cost_evaluator.collision_engine.check_move_congestion(result, net_id) total_overlaps = context.cost_evaluator.collision_engine.check_move_congestion(result, net_id)
congestion_cache[cache_key] = total_overlaps congestion_cache[cache_key] = total_overlaps
# SELF-COLLISION CHECK (Optional for performance)
if self_collision_check: if self_collision_check:
curr_p = parent curr_p = parent
new_tb = result.total_bounds new_tb = result.total_bounds
@ -666,7 +569,8 @@ def add_node(
ancestor_res = curr_p.component_result ancestor_res = curr_p.component_result
if ancestor_res: if ancestor_res:
anc_tb = ancestor_res.total_bounds anc_tb = ancestor_res.total_bounds
if new_tb[0] < anc_tb[2] and new_tb[2] > anc_tb[0] and new_tb[1] < anc_tb[3] and new_tb[3] > anc_tb[1]: if (new_tb[0] < anc_tb[2] and new_tb[2] > anc_tb[0] and
new_tb[1] < anc_tb[3] and new_tb[3] > anc_tb[1]):
for p_anc in ancestor_res.geometry: for p_anc in ancestor_res.geometry:
for p_new in result.geometry: for p_new in result.geometry:
if p_new.intersects(p_anc) and not p_new.touches(p_anc): if p_new.intersects(p_anc) and not p_new.touches(p_anc):
@ -674,30 +578,18 @@ def add_node(
curr_p = curr_p.parent curr_p = curr_p.parent
penalty = 0.0 penalty = 0.0
if move_type == "SB": if 'SB' in move_type: penalty = context.config.sbend_penalty
penalty = context.config.sbend_penalty elif 'B' in move_type: penalty = context.config.bend_penalty
elif move_type == "B": if move_radius is not None and move_radius > TOLERANCE_LINEAR: penalty *= (10.0 / move_radius)**0.5
penalty = context.config.bend_penalty
if move_radius is not None and move_radius > TOLERANCE_LINEAR:
penalty *= (10.0 / move_radius) ** 0.5
move_cost = context.cost_evaluator.evaluate_move( move_cost = context.cost_evaluator.evaluate_move(
result.geometry, None, result.end_port, net_width, net_id,
result.end_port, start_port=parent_p, length=result.length,
net_width, dilated_geometry=None, penalty=penalty,
net_id, skip_static=True, skip_congestion=True # Congestion overlaps already calculated
start_port=parent_p,
length=result.length,
dilated_geometry=result.dilated_geometry,
penalty=penalty,
skip_static=True,
skip_congestion=True,
) )
move_cost += total_overlaps * context.cost_evaluator.congestion_penalty move_cost += total_overlaps * context.cost_evaluator.congestion_penalty
if max_cost is not None and parent.g_cost + move_cost > max_cost:
metrics.pruned_cost += 1
return
if move_cost > 1e12: if move_cost > 1e12:
metrics.pruned_cost += 1 metrics.pruned_cost += 1
return return
@ -713,6 +605,7 @@ def add_node(
def reconstruct_path(end_node: AStarNode) -> list[ComponentResult]: def reconstruct_path(end_node: AStarNode) -> list[ComponentResult]:
""" Trace back from end node to start node to get the path. """
path = [] path = []
curr: AStarNode | None = end_node curr: AStarNode | None = end_node
while curr and curr.component_result: while curr and curr.component_result:

View file

@ -4,15 +4,13 @@ from dataclasses import dataclass, field
from typing import Literal, Any from typing import Literal, Any
VisibilityGuidanceMode = Literal["off", "exact_corner", "tangent_corner"]
@dataclass @dataclass
class RouterConfig: class RouterConfig:
"""Configuration parameters for the A* Router.""" """Configuration parameters for the A* Router."""
node_limit: int = 1000000 node_limit: int = 1000000
snap_size: float = 5.0
# Sparse Sampling Configuration # Sparse Sampling Configuration
max_straight_length: float = 2000.0 max_straight_length: float = 2000.0
num_straight_samples: int = 5 num_straight_samples: int = 5
@ -31,7 +29,6 @@ class RouterConfig:
sbend_penalty: float = 500.0 sbend_penalty: float = 500.0
bend_collision_type: Literal["arc", "bbox", "clipped_bbox"] | Any = "arc" bend_collision_type: Literal["arc", "bbox", "clipped_bbox"] | Any = "arc"
bend_clip_margin: float = 10.0 bend_clip_margin: float = 10.0
visibility_guidance: VisibilityGuidanceMode = "tangent_corner"
@dataclass @dataclass

View file

@ -3,9 +3,8 @@ from __future__ import annotations
from typing import TYPE_CHECKING, Any from typing import TYPE_CHECKING, Any
import numpy as np import numpy as np
from inire.constants import TOLERANCE_LINEAR
from inire.router.config import CostConfig from inire.router.config import CostConfig
from inire.constants import TOLERANCE_LINEAR
if TYPE_CHECKING: if TYPE_CHECKING:
from shapely.geometry import Polygon from shapely.geometry import Polygon
@ -16,20 +15,25 @@ if TYPE_CHECKING:
class CostEvaluator: class CostEvaluator:
__slots__ = ( """
"collision_engine", Calculates total path and proximity costs.
"danger_map", """
"config", __slots__ = ('collision_engine', 'danger_map', 'config', 'unit_length_cost', 'greedy_h_weight', 'congestion_penalty',
"unit_length_cost", '_target_x', '_target_y', '_target_ori', '_target_cos', '_target_sin', '_min_radius')
"greedy_h_weight",
"congestion_penalty", collision_engine: CollisionEngine
"_target_x", """ The engine for intersection checks """
"_target_y",
"_target_r", danger_map: DangerMap
"_target_cos", """ Pre-computed grid for heuristic proximity costs """
"_target_sin",
"_min_radius", config: Any
) """ Parameter configuration (CostConfig or RouterConfig) """
unit_length_cost: float
greedy_h_weight: float
congestion_penalty: float
""" Cached weight values for performance """
def __init__( def __init__(
self, self,
@ -39,10 +43,22 @@ class CostEvaluator:
greedy_h_weight: float = 1.5, greedy_h_weight: float = 1.5,
congestion_penalty: float = 10000.0, congestion_penalty: float = 10000.0,
bend_penalty: float = 250.0, bend_penalty: float = 250.0,
sbend_penalty: float | None = None, sbend_penalty: float = 500.0,
min_bend_radius: float = 50.0, min_bend_radius: float = 50.0,
) -> None: ) -> None:
actual_sbend_penalty = 2.0 * bend_penalty if sbend_penalty is None else sbend_penalty """
Initialize the Cost Evaluator.
Args:
collision_engine: The engine for intersection checks.
danger_map: Pre-computed grid for heuristic proximity costs.
unit_length_cost: Cost multiplier per micrometer of path length.
greedy_h_weight: Heuristic weighting (A* greedy factor).
congestion_penalty: Multiplier for path overlaps in negotiated congestion.
bend_penalty: Base cost for 90-degree bends.
sbend_penalty: Base cost for parametric S-bends.
min_bend_radius: Minimum radius for 90-degree bends (used for alignment heuristic).
"""
self.collision_engine = collision_engine self.collision_engine = collision_engine
self.danger_map = danger_map self.danger_map = danger_map
self.config = CostConfig( self.config = CostConfig(
@ -50,86 +66,123 @@ class CostEvaluator:
greedy_h_weight=greedy_h_weight, greedy_h_weight=greedy_h_weight,
congestion_penalty=congestion_penalty, congestion_penalty=congestion_penalty,
bend_penalty=bend_penalty, bend_penalty=bend_penalty,
sbend_penalty=actual_sbend_penalty, sbend_penalty=sbend_penalty,
min_bend_radius=min_bend_radius, min_bend_radius=min_bend_radius,
) )
# Use config values
self.unit_length_cost = self.config.unit_length_cost self.unit_length_cost = self.config.unit_length_cost
self.greedy_h_weight = self.config.greedy_h_weight self.greedy_h_weight = self.config.greedy_h_weight
self.congestion_penalty = self.config.congestion_penalty self.congestion_penalty = self.config.congestion_penalty
# Pre-cache configuration flags for fast path
self._refresh_cached_config() self._refresh_cached_config()
# Target cache
self._target_x = 0.0 self._target_x = 0.0
self._target_y = 0.0 self._target_y = 0.0
self._target_r = 0 self._target_ori = 0.0
self._target_cos = 1.0 self._target_cos = 1.0
self._target_sin = 0.0 self._target_sin = 0.0
def _refresh_cached_config(self) -> None: def _refresh_cached_config(self) -> None:
if hasattr(self.config, "min_bend_radius"): """ Sync internal caches with the current self.config object. """
if hasattr(self.config, 'min_bend_radius'):
self._min_radius = self.config.min_bend_radius self._min_radius = self.config.min_bend_radius
elif hasattr(self.config, "bend_radii") and self.config.bend_radii: elif hasattr(self.config, 'bend_radii') and self.config.bend_radii:
self._min_radius = min(self.config.bend_radii) self._min_radius = min(self.config.bend_radii)
else: else:
self._min_radius = 50.0 self._min_radius = 50.0
if hasattr(self.config, "unit_length_cost"):
if hasattr(self.config, 'unit_length_cost'):
self.unit_length_cost = self.config.unit_length_cost self.unit_length_cost = self.config.unit_length_cost
if hasattr(self.config, "greedy_h_weight"): if hasattr(self.config, 'greedy_h_weight'):
self.greedy_h_weight = self.config.greedy_h_weight self.greedy_h_weight = self.config.greedy_h_weight
if hasattr(self.config, "congestion_penalty"): if hasattr(self.config, 'congestion_penalty'):
self.congestion_penalty = self.config.congestion_penalty self.congestion_penalty = self.config.congestion_penalty
def set_target(self, target: Port) -> None: def set_target(self, target: Port) -> None:
""" Pre-calculate target-dependent values for faster heuristic. """
self._target_x = target.x self._target_x = target.x
self._target_y = target.y self._target_y = target.y
self._target_r = target.r self._target_ori = target.orientation
rad = np.radians(target.r) rad = np.radians(target.orientation)
self._target_cos = np.cos(rad) self._target_cos = np.cos(rad)
self._target_sin = np.sin(rad) self._target_sin = np.sin(rad)
def g_proximity(self, x: float, y: float) -> float: def g_proximity(self, x: float, y: float) -> float:
"""
Get proximity cost from the Danger Map.
Args:
x, y: Coordinate to check.
Returns:
Proximity cost at location.
"""
if self.danger_map is None: if self.danger_map is None:
return 0.0 return 0.0
return self.danger_map.get_cost(x, y) return self.danger_map.get_cost(x, y)
def h_manhattan(self, current: Port, target: Port) -> float: def h_manhattan(self, current: Port, target: Port) -> float:
"""
Heuristic: weighted Manhattan distance + mandatory turn penalties.
"""
tx, ty = target.x, target.y tx, ty = target.x, target.y
if abs(tx - self._target_x) > TOLERANCE_LINEAR or abs(ty - self._target_y) > TOLERANCE_LINEAR or target.r != self._target_r:
# Avoid repeated trig for target orientation
if (abs(tx - self._target_x) > TOLERANCE_LINEAR or
abs(ty - self._target_y) > TOLERANCE_LINEAR or
abs(target.orientation - self._target_ori) > 0.1):
self.set_target(target) self.set_target(target)
dx = abs(current.x - tx) dx = abs(current.x - tx)
dy = abs(current.y - ty) dy = abs(current.y - ty)
dist = dx + dy dist = dx + dy
bp = self.config.bend_penalty bp = self.config.bend_penalty
penalty = 0.0 penalty = 0.0
curr_r = current.r # 1. Orientation Difference
diff = abs(curr_r - self._target_r) % 360 curr_ori = current.orientation
if diff > 0: diff = abs(curr_ori - self._target_ori) % 360
penalty += 2 * bp if diff == 180 else bp if diff > 0.1:
if abs(diff - 180) < 0.1:
penalty += 2 * bp
else: # 90 or 270 degree rotation
penalty += 1 * bp
# 2. Side Check (Entry half-plane)
v_dx = tx - current.x v_dx = tx - current.x
v_dy = ty - current.y v_dy = ty - current.y
side_proj = v_dx * self._target_cos + v_dy * self._target_sin side_proj = v_dx * self._target_cos + v_dy * self._target_sin
perp_dist = abs(v_dx * self._target_sin - v_dy * self._target_cos) perp_dist = abs(v_dx * self._target_sin - v_dy * self._target_cos)
if side_proj < 0 or (side_proj < self._min_radius and perp_dist > 0):
if side_proj < -0.1 or (side_proj < self._min_radius and perp_dist > 0.1):
penalty += 2 * bp penalty += 2 * bp
if curr_r == 0: # 3. Traveling Away
c_cos, c_sin = 1.0, 0.0 # Optimization: avoid np.radians/cos/sin if current_ori is standard 0,90,180,270
elif curr_r == 90: if curr_ori == 0: c_cos, c_sin = 1.0, 0.0
c_cos, c_sin = 0.0, 1.0 elif curr_ori == 90: c_cos, c_sin = 0.0, 1.0
elif curr_r == 180: elif curr_ori == 180: c_cos, c_sin = -1.0, 0.0
c_cos, c_sin = -1.0, 0.0 elif curr_ori == 270: c_cos, c_sin = 0.0, -1.0
else: else:
c_cos, c_sin = 0.0, -1.0 curr_rad = np.radians(curr_ori)
c_cos, c_sin = np.cos(curr_rad), np.sin(curr_rad)
move_proj = v_dx * c_cos + v_dy * c_sin move_proj = v_dx * c_cos + v_dy * c_sin
if move_proj < 0: if move_proj < -0.1:
penalty += 2 * bp penalty += 2 * bp
if diff == 0 and perp_dist > 0:
# 4. Jog Alignment
if diff < 0.1:
if perp_dist > 0.1:
penalty += 2 * bp penalty += 2 * bp
return self.greedy_h_weight * (dist + penalty) return self.greedy_h_weight * (dist + penalty)
def evaluate_move( def evaluate_move(
self, self,
geometry: list[Polygon] | None, geometry: list[Polygon] | None,
@ -143,40 +196,59 @@ class CostEvaluator:
skip_congestion: bool = False, skip_congestion: bool = False,
penalty: float = 0.0, penalty: float = 0.0,
) -> float: ) -> float:
_ = net_width """
Calculate the cost of a single move (Straight, Bend, SBend).
Args:
geometry: List of polygons in the move.
end_port: Port at the end of the move.
net_width: Width of the waveguide (unused).
net_id: Identifier for the net.
start_port: Port at the start of the move.
length: Physical path length of the move.
dilated_geometry: Pre-calculated dilated polygons.
skip_static: If True, bypass static collision checks.
skip_congestion: If True, bypass congestion checks.
penalty: Fixed cost penalty for the move type.
Returns:
Total cost of the move, or 1e15 if invalid.
"""
_ = net_width # Unused
# 1. Boundary Check
danger_map = self.danger_map danger_map = self.danger_map
if danger_map is not None and not danger_map.is_within_bounds(end_port.x, end_port.y): if danger_map is not None:
if not danger_map.is_within_bounds(end_port.x, end_port.y):
return 1e15 return 1e15
total_cost = length * self.unit_length_cost + penalty total_cost = length * self.unit_length_cost + penalty
# 2. Collision Check
if not skip_static or not skip_congestion: if not skip_static or not skip_congestion:
collision_engine = self.collision_engine
# Ensure geometry is provided if collision checks are enabled
if geometry is None: if geometry is None:
return 1e15 return 1e15
collision_engine = self.collision_engine
for i, poly in enumerate(geometry): for i, poly in enumerate(geometry):
dil_poly = dilated_geometry[i] if dilated_geometry else None dil_poly = dilated_geometry[i] if dilated_geometry else None
if not skip_static and collision_engine.check_collision( # Hard Collision (Static obstacles)
poly, if not skip_static:
net_id, if collision_engine.check_collision(
buffer_mode="static", poly, net_id, buffer_mode='static', start_port=start_port, end_port=end_port,
start_port=start_port, dilated_geometry=dil_poly
end_port=end_port,
dilated_geometry=dil_poly,
): ):
return 1e15 return 1e15
# Soft Collision (Negotiated Congestion)
if not skip_congestion: if not skip_congestion:
overlaps = collision_engine.check_collision(poly, net_id, buffer_mode="congestion", dilated_geometry=dil_poly) overlaps = collision_engine.check_collision(
poly, net_id, buffer_mode='congestion', dilated_geometry=dil_poly
)
if isinstance(overlaps, int) and overlaps > 0: if isinstance(overlaps, int) and overlaps > 0:
total_cost += overlaps * self.congestion_penalty total_cost += overlaps * self.congestion_penalty
# 3. Proximity cost from Danger Map
if danger_map is not None: if danger_map is not None:
cost_s = danger_map.get_cost(start_port.x, start_port.y) if start_port else 0.0 total_cost += danger_map.get_cost(end_port.x, end_port.y)
cost_e = danger_map.get_cost(end_port.x, end_port.y)
if start_port:
mid_x = (start_port.x + end_port.x) / 2.0
mid_y = (start_port.y + end_port.y) / 2.0
cost_m = danger_map.get_cost(mid_x, mid_y)
total_cost += length * (cost_s + cost_m + cost_e) / 3.0
else:
total_cost += length * cost_e
return total_cost return total_cost

View file

@ -3,8 +3,6 @@ from __future__ import annotations
from typing import TYPE_CHECKING from typing import TYPE_CHECKING
import numpy import numpy
import shapely import shapely
from scipy.spatial import cKDTree
from functools import lru_cache
if TYPE_CHECKING: if TYPE_CHECKING:
from shapely.geometry import Polygon from shapely.geometry import Polygon
@ -12,15 +10,36 @@ if TYPE_CHECKING:
class DangerMap: class DangerMap:
""" """
A proximity cost evaluator using a KD-Tree of obstacle boundary points. A pre-computed grid for heuristic proximity costs, vectorized for performance.
Scales with obstacle perimeter rather than design area.
""" """
__slots__ = ('minx', 'miny', 'maxx', 'maxy', 'resolution', 'safety_threshold', 'k', 'tree') __slots__ = ('minx', 'miny', 'maxx', 'maxy', 'resolution', 'safety_threshold', 'k', 'width_cells', 'height_cells', 'grid')
minx: float
miny: float
maxx: float
maxy: float
""" Boundary coordinates of the map """
resolution: float
""" Grid cell size in micrometers """
safety_threshold: float
""" Distance below which proximity costs are applied """
k: float
""" Cost multiplier constant """
width_cells: int
height_cells: int
""" Grid dimensions in cells """
grid: numpy.ndarray
""" 2D array of pre-computed costs """
def __init__( def __init__(
self, self,
bounds: tuple[float, float, float, float], bounds: tuple[float, float, float, float],
resolution: float = 5.0, resolution: float = 1.0,
safety_threshold: float = 10.0, safety_threshold: float = 10.0,
k: float = 1.0, k: float = 1.0,
) -> None: ) -> None:
@ -29,7 +48,7 @@ class DangerMap:
Args: Args:
bounds: (minx, miny, maxx, maxy) in um. bounds: (minx, miny, maxx, maxy) in um.
resolution: Sampling resolution for obstacle boundaries (um). resolution: Cell size (um).
safety_threshold: Proximity limit (um). safety_threshold: Proximity limit (um).
k: Penalty multiplier. k: Penalty multiplier.
""" """
@ -37,62 +56,79 @@ class DangerMap:
self.resolution = resolution self.resolution = resolution
self.safety_threshold = safety_threshold self.safety_threshold = safety_threshold
self.k = k self.k = k
self.tree: cKDTree | None = None
# Grid dimensions
self.width_cells = int(numpy.ceil((self.maxx - self.minx) / self.resolution))
self.height_cells = int(numpy.ceil((self.maxy - self.miny) / self.resolution))
self.grid = numpy.zeros((self.width_cells, self.height_cells), dtype=numpy.float32)
def precompute(self, obstacles: list[Polygon]) -> None: def precompute(self, obstacles: list[Polygon]) -> None:
""" """
Pre-compute the proximity tree by sampling obstacle boundaries. Pre-compute the proximity costs for the entire grid using vectorized operations.
Args:
obstacles: List of static obstacle geometries.
""" """
all_points = [] from scipy.ndimage import distance_transform_edt
# 1. Create a binary mask of obstacles
mask = numpy.ones((self.width_cells, self.height_cells), dtype=bool)
# Create coordinate grids
x_coords = numpy.linspace(self.minx + self.resolution/2, self.maxx - self.resolution/2, self.width_cells)
y_coords = numpy.linspace(self.miny + self.resolution/2, self.maxy - self.resolution/2, self.height_cells)
xv, yv = numpy.meshgrid(x_coords, y_coords, indexing='ij')
for poly in obstacles: for poly in obstacles:
# Sample exterior # Use shapely.contains_xy for fast vectorized point-in-polygon check
exterior = poly.exterior in_poly = shapely.contains_xy(poly, xv, yv)
dist = 0 mask[in_poly] = False
while dist < exterior.length:
pt = exterior.interpolate(dist)
all_points.append((pt.x, pt.y))
dist += self.resolution
# Sample interiors (holes)
for interior in poly.interiors:
dist = 0
while dist < interior.length:
pt = interior.interpolate(dist)
all_points.append((pt.x, pt.y))
dist += self.resolution
if all_points: # 2. Distance transform (mask=True for empty space)
self.tree = cKDTree(numpy.array(all_points)) distances = distance_transform_edt(mask) * self.resolution
else:
self.tree = None
# Clear cache when tree changes # 3. Proximity cost: k / d^2 if d < threshold, else 0
self._get_cost_quantized.cache_clear() # Cap distances at a small epsilon (e.g. 0.1um) to avoid division by zero
safe_distances = numpy.maximum(distances, 0.1)
self.grid = numpy.where(
distances < self.safety_threshold,
self.k / (safe_distances**2),
0.0
).astype(numpy.float32)
def is_within_bounds(self, x: float, y: float) -> bool: def is_within_bounds(self, x: float, y: float) -> bool:
""" """
Check if a coordinate is within the design bounds. Check if a coordinate is within the design bounds.
Args:
x, y: Coordinate to check.
Returns:
True if within [min, max] for both axes.
""" """
return self.minx <= x <= self.maxx and self.miny <= y <= self.maxy return self.minx <= x <= self.maxx and self.miny <= y <= self.maxy
def get_cost(self, x: float, y: float) -> float: def get_cost(self, x: float, y: float) -> float:
""" """
Get the proximity cost at a specific coordinate using the KD-Tree. Get the proximity cost at a specific coordinate.
Coordinates are quantized to 1nm to improve cache performance.
"""
qx_milli = int(round(x * 1000))
qy_milli = int(round(y * 1000))
return self._get_cost_quantized(qx_milli, qy_milli)
@lru_cache(maxsize=100000) Args:
def _get_cost_quantized(self, qx_milli: int, qy_milli: int) -> float: x, y: Coordinate to look up.
qx = qx_milli / 1000.0
qy = qy_milli / 1000.0 Returns:
if not self.is_within_bounds(qx, qy): Pre-computed cost, or 1e15 if out of bounds.
return 1e15 """
if self.tree is None: # Clamp to grid range to handle upper boundary exactly
return 0.0 ix = int((x - self.minx) / self.resolution)
dist, _ = self.tree.query([qx, qy], distance_upper_bound=self.safety_threshold) iy = int((y - self.miny) / self.resolution)
if dist >= self.safety_threshold:
return 0.0 # Handle exact upper boundary
safe_dist = max(dist, 0.1) if ix == self.width_cells and abs(x - self.maxx) < 1e-9:
return float(self.k / (safe_dist ** 2)) ix = self.width_cells - 1
if iy == self.height_cells and abs(y - self.maxy) < 1e-9:
iy = self.height_cells - 1
if 0 <= ix < self.width_cells and 0 <= iy < self.height_cells:
return float(self.grid[ix, iy])
return 1e15 # Outside bounds

View file

@ -1,16 +1,13 @@
from __future__ import annotations from __future__ import annotations
import logging import logging
import math
import random
import time import time
import random
from dataclasses import dataclass from dataclasses import dataclass
from typing import TYPE_CHECKING, Any, Callable, Literal from typing import TYPE_CHECKING, Callable, Literal, Any
import numpy from inire.router.astar import route_astar, AStarMetrics
from inire.constants import TOLERANCE_LINEAR
from inire.geometry.components import Bend90, Straight
from inire.router.astar import AStarMetrics, route_astar
if TYPE_CHECKING: if TYPE_CHECKING:
from inire.geometry.components import ComponentResult from inire.geometry.components import ComponentResult
@ -23,25 +20,52 @@ logger = logging.getLogger(__name__)
@dataclass @dataclass
class RoutingResult: class RoutingResult:
"""
Result of a single net routing operation.
"""
net_id: str net_id: str
""" Identifier for the net """
path: list[ComponentResult] path: list[ComponentResult]
""" List of moves forming the path """
is_valid: bool is_valid: bool
""" Whether the path is collision-free and reached the target """
collisions: int collisions: int
""" Number of detected collisions/overlaps """
reached_target: bool = False reached_target: bool = False
""" Whether the final port matches the target port """
class PathFinder: class PathFinder:
__slots__ = ( """
"context", Multi-net router using Negotiated Congestion.
"metrics", """
"max_iterations", __slots__ = ('context', 'metrics', 'max_iterations', 'base_congestion_penalty',
"base_congestion_penalty", 'use_tiered_strategy', 'congestion_multiplier', 'accumulated_expanded_nodes', 'warm_start')
"use_tiered_strategy",
"congestion_multiplier", context: AStarContext
"accumulated_expanded_nodes", """ The A* persistent state (config, caches, evaluator) """
"warm_start",
"refine_paths", metrics: AStarMetrics
) """ Performance metrics for search operations """
max_iterations: int
""" Maximum number of rip-up and reroute iterations """
base_congestion_penalty: float
""" Starting penalty for overlaps """
congestion_multiplier: float
""" Multiplier for congestion penalty per iteration """
use_tiered_strategy: bool
""" If True, use simpler collision models in early iterations for speed """
warm_start: Literal['shortest', 'longest', 'user'] | None
""" Heuristic sorting for the initial greedy pass """
def __init__( def __init__(
self, self,
@ -51,9 +75,20 @@ class PathFinder:
base_congestion_penalty: float = 100.0, base_congestion_penalty: float = 100.0,
congestion_multiplier: float = 1.5, congestion_multiplier: float = 1.5,
use_tiered_strategy: bool = True, use_tiered_strategy: bool = True,
warm_start: Literal["shortest", "longest", "user"] | None = "shortest", warm_start: Literal['shortest', 'longest', 'user'] | None = 'shortest',
refine_paths: bool = False,
) -> None: ) -> None:
"""
Initialize the PathFinder.
Args:
context: The A* search context (evaluator, config, caches).
metrics: Optional metrics container.
max_iterations: Maximum number of rip-up and reroute iterations.
base_congestion_penalty: Starting penalty for overlaps.
congestion_multiplier: Multiplier for congestion penalty per iteration.
use_tiered_strategy: Whether to use simplified collision models in early iterations.
warm_start: Initial ordering strategy for a fast greedy pass.
"""
self.context = context self.context = context
self.metrics = metrics if metrics is not None else AStarMetrics() self.metrics = metrics if metrics is not None else AStarMetrics()
self.max_iterations = max_iterations self.max_iterations = max_iterations
@ -61,8 +96,7 @@ class PathFinder:
self.congestion_multiplier = congestion_multiplier self.congestion_multiplier = congestion_multiplier
self.use_tiered_strategy = use_tiered_strategy self.use_tiered_strategy = use_tiered_strategy
self.warm_start = warm_start self.warm_start = warm_start
self.refine_paths = refine_paths self.accumulated_expanded_nodes: list[tuple[float, float, float]] = []
self.accumulated_expanded_nodes: list[tuple[int, int, int]] = []
@property @property
def cost_evaluator(self) -> CostEvaluator: def cost_evaluator(self) -> CostEvaluator:
@ -72,197 +106,77 @@ class PathFinder:
self, self,
netlist: dict[str, tuple[Port, Port]], netlist: dict[str, tuple[Port, Port]],
net_widths: dict[str, float], net_widths: dict[str, float],
order: Literal["shortest", "longest", "user"], order: Literal['shortest', 'longest', 'user']
) -> dict[str, list[ComponentResult]]: ) -> dict[str, list[ComponentResult]]:
"""
Internal greedy pass: route nets sequentially and freeze them as static.
"""
all_net_ids = list(netlist.keys()) all_net_ids = list(netlist.keys())
if order != "user": if order != 'user':
all_net_ids.sort( def get_dist(nid):
key=lambda nid: abs(netlist[nid][1].x - netlist[nid][0].x) + abs(netlist[nid][1].y - netlist[nid][0].y), s, t = netlist[nid]
reverse=(order == "longest"), return abs(t.x - s.x) + abs(t.y - s.y)
) all_net_ids.sort(key=get_dist, reverse=(order == 'longest'))
greedy_paths = {}
temp_obj_ids = []
logger.info(f"PathFinder: Starting Greedy Warm-Start ({order} order)...")
greedy_paths: dict[str, list[ComponentResult]] = {}
temp_obj_ids: list[int] = []
greedy_node_limit = min(self.context.config.node_limit, 2000)
for net_id in all_net_ids: for net_id in all_net_ids:
start, target = netlist[net_id] start, target = netlist[net_id]
width = net_widths.get(net_id, 2.0) width = net_widths.get(net_id, 2.0)
# Heuristic max cost for fail-fast
h_start = self.cost_evaluator.h_manhattan(start, target) h_start = self.cost_evaluator.h_manhattan(start, target)
max_cost_limit = max(h_start * 3.0, 2000.0) max_cost_limit = max(h_start * 3.0, 2000.0)
path = route_astar( path = route_astar(
start, start, target, width, context=self.context, metrics=self.metrics,
target, net_id=net_id, skip_congestion=True, max_cost=max_cost_limit
width,
context=self.context,
metrics=self.metrics,
net_id=net_id,
skip_congestion=True,
max_cost=max_cost_limit,
self_collision_check=True,
node_limit=greedy_node_limit,
) )
if not path:
continue if path:
greedy_paths[net_id] = path greedy_paths[net_id] = path
# Freeze as static
for res in path: for res in path:
geoms = res.actual_geometry if res.actual_geometry is not None else res.geometry geoms = res.actual_geometry if res.actual_geometry is not None else res.geometry
dilated_geoms = res.dilated_actual_geometry if res.dilated_actual_geometry else res.dilated_geometry for poly in geoms:
for i, poly in enumerate(geoms): obj_id = self.cost_evaluator.collision_engine.add_static_obstacle(poly)
dilated = dilated_geoms[i] if dilated_geoms else None
obj_id = self.cost_evaluator.collision_engine.add_static_obstacle(poly, dilated_geometry=dilated)
temp_obj_ids.append(obj_id) temp_obj_ids.append(obj_id)
self.context.clear_static_caches()
# Clean up temporary static obstacles
for obj_id in temp_obj_ids: for obj_id in temp_obj_ids:
self.cost_evaluator.collision_engine.remove_static_obstacle(obj_id) self.cost_evaluator.collision_engine.remove_static_obstacle(obj_id)
logger.info(f"PathFinder: Greedy Warm-Start finished. Seeding {len(greedy_paths)}/{len(netlist)} nets.")
return greedy_paths return greedy_paths
def _has_self_collision(self, path: list[ComponentResult]) -> bool: def _has_self_collision(self, path: list[ComponentResult]) -> bool:
for i, comp_i in enumerate(path): """
Quickly check if a path intersects itself.
"""
if not path:
return False
num_components = len(path)
for i in range(num_components):
comp_i = path[i]
tb_i = comp_i.total_bounds tb_i = comp_i.total_bounds
for j in range(i + 2, len(path)): for j in range(i + 2, num_components): # Skip immediate neighbors
comp_j = path[j] comp_j = path[j]
tb_j = comp_j.total_bounds tb_j = comp_j.total_bounds
if tb_i[0] < tb_j[2] and tb_i[2] > tb_j[0] and tb_i[1] < tb_j[3] and tb_i[3] > tb_j[1]:
# AABB Check
if (tb_i[0] < tb_j[2] and tb_i[2] > tb_j[0] and
tb_i[1] < tb_j[3] and tb_i[3] > tb_j[1]):
# Real geometry check
for p_i in comp_i.geometry: for p_i in comp_i.geometry:
for p_j in comp_j.geometry: for p_j in comp_j.geometry:
if p_i.intersects(p_j) and not p_i.touches(p_j): if p_i.intersects(p_j) and not p_i.touches(p_j):
return True return True
return False return False
def _path_cost(self, path: list[ComponentResult]) -> float:
total = 0.0
bend_penalty = self.context.config.bend_penalty
sbend_penalty = self.context.config.sbend_penalty
for comp in path:
total += comp.length
if comp.move_type == "Bend90":
radius = comp.length * 2.0 / math.pi if comp.length > 0 else 0.0
if radius > 0:
total += bend_penalty * (10.0 / radius) ** 0.5
else:
total += bend_penalty
elif comp.move_type == "SBend":
total += sbend_penalty
return total
def _extract_geometry(self, path: list[ComponentResult]) -> tuple[list[Any], list[Any]]:
all_geoms = []
all_dilated = []
for res in path:
all_geoms.extend(res.geometry)
if res.dilated_geometry:
all_dilated.extend(res.dilated_geometry)
else:
dilation = self.cost_evaluator.collision_engine.clearance / 2.0
all_dilated.extend([p.buffer(dilation) for p in res.geometry])
return all_geoms, all_dilated
def _to_local(self, start: Port, point: Port) -> tuple[int, int]:
dx = point.x - start.x
dy = point.y - start.y
if start.r == 0:
return dx, dy
if start.r == 90:
return dy, -dx
if start.r == 180:
return -dx, -dy
return -dy, dx
def _build_same_orientation_dogleg(
self,
start: Port,
target: Port,
net_width: float,
radius: float,
side_extent: float,
) -> list[ComponentResult] | None:
local_dx, local_dy = self._to_local(start, target)
if abs(local_dy) > 0 or local_dx < 4.0 * radius - 0.01:
return None
side_abs = abs(side_extent)
side_length = side_abs - 2.0 * radius
if side_length < self.context.config.min_straight_length - 0.01:
return None
forward_length = local_dx - 4.0 * radius
if forward_length < -0.01:
return None
first_dir = "CCW" if side_extent > 0 else "CW"
second_dir = "CW" if side_extent > 0 else "CCW"
dilation = self.cost_evaluator.collision_engine.clearance / 2.0
path: list[ComponentResult] = []
curr = start
for direction, straight_len in (
(first_dir, side_length),
(second_dir, forward_length),
(second_dir, side_length),
(first_dir, None),
):
bend = Bend90.generate(curr, radius, net_width, direction, dilation=dilation)
path.append(bend)
curr = bend.end_port
if straight_len is None:
continue
if straight_len > 0.01:
straight = Straight.generate(curr, straight_len, net_width, dilation=dilation)
path.append(straight)
curr = straight.end_port
if curr != target:
return None
return path
def _refine_path(
self,
net_id: str,
start: Port,
target: Port,
net_width: float,
path: list[ComponentResult],
) -> list[ComponentResult]:
if not path or start.r != target.r:
return path
bend_count = sum(1 for comp in path if comp.move_type == "Bend90")
if bend_count < 5:
return path
side_extents = []
local_points = [self._to_local(start, start)]
local_points.extend(self._to_local(start, comp.end_port) for comp in path)
min_side = min(point[1] for point in local_points)
max_side = max(point[1] for point in local_points)
if min_side < -0.01:
side_extents.append(float(min_side))
if max_side > 0.01:
side_extents.append(float(max_side))
if not side_extents:
return path
best_path = path
best_cost = self._path_cost(path)
collision_engine = self.cost_evaluator.collision_engine
for radius in self.context.config.bend_radii:
for side_extent in side_extents:
candidate = self._build_same_orientation_dogleg(start, target, net_width, radius, side_extent)
if candidate is None:
continue
is_valid, collisions = collision_engine.verify_path(net_id, candidate)
if not is_valid or collisions != 0:
continue
candidate_cost = self._path_cost(candidate)
if candidate_cost + 1e-6 < best_cost:
best_cost = candidate_cost
best_path = candidate
return best_path
def route_all( def route_all(
self, self,
netlist: dict[str, tuple[Port, Port]], netlist: dict[str, tuple[Port, Port]],
@ -270,10 +184,26 @@ class PathFinder:
store_expanded: bool = False, store_expanded: bool = False,
iteration_callback: Callable[[int, dict[str, RoutingResult]], None] | None = None, iteration_callback: Callable[[int, dict[str, RoutingResult]], None] | None = None,
shuffle_nets: bool = False, shuffle_nets: bool = False,
sort_nets: Literal["shortest", "longest", "user", None] = None, sort_nets: Literal['shortest', 'longest', 'user', None] = None,
initial_paths: dict[str, list[ComponentResult]] | None = None, initial_paths: dict[str, list[ComponentResult]] | None = None,
seed: int | None = None, seed: int | None = None,
) -> dict[str, RoutingResult]: ) -> dict[str, RoutingResult]:
"""
Route all nets in the netlist using Negotiated Congestion.
Args:
netlist: Mapping of net_id to (start_port, target_port).
net_widths: Mapping of net_id to waveguide width.
store_expanded: Whether to store expanded nodes for ALL iterations and nets.
iteration_callback: Optional callback(iteration_idx, current_results).
shuffle_nets: Whether to randomize the order of nets each iteration.
sort_nets: Heuristic sorting for the initial iteration order (overrides self.warm_start).
initial_paths: Pre-computed paths to use for Iteration 0 (overrides warm_start).
seed: Optional seed for randomization (enables reproducibility).
Returns:
Mapping of net_id to RoutingResult.
"""
results: dict[str, RoutingResult] = {} results: dict[str, RoutingResult] = {}
self.cost_evaluator.congestion_penalty = self.base_congestion_penalty self.cost_evaluator.congestion_penalty = self.base_congestion_penalty
self.accumulated_expanded_nodes = [] self.accumulated_expanded_nodes = []
@ -282,44 +212,63 @@ class PathFinder:
start_time = time.monotonic() start_time = time.monotonic()
num_nets = len(netlist) num_nets = len(netlist)
session_timeout = max(60.0, 10.0 * num_nets * self.max_iterations) session_timeout = max(60.0, 10.0 * num_nets * self.max_iterations)
all_net_ids = list(netlist.keys())
needs_sc: set[str] = set()
all_net_ids = list(netlist.keys())
needs_sc = set() # Nets requiring self-collision avoidance
# Determine initial paths (Warm Start)
if initial_paths is None: if initial_paths is None:
ws_order = sort_nets if sort_nets is not None else self.warm_start ws_order = sort_nets if sort_nets is not None else self.warm_start
if ws_order is not None: if ws_order is not None:
initial_paths = self._perform_greedy_pass(netlist, net_widths, ws_order) initial_paths = self._perform_greedy_pass(netlist, net_widths, ws_order)
self.context.clear_static_caches() self.context.clear_static_caches()
if sort_nets and sort_nets != "user": # Apply initial sorting heuristic if requested (for the main NC loop)
all_net_ids.sort( if sort_nets:
key=lambda nid: abs(netlist[nid][1].x - netlist[nid][0].x) + abs(netlist[nid][1].y - netlist[nid][0].y), def get_dist(nid):
reverse=(sort_nets == "longest"), s, t = netlist[nid]
) return abs(t.x - s.x) + abs(t.y - s.y)
if sort_nets != 'user':
all_net_ids.sort(key=get_dist, reverse=(sort_nets == 'longest'))
for iteration in range(self.max_iterations): for iteration in range(self.max_iterations):
any_congestion = False any_congestion = False
# Clear accumulation for this iteration so callback gets fresh data
self.accumulated_expanded_nodes = [] self.accumulated_expanded_nodes = []
self.metrics.reset_per_route() self.metrics.reset_per_route()
if shuffle_nets and (iteration > 0 or initial_paths is None): logger.info(f'PathFinder Iteration {iteration}...')
# 0. Shuffle nets if requested
if shuffle_nets:
# Use a new seed based on iteration for deterministic different orders
it_seed = (seed + iteration) if seed is not None else None it_seed = (seed + iteration) if seed is not None else None
random.Random(it_seed).shuffle(all_net_ids) random.Random(it_seed).shuffle(all_net_ids)
# Sequence through nets
for net_id in all_net_ids: for net_id in all_net_ids:
start, target = netlist[net_id] start, target = netlist[net_id]
if time.monotonic() - start_time > session_timeout: # Timeout check
self.cost_evaluator.collision_engine.dynamic_tree = None elapsed = time.monotonic() - start_time
self.cost_evaluator.collision_engine._ensure_dynamic_tree() if elapsed > session_timeout:
return self.verify_all_nets(results, netlist) logger.warning(f'PathFinder TIMEOUT after {elapsed:.2f}s')
return self._finalize_results(results, netlist)
width = net_widths.get(net_id, 2.0) width = net_widths.get(net_id, 2.0)
self.cost_evaluator.collision_engine.remove_path(net_id)
path: list[ComponentResult] | None = None
# 1. Rip-up existing path
self.cost_evaluator.collision_engine.remove_path(net_id)
# 2. Reroute or Use Initial Path
path = None
# Warm Start Logic: Use provided path for Iteration 0
if iteration == 0 and initial_paths and net_id in initial_paths: if iteration == 0 and initial_paths and net_id in initial_paths:
path = initial_paths[net_id] path = initial_paths[net_id]
logger.debug(f' Net {net_id} used Warm Start path.')
else: else:
# Standard Routing Logic
target_coll_model = self.context.config.bend_collision_type target_coll_model = self.context.config.bend_collision_type
coll_model = target_coll_model coll_model = target_coll_model
skip_cong = False skip_cong = False
@ -328,36 +277,41 @@ class PathFinder:
if target_coll_model == "arc": if target_coll_model == "arc":
coll_model = "clipped_bbox" coll_model = "clipped_bbox"
base_node_limit = self.context.config.node_limit
current_node_limit = base_node_limit
net_start = time.monotonic()
path = route_astar( path = route_astar(
start, start, target, width, context=self.context, metrics=self.metrics,
target, net_id=net_id, bend_collision_type=coll_model, return_partial=True,
width, store_expanded=store_expanded, skip_congestion=skip_cong,
context=self.context,
metrics=self.metrics,
net_id=net_id,
bend_collision_type=coll_model,
return_partial=True,
store_expanded=store_expanded,
skip_congestion=skip_cong,
self_collision_check=(net_id in needs_sc), self_collision_check=(net_id in needs_sc),
node_limit=self.context.config.node_limit, node_limit=current_node_limit
) )
if store_expanded and self.metrics.last_expanded_nodes: if store_expanded and self.metrics.last_expanded_nodes:
self.accumulated_expanded_nodes.extend(self.metrics.last_expanded_nodes) self.accumulated_expanded_nodes.extend(self.metrics.last_expanded_nodes)
if not path: logger.debug(f' Net {net_id} routed in {time.monotonic() - net_start:.4f}s using {coll_model}')
results[net_id] = RoutingResult(net_id, [], False, 0, reached_target=False)
any_congestion = True
continue
if path:
# Check if reached exactly (relative to snapped target)
last_p = path[-1].end_port last_p = path[-1].end_port
reached = last_p == target snap = self.context.config.snap_size
from inire.geometry.components import snap_search_grid
reached = (abs(last_p.x - snap_search_grid(target.x, snap)) < TOLERANCE_LINEAR and
abs(last_p.y - snap_search_grid(target.y, snap)) < TOLERANCE_LINEAR and
abs(last_p.orientation - target.orientation) < 0.1)
if reached and net_id not in needs_sc and self._has_self_collision(path): # Check for self-collision if not already handled by router
if reached and net_id not in needs_sc:
if self._has_self_collision(path):
logger.info(f' Net {net_id} detected self-collision. Enabling protection for next iteration.')
needs_sc.add(net_id) needs_sc.add(net_id)
any_congestion = True any_congestion = True
# 3. Add to index (even if partial) so other nets negotiate around it
all_geoms = [] all_geoms = []
all_dilated = [] all_dilated = []
for res in path: for res in path:
@ -367,63 +321,120 @@ class PathFinder:
else: else:
dilation = self.cost_evaluator.collision_engine.clearance / 2.0 dilation = self.cost_evaluator.collision_engine.clearance / 2.0
all_dilated.extend([p.buffer(dilation) for p in res.geometry]) all_dilated.extend([p.buffer(dilation) for p in res.geometry])
self.cost_evaluator.collision_engine.add_path(net_id, all_geoms, dilated_geometry=all_dilated) self.cost_evaluator.collision_engine.add_path(net_id, all_geoms, dilated_geometry=all_dilated)
# Check if this new path has any congestion
collision_count = 0 collision_count = 0
if reached: if reached:
is_valid, collision_count = self.cost_evaluator.collision_engine.verify_path(net_id, path) verif_geoms = []
any_congestion = any_congestion or not is_valid verif_dilated = []
for res in path:
is_proxy = (res.actual_geometry is not None)
g = res.actual_geometry if is_proxy else res.geometry
verif_geoms.extend(g)
results[net_id] = RoutingResult(net_id, path, reached and collision_count == 0, collision_count, reached_target=reached) if is_proxy:
if res.dilated_actual_geometry:
verif_dilated.extend(res.dilated_actual_geometry)
else:
dilation = self.cost_evaluator.collision_engine.clearance / 2.0
verif_dilated.extend([p.buffer(dilation) for p in g])
else:
if res.dilated_geometry:
verif_dilated.extend(res.dilated_geometry)
else:
dilation = self.cost_evaluator.collision_engine.clearance / 2.0
verif_dilated.extend([p.buffer(dilation) for p in g])
self.cost_evaluator.collision_engine._ensure_dynamic_tree()
if self.cost_evaluator.collision_engine.dynamic_tree:
# Vectorized query for all polygons in the path
res_indices, tree_indices = self.cost_evaluator.collision_engine.dynamic_tree.query(verif_dilated, predicate='intersects')
for hit_idx in tree_indices:
obj_id = self.cost_evaluator.collision_engine.dynamic_obj_ids[hit_idx]
other_net_id, _ = self.cost_evaluator.collision_engine.dynamic_geometries[obj_id]
if other_net_id != net_id:
collision_count += 1
if collision_count > 0:
any_congestion = True
logger.debug(f' Net {net_id}: reached={reached}, collisions={collision_count}')
results[net_id] = RoutingResult(net_id, path, (collision_count == 0 and reached), collision_count, reached_target=reached)
else:
results[net_id] = RoutingResult(net_id, [], False, 0, reached_target=False)
any_congestion = True # Total failure might need a retry with different ordering
if iteration_callback: if iteration_callback:
iteration_callback(iteration, results) iteration_callback(iteration, results)
if not any_congestion: if not any_congestion:
break break
self.cost_evaluator.congestion_penalty *= self.congestion_multiplier self.cost_evaluator.congestion_penalty *= self.congestion_multiplier
if self.refine_paths and results: return self._finalize_results(results, netlist)
for net_id in all_net_ids:
res = results.get(net_id)
if not res or not res.path or not res.reached_target or not res.is_valid:
continue
start, target = netlist[net_id]
width = net_widths.get(net_id, 2.0)
self.cost_evaluator.collision_engine.remove_path(net_id)
refined_path = self._refine_path(net_id, start, target, width, res.path)
all_geoms, all_dilated = self._extract_geometry(refined_path)
self.cost_evaluator.collision_engine.add_path(net_id, all_geoms, dilated_geometry=all_dilated)
results[net_id] = RoutingResult(
net_id=net_id,
path=refined_path,
is_valid=res.is_valid,
collisions=res.collisions,
reached_target=res.reached_target,
)
self.cost_evaluator.collision_engine.dynamic_tree = None def _finalize_results(
self.cost_evaluator.collision_engine._ensure_dynamic_tree()
return self.verify_all_nets(results, netlist)
def verify_all_nets(
self, self,
results: dict[str, RoutingResult], results: dict[str, RoutingResult],
netlist: dict[str, tuple[Port, Port]], netlist: dict[str, tuple[Port, Port]],
) -> dict[str, RoutingResult]: ) -> dict[str, RoutingResult]:
final_results: dict[str, RoutingResult] = {} """
for net_id, (_, target_p) in netlist.items(): Final check: re-verify all nets against the final static paths.
"""
logger.debug(f'Finalizing results for nets: {list(results.keys())}')
final_results = {}
for net_id in netlist:
res = results.get(net_id) res = results.get(net_id)
if not res or not res.path: if not res or not res.path:
final_results[net_id] = RoutingResult(net_id, [], False, 0) final_results[net_id] = RoutingResult(net_id, [], False, 0)
continue continue
if not res.reached_target:
# Skip re-verification for partial paths to avoid massive performance hit
final_results[net_id] = res
continue
collision_count = 0
verif_geoms = []
verif_dilated = []
for comp in res.path:
is_proxy = (comp.actual_geometry is not None)
g = comp.actual_geometry if is_proxy else comp.geometry
verif_geoms.extend(g)
if is_proxy:
if comp.dilated_actual_geometry:
verif_dilated.extend(comp.dilated_actual_geometry)
else:
dilation = self.cost_evaluator.collision_engine.clearance / 2.0
verif_dilated.extend([p.buffer(dilation) for p in g])
else:
if comp.dilated_geometry:
verif_dilated.extend(comp.dilated_geometry)
else:
dilation = self.cost_evaluator.collision_engine.clearance / 2.0
verif_dilated.extend([p.buffer(dilation) for p in g])
self.cost_evaluator.collision_engine._ensure_dynamic_tree()
if self.cost_evaluator.collision_engine.dynamic_tree:
# Vectorized query
res_indices, tree_indices = self.cost_evaluator.collision_engine.dynamic_tree.query(verif_dilated, predicate='intersects')
for hit_idx in tree_indices:
obj_id = self.cost_evaluator.collision_engine.dynamic_obj_ids[hit_idx]
other_net_id, _ = self.cost_evaluator.collision_engine.dynamic_geometries[obj_id]
if other_net_id != net_id:
collision_count += 1
target_p = netlist[net_id][1]
last_p = res.path[-1].end_port last_p = res.path[-1].end_port
reached = last_p == target_p snap = self.context.config.snap_size
is_valid, collisions = self.cost_evaluator.collision_engine.verify_path(net_id, res.path) from inire.geometry.components import snap_search_grid
final_results[net_id] = RoutingResult( reached = (abs(last_p.x - snap_search_grid(target_p.x, snap)) < TOLERANCE_LINEAR and
net_id=net_id, abs(last_p.y - snap_search_grid(target_p.y, snap)) < TOLERANCE_LINEAR and
path=res.path, abs(last_p.orientation - target_p.orientation) < 0.1)
is_valid=(is_valid and reached),
collisions=collisions, final_results[net_id] = RoutingResult(net_id, res.path, (collision_count == 0 and reached), collision_count, reached_target=reached)
reached_target=reached,
)
return final_results return final_results

View file

@ -16,7 +16,7 @@ class VisibilityManager:
""" """
Manages corners of static obstacles for sparse A* / Visibility Graph jumps. Manages corners of static obstacles for sparse A* / Visibility Graph jumps.
""" """
__slots__ = ('collision_engine', 'corners', 'corner_index', '_corner_graph', '_static_visibility_cache', '_built_static_version') __slots__ = ('collision_engine', 'corners', 'corner_index', '_corner_graph', '_static_visibility_cache')
def __init__(self, collision_engine: CollisionEngine) -> None: def __init__(self, collision_engine: CollisionEngine) -> None:
self.collision_engine = collision_engine self.collision_engine = collision_engine
@ -24,28 +24,12 @@ class VisibilityManager:
self.corner_index = rtree.index.Index() self.corner_index = rtree.index.Index()
self._corner_graph: dict[int, list[tuple[float, float, float]]] = {} self._corner_graph: dict[int, list[tuple[float, float, float]]] = {}
self._static_visibility_cache: dict[tuple[int, int], list[tuple[float, float, float]]] = {} self._static_visibility_cache: dict[tuple[int, int], list[tuple[float, float, float]]] = {}
self._built_static_version = -1
self._build() self._build()
def clear_cache(self) -> None:
"""
Reset all static visibility data.
"""
self.corners = []
self.corner_index = rtree.index.Index()
self._corner_graph = {}
self._static_visibility_cache = {}
self._build()
def _ensure_current(self) -> None:
if self._built_static_version != self.collision_engine._static_version:
self.clear_cache()
def _build(self) -> None: def _build(self) -> None:
""" """
Extract corners and pre-compute corner-to-corner visibility. Extract corners and pre-compute corner-to-corner visibility.
""" """
self._built_static_version = self.collision_engine._static_version
raw_corners = [] raw_corners = []
for obj_id, poly in self.collision_engine.static_dilated.items(): for obj_id, poly in self.collision_engine.static_dilated.items():
coords = list(poly.exterior.coords) coords = list(poly.exterior.coords)
@ -61,7 +45,7 @@ class VisibilityManager:
if not raw_corners: if not raw_corners:
return return
# Deduplicate repeated corner coordinates # Deduplicate and snap to 1nm
seen = set() seen = set()
for x, y in raw_corners: for x, y in raw_corners:
sx, sy = round(x, 3), round(y, 3) sx, sy = round(x, 3), round(y, 3)
@ -97,7 +81,6 @@ class VisibilityManager:
Find all corners visible from the origin. Find all corners visible from the origin.
Returns list of (x, y, distance). Returns list of (x, y, distance).
""" """
self._ensure_current()
if max_dist < 0: if max_dist < 0:
return [] return []
@ -140,20 +123,3 @@ class VisibilityManager:
self._static_visibility_cache[cache_key] = visible self._static_visibility_cache[cache_key] = visible
return visible return visible
def get_corner_visibility(self, origin: Port, max_dist: float = 1000.0) -> list[tuple[float, float, float]]:
"""
Return precomputed visibility only when the origin is already at a known corner.
This avoids the expensive arbitrary-point visibility scan in hot search paths.
"""
self._ensure_current()
if max_dist < 0:
return []
ox, oy = round(origin.x, 3), round(origin.y, 3)
nearby = list(self.corner_index.intersection((ox - 0.001, oy - 0.001, ox + 0.001, oy + 0.001)))
for idx in nearby:
cx, cy = self.corners[idx]
if abs(cx - ox) < 1e-4 and abs(cy - oy) < 1e-4 and idx in self._corner_graph:
return [corner for corner in self._corner_graph[idx] if corner[2] <= max_dist]
return []

View file

@ -1,311 +0,0 @@
from __future__ import annotations
from dataclasses import dataclass
from time import perf_counter
from typing import Callable
from shapely.geometry import Polygon, box
from inire.geometry.collision import CollisionEngine
from inire.geometry.primitives import Port
from inire.router.astar import AStarContext, AStarMetrics
from inire.router.cost import CostEvaluator
from inire.router.danger_map import DangerMap
from inire.router.pathfinder import PathFinder, RoutingResult
@dataclass(frozen=True)
class ScenarioOutcome:
    """Summary of a single example run: timing plus aggregate result counters."""

    # Wall-clock duration of the routing call(s), in seconds.
    duration_s: float
    # Total number of RoutingResult entries produced.
    total_results: int
    # How many results were flagged is_valid.
    valid_results: int
    # How many results were flagged reached_target.
    reached_targets: int
@dataclass(frozen=True)
class ScenarioDefinition:
    """A named, runnable example scenario."""

    # Human-readable scenario identifier (used in the SCENARIOS registry).
    name: str
    # Zero-argument callable that executes the scenario and returns its summary.
    run: Callable[[], ScenarioOutcome]
def _build_router(
    *,
    bounds: tuple[float, float, float, float],
    clearance: float = 2.0,
    obstacles: list[Polygon] | None = None,
    evaluator_kwargs: dict[str, float] | None = None,
    context_kwargs: dict[str, object] | None = None,
    pathfinder_kwargs: dict[str, object] | None = None,
) -> tuple[CollisionEngine, CostEvaluator, AStarContext, AStarMetrics, PathFinder]:
    """Assemble the full routing stack for one scenario.

    Builds a CollisionEngine seeded with the static obstacles, a DangerMap
    precomputed over the same obstacles, and the evaluator/context/metrics/
    pathfinder chain on top of them. Returns all five pieces so callers can
    keep handles to whichever layer they need.
    """
    blockers = [] if obstacles is None else obstacles

    engine = CollisionEngine(clearance=clearance)
    for blocker in blockers:
        engine.add_static_obstacle(blocker)

    danger = DangerMap(bounds=bounds)
    danger.precompute(blockers)

    evaluator = CostEvaluator(engine, danger, **(evaluator_kwargs or {}))
    context = AStarContext(evaluator, **(context_kwargs or {}))
    metrics = AStarMetrics()
    finder = PathFinder(context, metrics, **(pathfinder_kwargs or {}))
    return engine, evaluator, context, metrics, finder
def _summarize(results: dict[str, RoutingResult], duration_s: float) -> ScenarioOutcome:
    """Condense per-net routing results into one ScenarioOutcome record."""
    outcomes = list(results.values())
    valid = sum(1 for outcome in outcomes if outcome.is_valid)
    reached = sum(1 for outcome in outcomes if outcome.reached_target)
    return ScenarioOutcome(
        duration_s=duration_s,
        total_results=len(outcomes),
        valid_results=valid,
        reached_targets=reached,
    )
def run_example_01() -> ScenarioOutcome:
    """Route a single straight horizontal net across an empty 100x100 region."""
    pathfinder = _build_router(bounds=(0, 0, 100, 100), context_kwargs={"bend_radii": [10.0]})[-1]
    nets = {"net1": (Port(10, 50, 0), Port(90, 50, 0))}
    started = perf_counter()
    results = pathfinder.route_all(nets, {"net1": 2.0})
    return _summarize(results, perf_counter() - started)
def run_example_02() -> ScenarioOutcome:
    """Route three mutually crossing nets with congestion penalties enabled."""
    *_, pathfinder = _build_router(
        bounds=(0, 0, 100, 100),
        evaluator_kwargs={
            "greedy_h_weight": 1.5,
            "bend_penalty": 50.0,
            "sbend_penalty": 150.0,
        },
        context_kwargs={"bend_radii": [10.0], "sbend_radii": [10.0]},
        pathfinder_kwargs={"base_congestion_penalty": 1000.0},
    )
    nets = {
        "horizontal": (Port(10, 50, 0), Port(90, 50, 0)),
        "vertical_up": (Port(45, 10, 90), Port(45, 90, 90)),
        "vertical_down": (Port(55, 90, 270), Port(55, 10, 270)),
    }
    started = perf_counter()
    results = pathfinder.route_all(nets, dict.fromkeys(nets, 2.0))
    return _summarize(results, perf_counter() - started)
def run_example_03() -> ScenarioOutcome:
    """Route netA, lock it in the collision engine, then route netB across it."""
    built = _build_router(bounds=(0, -50, 100, 50), context_kwargs={"bend_radii": [10.0]})
    engine, pathfinder = built[0], built[-1]

    started = perf_counter()
    first = pathfinder.route_all({"netA": (Port(10, 0, 0), Port(90, 0, 0))}, {"netA": 2.0})
    # Freeze netA so the second routing pass must respect its geometry.
    engine.lock_net("netA")
    second = pathfinder.route_all({"netB": (Port(50, -20, 90), Port(50, 20, 90))}, {"netB": 2.0})
    elapsed = perf_counter() - started

    combined = dict(first)
    combined.update(second)
    return _summarize(combined, elapsed)
def run_example_04() -> ScenarioOutcome:
    """Route one short-offset net and one long net with two bend radii available."""
    *_, pathfinder = _build_router(
        bounds=(0, 0, 100, 100),
        evaluator_kwargs={
            "unit_length_cost": 1.0,
            "bend_penalty": 10.0,
            "sbend_penalty": 20.0,
        },
        context_kwargs={
            "node_limit": 50000,
            "bend_radii": [10.0, 30.0],
            "sbend_offsets": [5.0],
            "bend_penalty": 10.0,
            "sbend_penalty": 20.0,
        },
    )
    nets = {
        "sbend_only": (Port(10, 50, 0), Port(60, 55, 0)),
        "multi_radii": (Port(10, 10, 0), Port(90, 90, 0)),
    }
    started = perf_counter()
    results = pathfinder.route_all(nets, dict.fromkeys(nets, 2.0))
    return _summarize(results, perf_counter() - started)
def run_example_05() -> ScenarioOutcome:
    """Stress port-orientation handling: U-turn, loop-back, and a long lateral net."""
    *_, pathfinder = _build_router(
        bounds=(0, 0, 200, 200),
        evaluator_kwargs={"bend_penalty": 50.0},
        context_kwargs={"bend_radii": [20.0]},
    )
    nets = {
        "u_turn": (Port(50, 50, 0), Port(50, 70, 180)),
        "loop": (Port(100, 100, 90), Port(100, 80, 270)),
        "zig_zag": (Port(20, 150, 0), Port(180, 150, 0)),
    }
    started = perf_counter()
    results = pathfinder.route_all(nets, dict.fromkeys(nets, 2.0))
    return _summarize(results, perf_counter() - started)
def run_example_06() -> ScenarioOutcome:
    """Route one net under each of the three bend collision models.

    The three contexts share one collision engine / evaluator; each gets its
    own net routed with its own PathFinder (tiered strategy disabled).
    """
    bounds = (-20, -20, 170, 170)
    obstacles = [
        box(40, 110, 60, 130),
        box(40, 60, 60, 80),
        box(40, 10, 60, 30),
    ]
    engine = CollisionEngine(clearance=2.0)
    for blocker in obstacles:
        engine.add_static_obstacle(blocker)
    danger = DangerMap(bounds=bounds)
    danger.precompute(obstacles)
    evaluator = CostEvaluator(engine, danger, bend_penalty=50.0, sbend_penalty=150.0)

    # One (context, netlist, widths) triple per collision model.
    cases = [
        (
            AStarContext(evaluator, bend_radii=[10.0], bend_collision_type="arc"),
            {"arc_model": (Port(10, 120, 0), Port(90, 140, 90))},
            {"arc_model": 2.0},
        ),
        (
            AStarContext(evaluator, bend_radii=[10.0], bend_collision_type="bbox"),
            {"bbox_model": (Port(10, 70, 0), Port(90, 90, 90))},
            {"bbox_model": 2.0},
        ),
        (
            AStarContext(evaluator, bend_radii=[10.0], bend_collision_type="clipped_bbox", bend_clip_margin=1.0),
            {"clipped_model": (Port(10, 20, 0), Port(90, 40, 90))},
            {"clipped_model": 2.0},
        ),
    ]

    started = perf_counter()
    merged: dict[str, RoutingResult] = {}
    for context, nets, net_widths in cases:
        merged.update(PathFinder(context, use_tiered_strategy=False).route_all(nets, net_widths))
    return _summarize(merged, perf_counter() - started)
def run_example_07() -> ScenarioOutcome:
    """Larger multi-net scenario: 10 nets routed through the gap between two tall obstacles."""
    bounds = (0, 0, 1000, 1000)
    # Two vertical slabs leaving a 200um-wide channel around y=400..600 at x=450..550.
    obstacles = [
        box(450, 0, 550, 400),
        box(450, 600, 550, 1000),
    ]
    _, evaluator, _, metrics, pathfinder = _build_router(
        bounds=bounds,
        clearance=6.0,
        obstacles=obstacles,
        evaluator_kwargs={
            "greedy_h_weight": 1.5,
            "unit_length_cost": 0.1,
            "bend_penalty": 100.0,
            "sbend_penalty": 400.0,
            "congestion_penalty": 100.0,
        },
        context_kwargs={
            "node_limit": 2000000,
            "bend_radii": [50.0],
            "sbend_radii": [50.0],
        },
        pathfinder_kwargs={
            "max_iterations": 15,
            "base_congestion_penalty": 100.0,
            "congestion_multiplier": 1.4,
        },
    )
    num_nets = 10
    start_x = 50
    # Starts are packed around y=500 at a 10um pitch ...
    start_y_base = 500 - (num_nets * 10.0) / 2.0
    end_x = 950
    end_y_base = 100
    # ... while the ends fan out evenly over y=100..900.
    end_y_pitch = 800.0 / (num_nets - 1)
    netlist = {}
    for index in range(num_nets):
        sy = int(round(start_y_base + index * 10.0))
        ey = int(round(end_y_base + index * end_y_pitch))
        netlist[f"net_{index:02d}"] = (Port(start_x, sy, 0), Port(end_x, ey, 0))

    def iteration_callback(idx: int, current_results: dict[str, RoutingResult]) -> None:
        # Anneal greedy_h_weight from 1.5 toward 1.1 as iterations progress,
        # mutating the shared evaluator in place between iterations.
        new_greedy = max(1.1, 1.5 - ((idx + 1) / 10.0) * 0.4)
        evaluator.greedy_h_weight = new_greedy
        metrics.reset_per_route()

    t0 = perf_counter()
    results = pathfinder.route_all(
        netlist,
        dict.fromkeys(netlist, 2.0),
        store_expanded=True,
        iteration_callback=iteration_callback,
        shuffle_nets=True,
        seed=42,  # fixed seed keeps the shuffled net order reproducible
    )
    t1 = perf_counter()
    return _summarize(results, t1 - t0)
def run_example_08() -> ScenarioOutcome:
    """Route the same net pair with a standard bend model and a custom bend polygon."""
    engine = CollisionEngine(clearance=2.0)
    danger = DangerMap(bounds=(0, 0, 150, 150))
    danger.precompute([])
    evaluator = CostEvaluator(engine, danger, bend_penalty=50.0, sbend_penalty=150.0)
    metrics = AStarMetrics()

    nets = {"custom_bend": (Port(20, 20, 0), Port(100, 100, 90))}
    widths = {"custom_bend": 2.0}

    standard_context = AStarContext(evaluator, bend_radii=[10.0], sbend_radii=[])
    # Custom collision footprint: a 20x20 square polygon supplied directly
    # as the bend collision model.
    custom_context = AStarContext(
        evaluator,
        bend_radii=[10.0],
        bend_collision_type=Polygon([(-10, -10), (10, -10), (10, 10), (-10, 10)]),
        sbend_radii=[],
    )

    started = perf_counter()
    standard_results = PathFinder(standard_context, metrics).route_all(nets, widths)
    custom_results = PathFinder(custom_context, AStarMetrics(), use_tiered_strategy=False).route_all(
        {"custom_model": nets["custom_bend"]},
        {"custom_model": 2.0},
    )
    elapsed = perf_counter() - started
    return _summarize({**standard_results, **custom_results}, elapsed)
def run_example_09() -> ScenarioOutcome:
    """Route with node_limit=3 so the search budget is exhausted almost immediately."""
    *_, pathfinder = _build_router(
        bounds=(0, 0, 100, 100),
        obstacles=[
            box(35, 35, 45, 65),
            box(55, 35, 65, 65),
        ],
        evaluator_kwargs={"bend_penalty": 50.0, "sbend_penalty": 150.0},
        context_kwargs={"node_limit": 3, "bend_radii": [10.0]},
        pathfinder_kwargs={"warm_start": None},
    )
    nets = {"budget_limited_net": (Port(10, 50, 0), Port(85, 60, 180))}
    started = perf_counter()
    results = pathfinder.route_all(nets, {"budget_limited_net": 2.0})
    return _summarize(results, perf_counter() - started)
# Ordered registry of every runnable example scenario in this module.
SCENARIOS: tuple[ScenarioDefinition, ...] = (
    ScenarioDefinition("example_01_simple_route", run_example_01),
    ScenarioDefinition("example_02_congestion_resolution", run_example_02),
    ScenarioDefinition("example_03_locked_paths", run_example_03),
    ScenarioDefinition("example_04_sbends_and_radii", run_example_04),
    ScenarioDefinition("example_05_orientation_stress", run_example_05),
    ScenarioDefinition("example_06_bend_collision_models", run_example_06),
    ScenarioDefinition("example_07_large_scale_routing", run_example_07),
    ScenarioDefinition("example_08_custom_bend_geometry", run_example_08),
    ScenarioDefinition("example_09_unroutable_best_effort", run_example_09),
)

View file

@ -1,8 +1,6 @@
import pytest import pytest
from shapely.geometry import Polygon from shapely.geometry import Polygon
import inire.router.astar as astar_module
from inire.geometry.components import SBend, Straight
from inire.geometry.collision import CollisionEngine from inire.geometry.collision import CollisionEngine
from inire.geometry.primitives import Port from inire.geometry.primitives import Port
from inire.router.astar import AStarContext, route_astar from inire.router.astar import AStarContext, route_astar
@ -21,7 +19,7 @@ def basic_evaluator() -> CostEvaluator:
def test_astar_straight(basic_evaluator: CostEvaluator) -> None: def test_astar_straight(basic_evaluator: CostEvaluator) -> None:
context = AStarContext(basic_evaluator) context = AStarContext(basic_evaluator, snap_size=1.0)
start = Port(0, 0, 0) start = Port(0, 0, 0)
target = Port(50, 0, 0) target = Port(50, 0, 0)
path = route_astar(start, target, net_width=2.0, context=context) path = route_astar(start, target, net_width=2.0, context=context)
@ -37,7 +35,7 @@ def test_astar_straight(basic_evaluator: CostEvaluator) -> None:
def test_astar_bend(basic_evaluator: CostEvaluator) -> None: def test_astar_bend(basic_evaluator: CostEvaluator) -> None:
context = AStarContext(basic_evaluator, bend_radii=[10.0]) context = AStarContext(basic_evaluator, snap_size=1.0, bend_radii=[10.0])
start = Port(0, 0, 0) start = Port(0, 0, 0)
# 20um right, 20um up. Needs a 10um bend and a 10um bend. # 20um right, 20um up. Needs a 10um bend and a 10um bend.
target = Port(20, 20, 0) target = Port(20, 20, 0)
@ -58,7 +56,7 @@ def test_astar_obstacle(basic_evaluator: CostEvaluator) -> None:
basic_evaluator.collision_engine.add_static_obstacle(obstacle) basic_evaluator.collision_engine.add_static_obstacle(obstacle)
basic_evaluator.danger_map.precompute([obstacle]) basic_evaluator.danger_map.precompute([obstacle])
context = AStarContext(basic_evaluator, bend_radii=[10.0], node_limit=1000000) context = AStarContext(basic_evaluator, snap_size=1.0, bend_radii=[10.0], node_limit=1000000)
start = Port(0, 0, 0) start = Port(0, 0, 0)
target = Port(60, 0, 0) target = Port(60, 0, 0)
path = route_astar(start, target, net_width=2.0, context=context) path = route_astar(start, target, net_width=2.0, context=context)
@ -72,218 +70,20 @@ def test_astar_obstacle(basic_evaluator: CostEvaluator) -> None:
assert validation["total_length"] > 50.0 assert validation["total_length"] > 50.0
def test_astar_uses_integerized_ports(basic_evaluator: CostEvaluator) -> None: def test_astar_snap_to_target_lookahead(basic_evaluator: CostEvaluator) -> None:
context = AStarContext(basic_evaluator) context = AStarContext(basic_evaluator, snap_size=1.0)
# Target is NOT on 1um grid
start = Port(0, 0, 0) start = Port(0, 0, 0)
target = Port(10.1, 0, 0) target = Port(10.1, 0, 0)
path = route_astar(start, target, net_width=2.0, context=context) path = route_astar(start, target, net_width=2.0, context=context)
assert path is not None assert path is not None
result = RoutingResult(net_id="test", path=path, is_valid=True, collisions=0) result = RoutingResult(net_id="test", path=path, is_valid=True, collisions=0)
assert target.x == 10
validation = validate_routing_result(result, [], clearance=2.0, expected_start=start, expected_end=target) # Under the new Enforce Grid policy, the router snaps the target internally to 10.0.
# We validate against the snapped target.
from inire.geometry.components import snap_search_grid
target_snapped = Port(snap_search_grid(target.x, 1.0), snap_search_grid(target.y, 1.0), target.orientation, snap=False)
validation = validate_routing_result(result, [], clearance=2.0, expected_start=start, expected_end=target_snapped)
assert validation["is_valid"], f"Validation failed: {validation.get('reason')}" assert validation["is_valid"], f"Validation failed: {validation.get('reason')}"
def test_expand_moves_only_shortens_consecutive_straights(
    basic_evaluator: CostEvaluator,
    monkeypatch: pytest.MonkeyPatch,
) -> None:
    """After a 20um straight, every straight candidate expand_moves emits is strictly shorter.

    process_move is stubbed out so we can capture the (move class, params)
    pairs that expand_moves proposes without running the cost pipeline.
    """
    context = AStarContext(basic_evaluator, min_straight_length=5.0, max_straight_length=100.0)
    # Seed the search state with a node whose previous component is a 20um straight.
    prev_result = Straight.generate(Port(0, 0, 0), 20.0, width=2.0, dilation=1.0)
    current = astar_module.AStarNode(
        prev_result.end_port,
        g_cost=prev_result.length,
        h_cost=0.0,
        component_result=prev_result,
    )
    emitted: list[tuple[str, tuple]] = []

    def fake_process_move(*args, **kwargs) -> None:
        # args[9] is the move class (e.g. "S" for straight), args[10] its parameters.
        emitted.append((args[9], args[10]))

    monkeypatch.setattr(astar_module, "process_move", fake_process_move)
    astar_module.expand_moves(
        current,
        Port(80, 0, 0),
        net_width=2.0,
        net_id="test",
        open_set=[],
        closed_set={},
        context=context,
        metrics=astar_module.AStarMetrics(),
        congestion_cache={},
    )
    straight_lengths = [params[0] for move_class, params in emitted if move_class == "S"]
    assert straight_lengths
    # A straight that directly follows another straight must be shorter than it.
    assert all(length < prev_result.length for length in straight_lengths)
def test_expand_moves_does_not_chain_sbends(
    basic_evaluator: CostEvaluator,
    monkeypatch: pytest.MonkeyPatch,
) -> None:
    """expand_moves must not propose an S-bend immediately after an S-bend.

    The current node's previous component is an SBend; with process_move
    stubbed, we assert no "SB" move class is emitted while other moves are.
    """
    context = AStarContext(basic_evaluator, sbend_radii=[10.0], sbend_offsets=[5.0], max_straight_length=100.0)
    prev_result = SBend.generate(Port(0, 0, 0), 5.0, 10.0, width=2.0, dilation=1.0)
    current = astar_module.AStarNode(
        prev_result.end_port,
        g_cost=prev_result.length,
        h_cost=0.0,
        component_result=prev_result,
    )
    emitted: list[str] = []

    def fake_process_move(*args, **kwargs) -> None:
        # args[9] is the move class string.
        emitted.append(args[9])

    monkeypatch.setattr(astar_module, "process_move", fake_process_move)
    astar_module.expand_moves(
        current,
        Port(60, 10, 0),
        net_width=2.0,
        net_id="test",
        open_set=[],
        closed_set={},
        context=context,
        metrics=astar_module.AStarMetrics(),
        congestion_cache={},
    )
    assert "SB" not in emitted
    assert emitted
def test_expand_moves_adds_sbend_aligned_straight_stop_points(
    basic_evaluator: CostEvaluator,
    monkeypatch: pytest.MonkeyPatch,
) -> None:
    """Straight candidates include stop points that leave room for one or two S-bends.

    With the target 100um ahead, expand_moves should emit straights of length
    100 - span and 100 - 2*span, where span is the forward extent of an S-bend
    (radius 10, offset 10) as computed by _sbend_forward_span.
    """
    context = AStarContext(
        basic_evaluator,
        bend_radii=[10.0],
        sbend_radii=[10.0],
        max_straight_length=150.0,
    )
    current = astar_module.AStarNode(Port(0, 0, 0), g_cost=0.0, h_cost=0.0)
    emitted: list[tuple[str, tuple]] = []

    def fake_process_move(*args, **kwargs) -> None:
        # Capture (move class, params) for each proposed move.
        emitted.append((args[9], args[10]))

    monkeypatch.setattr(astar_module, "process_move", fake_process_move)
    astar_module.expand_moves(
        current,
        Port(100, 10, 0),
        net_width=2.0,
        net_id="test",
        open_set=[],
        closed_set={},
        context=context,
        metrics=astar_module.AStarMetrics(),
        congestion_cache={},
    )
    straight_lengths = {params[0] for move_class, params in emitted if move_class == "S"}
    sbend_span = astar_module._sbend_forward_span(10.0, 10.0)
    assert sbend_span is not None
    assert int(round(100.0 - sbend_span)) in straight_lengths
    assert int(round(100.0 - 2.0 * sbend_span)) in straight_lengths
def test_expand_moves_adds_exact_corner_visibility_stop_points(
    basic_evaluator: CostEvaluator,
    monkeypatch: pytest.MonkeyPatch,
) -> None:
    """In "exact_corner" mode, visible-corner positions become straight stop points.

    get_corner_visibility is stubbed to report corners at x=40 and x=75; since
    the node faces 0 degrees, straights of length 40 and 75 should be emitted.
    """
    context = AStarContext(
        basic_evaluator,
        bend_radii=[10.0],
        max_straight_length=150.0,
        visibility_guidance="exact_corner",
    )
    current = astar_module.AStarNode(Port(0, 0, 0), g_cost=0.0, h_cost=0.0)
    # Fake two visible corners as (x, y, distance) tuples.
    monkeypatch.setattr(
        astar_module.VisibilityManager,
        "get_corner_visibility",
        lambda self, origin, max_dist=0.0: [(40.0, 10.0, 41.23), (75.0, -15.0, 76.48)],
    )
    emitted: list[tuple[str, tuple]] = []

    def fake_process_move(*args, **kwargs) -> None:
        emitted.append((args[9], args[10]))

    monkeypatch.setattr(astar_module, "process_move", fake_process_move)
    astar_module.expand_moves(
        current,
        Port(120, 20, 0),
        net_width=2.0,
        net_id="test",
        open_set=[],
        closed_set={},
        context=context,
        metrics=astar_module.AStarMetrics(),
        congestion_cache={},
    )
    straight_lengths = {params[0] for move_class, params in emitted if move_class == "S"}
    assert 40 in straight_lengths
    assert 75 in straight_lengths
def test_expand_moves_adds_tangent_corner_visibility_stop_points(
    basic_evaluator: CostEvaluator,
    monkeypatch: pytest.MonkeyPatch,
) -> None:
    """In "tangent_corner" mode, straights stop short of corners by the bend radius.

    Corners are injected at x=50 and x=80 and ray_cast is stubbed to always
    report a clear line of sight; with bend radius 10, straights of length
    40 and 70 (corner x minus radius — TODO confirm exact tangent geometry)
    should be proposed.
    """

    class DummyCornerIndex:
        # Minimal stand-in for the rtree corner index: every query hits both corners.
        def intersection(self, bounds: tuple[float, float, float, float]) -> list[int]:
            return [0, 1]

    context = AStarContext(
        basic_evaluator,
        bend_radii=[10.0],
        sbend_radii=[],
        max_straight_length=150.0,
        visibility_guidance="tangent_corner",
    )
    current = astar_module.AStarNode(Port(0, 0, 0), g_cost=0.0, h_cost=0.0)
    # Disable the static-version freshness check so the injected corners survive.
    monkeypatch.setattr(astar_module.VisibilityManager, "_ensure_current", lambda self: None)
    context.visibility_manager.corners = [(50.0, 10.0), (80.0, -10.0)]
    context.visibility_manager.corner_index = DummyCornerIndex()
    # Every ray cast reports full reach (no obstructions).
    monkeypatch.setattr(
        type(context.cost_evaluator.collision_engine),
        "ray_cast",
        lambda self, origin, angle_deg, max_dist=2000.0, net_width=None: max_dist,
    )
    emitted: list[tuple[str, tuple]] = []

    def fake_process_move(*args, **kwargs) -> None:
        emitted.append((args[9], args[10]))

    monkeypatch.setattr(astar_module, "process_move", fake_process_move)
    astar_module.expand_moves(
        current,
        Port(120, 20, 0),
        net_width=2.0,
        net_id="test",
        open_set=[],
        closed_set={},
        context=context,
        metrics=astar_module.AStarMetrics(),
        congestion_cache={},
    )
    straight_lengths = {params[0] for move_class, params in emitted if move_class == "S"}
    assert 40 in straight_lengths
    assert 70 in straight_lengths

View file

@ -1,92 +0,0 @@
import pytest
import numpy
from shapely.geometry import Polygon
from inire.geometry.collision import CollisionEngine
from inire.geometry.primitives import Port
from inire.geometry.components import Straight
from inire.router.cost import CostEvaluator
from inire.router.danger_map import DangerMap
from inire.router.astar import AStarContext
from inire.router.pathfinder import PathFinder, RoutingResult
def test_clearance_thresholds():
    """
    Check that clearance is correctly calculated:
    two paths slightly beyond, exactly at, and clearly violating the minimum.
    """
    # Clearance = 2.0, Width = 2.0
    # Required Centerline-to-Centerline = (2+2)/2 + 2.0 = 4.0
    ce = CollisionEngine(clearance=2.0)
    # Net 1: Centerline at y=0
    p1 = Port(0, 0, 0)
    res1 = Straight.generate(p1, 50.0, width=2.0, dilation=1.0)
    ce.add_path("net1", res1.geometry, dilated_geometry=res1.dilated_geometry)
    # Net 2: Parallel to Net 1
    # 1. Beyond minimum spacing: y=5. Gap = 5 - 2 = 3 > 2. OK.
    p2_ok = Port(0, 5, 0)
    res2_ok = Straight.generate(p2_ok, 50.0, width=2.0, dilation=1.0)
    is_v, count = ce.verify_path("net2", [res2_ok])
    assert is_v, f"Gap 3 should be valid, but got {count} collisions"
    # 2. Exactly at: y=4.0. Gap = 4.0 - 2.0 = 2.0. OK.
    p2_exact = Port(0, 4, 0)
    res2_exact = Straight.generate(p2_exact, 50.0, width=2.0, dilation=1.0)
    is_v, count = ce.verify_path("net2", [res2_exact])
    assert is_v, f"Gap exactly 2.0 should be valid, but got {count} collisions"
    # 3. Violating: y=3. Gap = 3 - 2 = 1.0 < 2.0. FAIL.
    # (Comment previously claimed y=3.999 / gap 1.999, but the port is at y=3.)
    p2_fail = Port(0, 3, 0)
    res2_fail = Straight.generate(p2_fail, 50.0, width=2.0, dilation=1.0)
    is_v, count = ce.verify_path("net2", [res2_fail])
    assert not is_v, "Gap 1.0 should be invalid"
    assert count > 0
def test_verify_all_nets_cases():
    """
    Validate that verify_all_nets catches some common cases and doesn't flag reasonable non-failing cases.
    """
    engine = CollisionEngine(clearance=2.0)
    danger_map = DangerMap(bounds=(0, 0, 100, 100))
    danger_map.precompute([])
    evaluator = CostEvaluator(collision_engine=engine, danger_map=danger_map)
    context = AStarContext(cost_evaluator=evaluator)
    pf = PathFinder(context, warm_start=None, max_iterations=1)
    # Case 1: Parallel paths exactly at clearance (Should be VALID)
    netlist_parallel_ok = {
        "net1": (Port(0, 50, 0), Port(100, 50, 0)),
        "net2": (Port(0, 54, 0), Port(100, 54, 0)),
    }
    net_widths = {"net1": 2.0, "net2": 2.0}
    results = pf.route_all(netlist_parallel_ok, net_widths)
    assert results["net1"].is_valid, f"Exactly at clearance should be valid, collisions={results['net1'].collisions}"
    assert results["net2"].is_valid
    # Case 2: Parallel paths slightly within clearance (Should be INVALID)
    netlist_parallel_fail = {
        "net3": (Port(0, 20, 0), Port(100, 20, 0)),
        "net4": (Port(0, 23, 0), Port(100, 23, 0)),
    }
    # Reset engine
    engine.remove_path("net1")
    engine.remove_path("net2")
    # Fix: widths must be keyed by the nets actually routed (was net1/net2).
    results_p = pf.route_all(netlist_parallel_fail, {"net3": 2.0, "net4": 2.0})
    # verify_all_nets should flag both as invalid because they cross-collide
    assert not results_p["net3"].is_valid
    assert not results_p["net4"].is_valid
    # Case 3: Crossing paths (Should be INVALID)
    netlist_cross = {
        "net5": (Port(0, 75, 0), Port(100, 75, 0)),
        "net6": (Port(50, 0, 90), Port(50, 100, 90)),
    }
    engine.remove_path("net3")
    engine.remove_path("net4")
    results_c = pf.route_all(netlist_cross, {"net5": 2.0, "net6": 2.0})
    assert not results_c["net5"].is_valid
    assert not results_c["net6"].is_valid

View file

@ -2,7 +2,6 @@ from shapely.geometry import Polygon
from inire.geometry.collision import CollisionEngine from inire.geometry.collision import CollisionEngine
from inire.geometry.primitives import Port from inire.geometry.primitives import Port
from inire.geometry.components import Straight
def test_collision_detection() -> None: def test_collision_detection() -> None:
@ -39,7 +38,7 @@ def test_safety_zone() -> None:
engine.add_static_obstacle(obstacle) engine.add_static_obstacle(obstacle)
# Port exactly on the boundary # Port exactly on the boundary
start_port = Port(10, 12, 0) start_port = Port(10.0, 12.0, 0)
# Move starting from this port that overlaps the obstacle by 1nm # Move starting from this port that overlaps the obstacle by 1nm
# (Inside the 2nm safety zone) # (Inside the 2nm safety zone)
@ -60,46 +59,3 @@ def test_configurable_max_net_width() -> None:
# Dilated test_poly bounds: (14, 19, 17, 26). # Dilated test_poly bounds: (14, 19, 17, 26).
# obstacle: (20, 20, 25, 25). No physical collision. # obstacle: (20, 20, 25, 25). No physical collision.
assert not engine.is_collision(test_poly, net_width=2.0) assert not engine.is_collision(test_poly, net_width=2.0)
def test_ray_cast_width_clearance() -> None:
    """ray_cast must account for net width plus clearance when reporting reach."""
    # Clearance = 2.0um, Width = 2.0um.
    # Centerline to obstacle edge must be >= W/2 + C = 1.0 + 2.0 = 3.0um.
    engine = CollisionEngine(clearance=2.0)
    # Obstacle at x=10 to 20
    obstacle = Polygon([(10, 0), (20, 0), (20, 100), (10, 100)])
    engine.add_static_obstacle(obstacle)
    # 1. Parallel move at x=6. Gap = 10 - 6 = 4.0. Clearly OK.
    start_ok = Port(6, 50, 90)
    reach_ok = engine.ray_cast(start_ok, 90, max_dist=10.0, net_width=2.0)
    assert reach_ok >= 10.0
    # 2. Parallel move at x=8. Gap = 10 - 8 = 2.0. COLLISION.
    start_fail = Port(8, 50, 90)
    reach_fail = engine.ray_cast(start_fail, 90, max_dist=10.0, net_width=2.0)
    # A truncated reach (< max_dist) signals the ray was blocked.
    assert reach_fail < 10.0
def test_check_move_static_clearance() -> None:
    """check_move_static returns True on a clearance violation, False when clear."""
    engine = CollisionEngine(clearance=2.0)
    obstacle = Polygon([(10, 0), (20, 0), (20, 100), (10, 100)])
    engine.add_static_obstacle(obstacle)
    # Straight move of length 10 at x=8 (Width 2.0)
    # Gap = 10 - 8 = 2.0 < 3.0. COLLISION.
    start = Port(8, 0, 90)
    res = Straight.generate(start, 10.0, width=2.0, dilation=1.0)  # dilation = C/2
    assert engine.check_move_static(res, start_port=start, net_width=2.0)
    # Move at x=7. Gap = 3.0 == minimum. OK.
    start_ok = Port(7, 0, 90)
    res_ok = Straight.generate(start_ok, 10.0, width=2.0, dilation=1.0)
    assert not engine.check_move_static(res_ok, start_port=start_ok, net_width=2.0)
    # 3. Same exact-boundary case (repeated to pin the ==-minimum behavior).
    start_exact = Port(7, 0, 90)
    res_exact = Straight.generate(start_exact, 10.0, width=2.0, dilation=1.0)
    assert not engine.check_move_static(res_exact, start_port=start_exact, net_width=2.0)

View file

@ -8,7 +8,7 @@ def test_straight_generation() -> None:
start = Port(0, 0, 0) start = Port(0, 0, 0)
length = 10.0 length = 10.0
width = 2.0 width = 2.0
result = Straight.generate(start, length, width) result = Straight.generate(start, length, width, snap_size=1.0)
assert result.end_port.x == 10.0 assert result.end_port.x == 10.0
assert result.end_port.y == 0.0 assert result.end_port.y == 0.0
@ -29,13 +29,13 @@ def test_bend90_generation() -> None:
width = 2.0 width = 2.0
# CW bend # CW bend
result_cw = Bend90.generate(start, radius, width, direction="CW") result_cw = Bend90.generate(start, radius, width, direction="CW", snap_size=1.0)
assert result_cw.end_port.x == 10.0 assert result_cw.end_port.x == 10.0
assert result_cw.end_port.y == -10.0 assert result_cw.end_port.y == -10.0
assert result_cw.end_port.orientation == 270.0 assert result_cw.end_port.orientation == 270.0
# CCW bend # CCW bend
result_ccw = Bend90.generate(start, radius, width, direction="CCW") result_ccw = Bend90.generate(start, radius, width, direction="CCW", snap_size=1.0)
assert result_ccw.end_port.x == 10.0 assert result_ccw.end_port.x == 10.0
assert result_ccw.end_port.y == 10.0 assert result_ccw.end_port.y == 10.0
assert result_ccw.end_port.orientation == 90.0 assert result_ccw.end_port.orientation == 90.0
@ -47,7 +47,7 @@ def test_sbend_generation() -> None:
radius = 10.0 radius = 10.0
width = 2.0 width = 2.0
result = SBend.generate(start, offset, radius, width) result = SBend.generate(start, offset, radius, width, snap_size=1.0)
assert result.end_port.y == 5.0 assert result.end_port.y == 5.0
assert result.end_port.orientation == 0.0 assert result.end_port.orientation == 0.0
assert len(result.geometry) == 2 # Optimization: returns individual arcs assert len(result.geometry) == 2 # Optimization: returns individual arcs
@ -57,27 +57,13 @@ def test_sbend_generation() -> None:
SBend.generate(start, 25.0, 10.0, 2.0) SBend.generate(start, 25.0, 10.0, 2.0)
def test_sbend_generation_negative_offset_keeps_second_arc_below_centerline() -> None:
    """A negative-offset S-bend must keep its second arc below the start centerline."""
    start = Port(0, 0, 0)
    offset = -5.0
    radius = 10.0
    width = 2.0
    result = SBend.generate(start, offset, radius, width)
    assert result.end_port.y == -5.0
    # geometry[1] is the second arc; check its vertical extent via its bounds.
    second_arc_minx, second_arc_miny, second_arc_maxx, second_arc_maxy = result.geometry[1].bounds
    # The arc may touch the top half-width of the waveguide but not exceed it ...
    assert second_arc_maxy <= width / 2.0 + 1e-6
    # ... and must dip below the bottom half-width (i.e. actually bend downward).
    assert second_arc_miny < -width / 2.0
def test_bend_collision_models() -> None: def test_bend_collision_models() -> None:
start = Port(0, 0, 0) start = Port(0, 0, 0)
radius = 10.0 radius = 10.0
width = 2.0 width = 2.0
# 1. BBox model # 1. BBox model
res_bbox = Bend90.generate(start, radius, width, direction="CCW", collision_type="bbox") res_bbox = Bend90.generate(start, radius, width, direction="CCW", collision_type="bbox", snap_size=1.0)
# Arc CCW R=10 from (0,0,0) ends at (10,10,90). # Arc CCW R=10 from (0,0,0) ends at (10,10,90).
# Waveguide width is 2.0, so bbox will be slightly larger than (0,0,10,10) # Waveguide width is 2.0, so bbox will be slightly larger than (0,0,10,10)
minx, miny, maxx, maxy = res_bbox.geometry[0].bounds minx, miny, maxx, maxy = res_bbox.geometry[0].bounds
@ -87,7 +73,7 @@ def test_bend_collision_models() -> None:
assert maxy >= 10.0 - 1e-6 assert maxy >= 10.0 - 1e-6
# 2. Clipped BBox model # 2. Clipped BBox model
res_clipped = Bend90.generate(start, radius, width, direction="CCW", collision_type="clipped_bbox", clip_margin=1.0) res_clipped = Bend90.generate(start, radius, width, direction="CCW", collision_type="clipped_bbox", clip_margin=1.0, snap_size=1.0)
# Area should be less than full bbox # Area should be less than full bbox
assert res_clipped.geometry[0].area < res_bbox.geometry[0].area assert res_clipped.geometry[0].area < res_bbox.geometry[0].area
@ -98,11 +84,11 @@ def test_sbend_collision_models() -> None:
radius = 10.0 radius = 10.0
width = 2.0 width = 2.0
res_bbox = SBend.generate(start, offset, radius, width, collision_type="bbox") res_bbox = SBend.generate(start, offset, radius, width, collision_type="bbox", snap_size=1.0)
# Geometry should be a list of individual bbox polygons for each arc # Geometry should be a list of individual bbox polygons for each arc
assert len(res_bbox.geometry) == 2 assert len(res_bbox.geometry) == 2
res_arc = SBend.generate(start, offset, radius, width, collision_type="arc") res_arc = SBend.generate(start, offset, radius, width, collision_type="arc", snap_size=1.0)
area_bbox = sum(p.area for p in res_bbox.geometry) area_bbox = sum(p.area for p in res_bbox.geometry)
area_arc = sum(p.area for p in res_arc.geometry) area_arc = sum(p.area for p in res_arc.geometry)
assert area_bbox > area_arc assert area_bbox > area_arc
@ -115,7 +101,8 @@ def test_sbend_continuity() -> None:
radius = 20.0 radius = 20.0
width = 1.0 width = 1.0
res = SBend.generate(start, offset, radius, width) # We use snap_size=1.0 so that (10-offset) = 6.0 is EXACTLY hit.
res = SBend.generate(start, offset, radius, width, snap_size=1.0)
# Target orientation should be same as start # Target orientation should be same as start
assert abs(res.end_port.orientation - 90.0) < 1e-6 assert abs(res.end_port.orientation - 90.0) < 1e-6
@ -155,7 +142,7 @@ def test_component_transform_invariance() -> None:
radius = 10.0 radius = 10.0
width = 2.0 width = 2.0
res0 = Bend90.generate(start0, radius, width, direction="CCW") res0 = Bend90.generate(start0, radius, width, direction="CCW", snap_size=1.0)
# Transform: Translate (10, 10) then Rotate 90 # Transform: Translate (10, 10) then Rotate 90
dx, dy = 10.0, 5.0 dx, dy = 10.0, 5.0
@ -166,7 +153,7 @@ def test_component_transform_invariance() -> None:
# 2. Generate at transformed start # 2. Generate at transformed start
start_transformed = rotate_port(translate_port(start0, dx, dy), angle) start_transformed = rotate_port(translate_port(start0, dx, dy), angle)
res_transformed = Bend90.generate(start_transformed, radius, width, direction="CCW") res_transformed = Bend90.generate(start_transformed, radius, width, direction="CCW", snap_size=1.0)
assert abs(res_transformed.end_port.x - p_end_transformed.x) < 1e-6 assert abs(res_transformed.end_port.x - p_end_transformed.x) < 1e-6
assert abs(res_transformed.end_port.y - p_end_transformed.y) < 1e-6 assert abs(res_transformed.end_port.y - p_end_transformed.y) < 1e-6

View file

@ -19,7 +19,7 @@ def basic_evaluator() -> CostEvaluator:
def test_astar_sbend(basic_evaluator: CostEvaluator) -> None: def test_astar_sbend(basic_evaluator: CostEvaluator) -> None:
context = AStarContext(basic_evaluator, sbend_offsets=[2.0, 5.0]) context = AStarContext(basic_evaluator, snap_size=1.0, sbend_offsets=[2.0, 5.0])
# Start at (0,0), target at (50, 2) -> 2um lateral offset # Start at (0,0), target at (50, 2) -> 2um lateral offset
# This matches one of our discretized SBend offsets. # This matches one of our discretized SBend offsets.
start = Port(0, 0, 0) start = Port(0, 0, 0)
@ -39,7 +39,7 @@ def test_astar_sbend(basic_evaluator: CostEvaluator) -> None:
def test_pathfinder_negotiated_congestion_resolution(basic_evaluator: CostEvaluator) -> None: def test_pathfinder_negotiated_congestion_resolution(basic_evaluator: CostEvaluator) -> None:
context = AStarContext(basic_evaluator, bend_radii=[5.0, 10.0]) context = AStarContext(basic_evaluator, snap_size=1.0, bend_radii=[5.0, 10.0])
# Increase base penalty to force detour immediately # Increase base penalty to force detour immediately
pf = PathFinder(context, max_iterations=10, base_congestion_penalty=1000.0) pf = PathFinder(context, max_iterations=10, base_congestion_penalty=1000.0)
@ -59,10 +59,5 @@ def test_pathfinder_negotiated_congestion_resolution(basic_evaluator: CostEvalua
results = pf.route_all(netlist, net_widths) results = pf.route_all(netlist, net_widths)
assert len(results) == 2
assert results["net1"].reached_target
assert results["net2"].reached_target
assert results["net1"].is_valid assert results["net1"].is_valid
assert results["net2"].is_valid assert results["net2"].is_valid
assert results["net1"].collisions == 0
assert results["net2"].collisions == 0

View file

@ -1,4 +1,3 @@
from shapely.geometry import Polygon
from inire.geometry.collision import CollisionEngine from inire.geometry.collision import CollisionEngine
from inire.geometry.primitives import Port from inire.geometry.primitives import Port
from inire.router.cost import CostEvaluator from inire.router.cost import CostEvaluator
@ -38,30 +37,3 @@ def test_cost_calculation() -> None:
# Side check: 2*bp = 20. # Side check: 2*bp = 20.
# Total = 1.1 * (20 + 40) = 66.0 # Total = 1.1 * (20 + 40) = 66.0
assert h_away >= h_90 assert h_away >= h_90
def test_danger_map_kd_tree_and_cache() -> None:
# Test that KD-Tree based danger map works and uses cache
bounds = (0, 0, 1000, 1000)
dm = DangerMap(bounds, resolution=1.0, safety_threshold=10.0)
# Square obstacle at (100, 100) to (110, 110)
obstacle = Polygon([(100, 100), (110, 100), (110, 110), (100, 110)])
dm.precompute([obstacle])
# 1. High cost near boundary
cost_near = dm.get_cost(100.5, 100.5)
assert cost_near > 1.0
# 2. Zero cost far away
cost_far = dm.get_cost(500, 500)
assert cost_far == 0.0
# 3. Check cache usage (internal detail check)
# We can check if calling it again is fast or just verify it returns same result
cost_near_2 = dm.get_cost(100.5, 100.5)
assert cost_near_2 == cost_near
# 4. Out of bounds
assert dm.get_cost(-1, -1) >= 1e12

View file

@ -1,63 +0,0 @@
from __future__ import annotations
import os
import statistics
import pytest
from inire.tests.example_scenarios import SCENARIOS, ScenarioDefinition, ScenarioOutcome
RUN_PERFORMANCE = os.environ.get("INIRE_RUN_PERFORMANCE") == "1"
PERFORMANCE_REPEATS = 3
REGRESSION_FACTOR = 1.5
# Baselines are measured from the current code path without plotting.
BASELINE_SECONDS = {
"example_01_simple_route": 0.0035,
"example_02_congestion_resolution": 0.2666,
"example_03_locked_paths": 0.2304,
"example_04_sbends_and_radii": 1.8734,
"example_05_orientation_stress": 0.5630,
"example_06_bend_collision_models": 5.2382,
"example_07_large_scale_routing": 1.2081,
"example_08_custom_bend_geometry": 4.2111,
"example_09_unroutable_best_effort": 0.0056,
}
EXPECTED_OUTCOMES = {
"example_01_simple_route": {"total_results": 1, "valid_results": 1, "reached_targets": 1},
"example_02_congestion_resolution": {"total_results": 3, "valid_results": 3, "reached_targets": 3},
"example_03_locked_paths": {"total_results": 2, "valid_results": 2, "reached_targets": 2},
"example_04_sbends_and_radii": {"total_results": 2, "valid_results": 2, "reached_targets": 2},
"example_05_orientation_stress": {"total_results": 3, "valid_results": 3, "reached_targets": 3},
"example_06_bend_collision_models": {"total_results": 3, "valid_results": 3, "reached_targets": 3},
"example_07_large_scale_routing": {"total_results": 10, "valid_results": 10, "reached_targets": 10},
"example_08_custom_bend_geometry": {"total_results": 2, "valid_results": 1, "reached_targets": 2},
"example_09_unroutable_best_effort": {"total_results": 1, "valid_results": 0, "reached_targets": 0},
}
def _assert_expected_outcome(name: str, outcome: ScenarioOutcome) -> None:
expected = EXPECTED_OUTCOMES[name]
assert outcome.total_results == expected["total_results"]
assert outcome.valid_results == expected["valid_results"]
assert outcome.reached_targets == expected["reached_targets"]
@pytest.mark.performance
@pytest.mark.skipif(not RUN_PERFORMANCE, reason="set INIRE_RUN_PERFORMANCE=1 to run runtime regression checks")
@pytest.mark.parametrize("scenario", SCENARIOS, ids=[scenario.name for scenario in SCENARIOS])
def test_example_like_runtime_regression(scenario: ScenarioDefinition) -> None:
timings = []
for _ in range(PERFORMANCE_REPEATS):
outcome = scenario.run()
_assert_expected_outcome(scenario.name, outcome)
timings.append(outcome.duration_s)
median_runtime = statistics.median(timings)
assert median_runtime <= BASELINE_SECONDS[scenario.name] * REGRESSION_FACTOR, (
f"{scenario.name} median runtime {median_runtime:.4f}s exceeded "
f"{REGRESSION_FACTOR:.1f}x baseline {BASELINE_SECONDS[scenario.name]:.4f}s "
f"from timings {timings!r}"
)

View file

@ -2,13 +2,15 @@ from typing import Any
import pytest import pytest
from hypothesis import given, settings, strategies as st from hypothesis import given, settings, strategies as st
from shapely.geometry import Point, Polygon from shapely.geometry import Polygon
from inire.geometry.collision import CollisionEngine from inire.geometry.collision import CollisionEngine
from inire.geometry.primitives import Port from inire.geometry.primitives import Port
from inire.router.astar import AStarContext, route_astar from inire.router.astar import AStarContext, route_astar
from inire.router.cost import CostEvaluator from inire.router.cost import CostEvaluator
from inire.router.danger_map import DangerMap from inire.router.danger_map import DangerMap
from inire.router.pathfinder import RoutingResult
from inire.utils.validation import validate_routing_result
@st.composite @st.composite
@ -28,17 +30,9 @@ def random_port(draw: Any) -> Port:
return Port(x, y, orientation) return Port(x, y, orientation)
def _port_has_required_clearance(port: Port, obstacles: list[Polygon], clearance: float, net_width: float) -> bool:
point = Point(float(port.x), float(port.y))
required_gap = (net_width / 2.0) + clearance
return all(point.distance(obstacle) >= required_gap for obstacle in obstacles)
@settings(max_examples=3, deadline=None) @settings(max_examples=3, deadline=None)
@given(obstacles=st.lists(random_obstacle(), min_size=0, max_size=3), start=random_port(), target=random_port()) @given(obstacles=st.lists(random_obstacle(), min_size=0, max_size=3), start=random_port(), target=random_port())
def test_fuzz_astar_no_crash(obstacles: list[Polygon], start: Port, target: Port) -> None: def test_fuzz_astar_no_crash(obstacles: list[Polygon], start: Port, target: Port) -> None:
net_width = 2.0
clearance = 2.0
engine = CollisionEngine(clearance=2.0) engine = CollisionEngine(clearance=2.0)
for obs in obstacles: for obs in obstacles:
engine.add_static_obstacle(obs) engine.add_static_obstacle(obs)
@ -54,14 +48,17 @@ def test_fuzz_astar_no_crash(obstacles: list[Polygon], start: Port, target: Port
try: try:
path = route_astar(start, target, net_width=2.0, context=context) path = route_astar(start, target, net_width=2.0, context=context)
# This is a crash-smoke test rather than a full correctness proof. # Analytic Correctness: if path is returned, verify it's collision-free
# If a full path is returned, it should at least terminate at the requested target. if path:
endpoints_are_clear = ( result = RoutingResult(net_id="default", path=path, is_valid=True, collisions=0)
_port_has_required_clearance(start, obstacles, clearance, net_width) validation = validate_routing_result(
and _port_has_required_clearance(target, obstacles, clearance, net_width) result,
obstacles,
clearance=2.0,
expected_start=start,
expected_end=target,
) )
if path and endpoints_are_clear: assert validation["is_valid"], f"Validation failed: {validation.get('reason')}"
assert path[-1].end_port == target
except Exception as e: except Exception as e:
# Unexpected exceptions are failures # Unexpected exceptions are failures

View file

@ -33,86 +33,3 @@ def test_pathfinder_parallel(basic_evaluator: CostEvaluator) -> None:
assert results["net2"].is_valid assert results["net2"].is_valid
assert results["net1"].collisions == 0 assert results["net1"].collisions == 0
assert results["net2"].collisions == 0 assert results["net2"].collisions == 0
def test_pathfinder_crossing_detection(basic_evaluator: CostEvaluator) -> None:
context = AStarContext(basic_evaluator)
# Force a crossing by setting low iterations and low penalty
pf = PathFinder(context, max_iterations=1, base_congestion_penalty=1.0, warm_start=None)
# Net 1: (0, 25) -> (100, 25) Horizontal
# Net 2: (50, 0) -> (50, 50) Vertical
netlist = {
"net1": (Port(0, 25, 0), Port(100, 25, 0)),
"net2": (Port(50, 0, 90), Port(50, 50, 90)),
}
net_widths = {"net1": 2.0, "net2": 2.0}
results = pf.route_all(netlist, net_widths)
# Both should be invalid because they cross
assert not results["net1"].is_valid
assert not results["net2"].is_valid
assert results["net1"].collisions > 0
assert results["net2"].collisions > 0
def test_pathfinder_refine_paths_reduces_locked_detour_bends() -> None:
bounds = (0, -50, 100, 50)
def build_pathfinder(*, refine_paths: bool) -> tuple[CollisionEngine, PathFinder]:
engine = CollisionEngine(clearance=2.0)
danger_map = DangerMap(bounds=bounds)
danger_map.precompute([])
evaluator = CostEvaluator(engine, danger_map, bend_penalty=250.0, sbend_penalty=500.0)
context = AStarContext(evaluator, bend_radii=[10.0])
return engine, PathFinder(context, refine_paths=refine_paths)
base_engine, base_pf = build_pathfinder(refine_paths=False)
base_pf.route_all({"netA": (Port(10, 0, 0), Port(90, 0, 0))}, {"netA": 2.0})
base_engine.lock_net("netA")
base_result = base_pf.route_all({"netB": (Port(50, -20, 90), Port(50, 20, 90))}, {"netB": 2.0})["netB"]
refined_engine, refined_pf = build_pathfinder(refine_paths=True)
refined_pf.route_all({"netA": (Port(10, 0, 0), Port(90, 0, 0))}, {"netA": 2.0})
refined_engine.lock_net("netA")
refined_result = refined_pf.route_all({"netB": (Port(50, -20, 90), Port(50, 20, 90))}, {"netB": 2.0})["netB"]
base_bends = sum(1 for comp in base_result.path if comp.move_type == "Bend90")
refined_bends = sum(1 for comp in refined_result.path if comp.move_type == "Bend90")
assert base_result.is_valid
assert refined_result.is_valid
assert refined_bends < base_bends
assert refined_pf._path_cost(refined_result.path) < base_pf._path_cost(base_result.path)
def test_pathfinder_refine_paths_simplifies_triple_crossing_detours() -> None:
bounds = (0, 0, 100, 100)
netlist = {
"horizontal": (Port(10, 50, 0), Port(90, 50, 0)),
"vertical_up": (Port(45, 10, 90), Port(45, 90, 90)),
"vertical_down": (Port(55, 90, 270), Port(55, 10, 270)),
}
net_widths = {net_id: 2.0 for net_id in netlist}
def build_pathfinder(*, refine_paths: bool) -> PathFinder:
engine = CollisionEngine(clearance=2.0)
danger_map = DangerMap(bounds=bounds)
danger_map.precompute([])
evaluator = CostEvaluator(engine, danger_map, greedy_h_weight=1.5, bend_penalty=250.0, sbend_penalty=500.0)
context = AStarContext(evaluator, bend_radii=[10.0], sbend_radii=[10.0])
return PathFinder(context, base_congestion_penalty=1000.0, refine_paths=refine_paths)
base_results = build_pathfinder(refine_paths=False).route_all(netlist, net_widths)
refined_results = build_pathfinder(refine_paths=True).route_all(netlist, net_widths)
for net_id in ("vertical_up", "vertical_down"):
base_result = base_results[net_id]
refined_result = refined_results[net_id]
base_bends = sum(1 for comp in base_result.path if comp.move_type == "Bend90")
refined_bends = sum(1 for comp in refined_result.path if comp.move_type == "Bend90")
assert base_result.is_valid
assert refined_result.is_valid
assert refined_bends < base_bends

View file

@ -15,8 +15,8 @@ def port_strategy(draw: Any) -> Port:
def test_port_snapping() -> None: def test_port_snapping() -> None:
p = Port(0.123456, 0.654321, 90) p = Port(0.123456, 0.654321, 90)
assert p.x == 0 assert p.x == 0.123
assert p.y == 1 assert p.y == 0.654
@given(p=port_strategy()) @given(p=port_strategy())
@ -38,13 +38,14 @@ def test_port_transform_invariants(p: Port) -> None:
) )
def test_translate_snapping(p: Port, dx: float, dy: float) -> None: def test_translate_snapping(p: Port, dx: float, dy: float) -> None:
p_trans = translate_port(p, dx, dy) p_trans = translate_port(p, dx, dy)
assert isinstance(p_trans.x, int) # Check that snapped result is indeed multiple of GRID_SNAP_UM (0.001 um = 1nm)
assert isinstance(p_trans.y, int) assert abs(p_trans.x * 1000 - round(p_trans.x * 1000)) < 1e-6
assert abs(p_trans.y * 1000 - round(p_trans.y * 1000)) < 1e-6
def test_orientation_normalization() -> None: def test_orientation_normalization() -> None:
p = Port(0, 0, 360) p = Port(0, 0, 360)
assert p.orientation == 0 assert p.orientation == 0.0
p2 = Port(0, 0, -90) p2 = Port(0, 0, -90)
assert p2.orientation == 270 assert p2.orientation == 270.0

View file

@ -3,49 +3,64 @@ from inire.geometry.primitives import Port
from inire.router.astar import route_astar, AStarContext from inire.router.astar import route_astar, AStarContext
from inire.router.cost import CostEvaluator from inire.router.cost import CostEvaluator
from inire.geometry.collision import CollisionEngine from inire.geometry.collision import CollisionEngine
from inire.geometry.components import snap_search_grid
class TestVariableGrid(unittest.TestCase):
class TestIntegerPorts(unittest.TestCase):
def setUp(self): def setUp(self):
self.ce = CollisionEngine(clearance=2.0) self.ce = CollisionEngine(clearance=2.0)
self.cost = CostEvaluator(self.ce) self.cost = CostEvaluator(self.ce)
def test_route_reaches_integer_target(self): def test_grid_1_0(self):
context = AStarContext(self.cost) """ Test routing with a 1.0um grid. """
start = Port(0, 0, 0) context = AStarContext(self.cost, snap_size=1.0)
target = Port(12, 0, 0)
path = route_astar(start, target, net_width=1.0, context=context)
self.assertIsNotNone(path)
last_port = path[-1].end_port
self.assertEqual(last_port.x, 12)
self.assertEqual(last_port.y, 0)
self.assertEqual(last_port.r, 0)
def test_port_constructor_rounds_to_integer_lattice(self):
context = AStarContext(self.cost)
start = Port(0.0, 0.0, 0.0) start = Port(0.0, 0.0, 0.0)
target = Port(12.3, 0.0, 0.0) # 12.3 should snap to 12.0 on a 1.0um grid
target = Port(12.3, 0.0, 0.0, snap=False)
path = route_astar(start, target, net_width=1.0, context=context) path = route_astar(start, target, net_width=1.0, context=context)
self.assertIsNotNone(path) self.assertIsNotNone(path)
self.assertEqual(target.x, 12)
last_port = path[-1].end_port last_port = path[-1].end_port
self.assertEqual(last_port.x, 12) self.assertEqual(last_port.x, 12.0)
def test_half_step_inputs_use_integerized_targets(self): # Verify component relative grid coordinates
context = AStarContext(self.cost) # rel_gx = round(x / snap)
# For x=12.0, snap=1.0 -> rel_gx=12
self.assertEqual(path[-1].rel_gx, 12)
def test_grid_2_5(self):
""" Test routing with a 2.5um grid. """
context = AStarContext(self.cost, snap_size=2.5)
start = Port(0.0, 0.0, 0.0) start = Port(0.0, 0.0, 0.0)
target = Port(7.5, 0.0, 0.0) # 7.5 is a multiple of 2.5, should be reached exactly
target = Port(7.5, 0.0, 0.0, snap=False)
path = route_astar(start, target, net_width=1.0, context=context) path = route_astar(start, target, net_width=1.0, context=context)
self.assertIsNotNone(path) self.assertIsNotNone(path)
self.assertEqual(target.x, 8)
last_port = path[-1].end_port last_port = path[-1].end_port
self.assertEqual(last_port.x, 8) self.assertEqual(last_port.x, 7.5)
# rel_gx = 7.5 / 2.5 = 3
self.assertEqual(path[-1].rel_gx, 3)
def test_grid_10_0(self):
""" Test routing with a large 10.0um grid. """
context = AStarContext(self.cost, snap_size=10.0)
start = Port(0.0, 0.0, 0.0)
# 15.0 should snap to 20.0 (ties usually round to even or nearest,
# but 15.0 is exactly between 10 and 20.
# snap_search_grid uses round(val/snap)*snap. round(1.5) is 2 in Python 3.
target = Port(15.0, 0.0, 0.0, snap=False)
path = route_astar(start, target, net_width=1.0, context=context)
self.assertIsNotNone(path)
last_port = path[-1].end_port
self.assertEqual(last_port.x, 20.0)
# rel_gx = 20.0 / 10.0 = 2
self.assertEqual(path[-1].rel_gx, 2)
if __name__ == '__main__': if __name__ == '__main__':
unittest.main() unittest.main()

View file

@ -17,7 +17,7 @@ def validate_routing_result(
clearance: float, clearance: float,
expected_start: Port | None = None, expected_start: Port | None = None,
expected_end: Port | None = None, expected_end: Port | None = None,
) -> dict[str, Any]: ) -> dict[str, Any]:
""" """
Perform a high-precision validation of a routed path. Perform a high-precision validation of a routed path.
@ -47,11 +47,11 @@ def validate_routing_result(
# Boundary check # Boundary check
if expected_end: if expected_end:
last_port = result.path[-1].end_port last_port = result.path[-1].end_port
dist_to_end = numpy.sqrt(((last_port[:2] - expected_end[:2])**2).sum()) dist_to_end = numpy.sqrt((last_port.x - expected_end.x)**2 + (last_port.y - expected_end.y)**2)
if dist_to_end > 0.005: if dist_to_end > 0.005:
connectivity_errors.append(f"Final port position mismatch: {dist_to_end*1000:.2f}nm") connectivity_errors.append(f"Final port position mismatch: {dist_to_end*1000:.2f}nm")
if abs(last_port[2] - expected_end[2]) > 0.1: if abs(last_port.orientation - expected_end.orientation) > 0.1:
connectivity_errors.append(f"Final port orientation mismatch: {last_port[2]} vs {expected_end[2]}") connectivity_errors.append(f"Final port orientation mismatch: {last_port.orientation} vs {expected_end.orientation}")
# 2. Geometry Buffering # 2. Geometry Buffering
dilation_half = clearance / 2.0 dilation_half = clearance / 2.0

View file

@ -99,8 +99,8 @@ def plot_routing_results(
if netlist: if netlist:
for net_id, (start_p, target_p) in netlist.items(): for net_id, (start_p, target_p) in netlist.items():
for p in [start_p, target_p]: for p in [start_p, target_p]:
rad = numpy.radians(p[2]) rad = numpy.radians(p.orientation)
ax.quiver(*p[:2], numpy.cos(rad), numpy.sin(rad), color="black", ax.quiver(p.x, p.y, numpy.cos(rad), numpy.sin(rad), color="black",
scale=25, width=0.004, pivot="tail", zorder=6) scale=25, width=0.004, pivot="tail", zorder=6)
ax.set_xlim(bounds[0], bounds[2]) ax.set_xlim(bounds[0], bounds[2])
@ -121,7 +121,6 @@ def plot_routing_results(
def plot_danger_map( def plot_danger_map(
danger_map: DangerMap, danger_map: DangerMap,
ax: Axes | None = None, ax: Axes | None = None,
resolution: float | None = None
) -> tuple[Figure, Axes]: ) -> tuple[Figure, Axes]:
""" """
Plot the pre-computed danger map as a heatmap. Plot the pre-computed danger map as a heatmap.
@ -131,30 +130,10 @@ def plot_danger_map(
else: else:
fig = ax.get_figure() fig = ax.get_figure()
# Generate a temporary grid for visualization
res = resolution if resolution is not None else max(1.0, (danger_map.maxx - danger_map.minx) / 200.0)
x_coords = numpy.arange(danger_map.minx + res/2, danger_map.maxx, res)
y_coords = numpy.arange(danger_map.miny + res/2, danger_map.maxy, res)
xv, yv = numpy.meshgrid(x_coords, y_coords, indexing='ij')
if danger_map.tree is not None:
points = numpy.stack([xv.ravel(), yv.ravel()], axis=1)
dists, _ = danger_map.tree.query(points, distance_upper_bound=danger_map.safety_threshold)
# Apply cost function
safe_dists = numpy.maximum(dists, 0.1)
grid_flat = numpy.where(
dists < danger_map.safety_threshold,
danger_map.k / (safe_dists**2),
0.0
)
grid = grid_flat.reshape(xv.shape)
else:
grid = numpy.zeros(xv.shape)
# Need to transpose because grid is [x, y] and imshow expects [row, col] (y, x) # Need to transpose because grid is [x, y] and imshow expects [row, col] (y, x)
# Also origin='lower' to match coordinates
im = ax.imshow( im = ax.imshow(
grid.T, danger_map.grid.T,
origin='lower', origin='lower',
extent=[danger_map.minx, danger_map.maxx, danger_map.miny, danger_map.maxy], extent=[danger_map.minx, danger_map.maxx, danger_map.miny, danger_map.maxy],
cmap='YlOrRd', cmap='YlOrRd',

View file

@ -77,6 +77,4 @@ lint.ignore = [
[tool.pytest.ini_options] [tool.pytest.ini_options]
addopts = "-rsXx" addopts = "-rsXx"
testpaths = ["inire"] testpaths = ["inire"]
markers = [
"performance: opt-in runtime regression checks against example-like routing scenarios",
]