inire/examples/07_large_scale_routing.py

183 lines
7.5 KiB
Python
Raw Normal View History

2026-03-10 21:55:54 -07:00
import numpy as np
2026-03-12 23:50:25 -07:00
import time
2026-03-10 21:55:54 -07:00
from inire.geometry.collision import CollisionEngine
from inire.geometry.primitives import Port
from inire.router.astar import AStarRouter
from inire.router.cost import CostEvaluator
from inire.router.danger_map import DangerMap
from inire.router.pathfinder import PathFinder
2026-03-16 17:05:16 -07:00
from inire.utils.visualization import plot_routing_results, plot_danger_map, plot_expanded_nodes, plot_expansion_density
2026-03-10 21:55:54 -07:00
from shapely.geometry import box
def main() -> None:
    """Example 07: fan-out routing of 10 nets through a 200um bottleneck.

    Builds a 1000x1000um environment with two obstacle slabs leaving a
    200um gap at x=500, routes 10 nets through it with an iterative
    congestion-negotiation loop, profiles the run, and saves per-iteration
    and final plots under ``examples/``.
    """
    # Stdlib/plotting helpers used below; kept function-local (as in the
    # original) so importing this module stays cheap.
    import cProfile
    import pstats
    from collections import Counter

    import matplotlib.pyplot as plt

    print("Running Example 07: Fan-Out (10 Nets, 50um Radius, 5um Grid)...")

    # 1. Setup Environment
    bounds = (0, 0, 1000, 1000)
    engine = CollisionEngine(clearance=6.0)

    # Bottleneck at x=500, 200um gap (between y=400 and y=600)
    obstacles = [
        box(450, 0, 550, 400),
        box(450, 600, 550, 1000),
    ]
    for obs in obstacles:
        engine.add_static_obstacle(obs)

    danger_map = DangerMap(bounds=bounds)
    danger_map.precompute(obstacles)
    evaluator = CostEvaluator(
        engine,
        danger_map,
        greedy_h_weight=1.5,
        unit_length_cost=0.1,
        bend_penalty=100.0,
        sbend_penalty=400.0,
        congestion_penalty=100.0,
    )

    router = AStarRouter(
        evaluator,
        node_limit=2000000,
        snap_size=5.0,
        bend_radii=[50.0],
        sbend_radii=[50.0],
        use_analytical_sbends=False,
    )
    pf = PathFinder(
        router,
        evaluator,
        max_iterations=15,
        base_congestion_penalty=100.0,
        congestion_multiplier=1.4,
    )

    # 2. Define Netlist: tight 10um-pitch bundle on the left fanning out to an
    # 800um-tall spread on the right; ports snapped to the 5um routing grid.
    netlist = {}
    num_nets = 10
    start_x = 50
    start_y_base = 500 - (num_nets * 10.0) / 2.0  # center the bundle on y=500

    end_x = 950
    end_y_base = 100
    end_y_pitch = 800.0 / (num_nets - 1)  # spread targets over y=100..900

    for i in range(num_nets):
        sy = round((start_y_base + i * 10.0) / 5.0) * 5.0
        ey = round((end_y_base + i * end_y_pitch) / 5.0) * 5.0
        netlist[f"net_{i:02d}"] = (Port(start_x, sy, 0), Port(end_x, ey, 0))

    net_widths = {nid: 2.0 for nid in netlist}

    # 3. Route
    print(f"Routing {len(netlist)} nets through 200um bottleneck...")

    iteration_stats = []

    def iteration_callback(idx, current_results):
        """Per-iteration hook: log progress, diagnose conflicts, save plots.

        Also decays the evaluator's greedy heuristic weight so later
        iterations search more thoroughly, and resets the router's node
        counters so the next iteration's stats start clean.
        """
        successes = sum(1 for r in current_results.values() if r.is_valid)
        total_collisions = sum(r.collisions for r in current_results.values())
        total_nodes = pf.router.metrics['nodes_expanded']

        # Identify hotspots (20um grid cells where geometry of different
        # nets overlaps) and count conflicts per net pair.
        hotspots = {}
        overlap_matrix = {}  # (net_a, net_b) -> count

        for nid, res in current_results.items():
            if res.path:
                for comp in res.path:
                    for poly in comp.geometry:
                        # Spatial-index query gives candidates; confirm with
                        # an exact intersection test.
                        overlaps = engine.dynamic_index.intersection(poly.bounds)
                        for other_obj_id in overlaps:
                            other_nid, other_poly = engine.dynamic_geometries[other_obj_id]
                            if other_nid != nid:
                                if poly.intersects(other_poly):
                                    # Bucket the overlap into a 20um grid cell.
                                    cx, cy = poly.centroid.x, poly.centroid.y
                                    grid_key = (int(cx/20)*20, int(cy/20)*20)
                                    hotspots[grid_key] = hotspots.get(grid_key, 0) + 1

                                    # Record the conflicting pair (order-independent).
                                    pair = tuple(sorted((nid, other_nid)))
                                    overlap_matrix[pair] = overlap_matrix.get(pair, 0) + 1

        print(f" Iteration {idx} finished. Successes: {successes}/{len(netlist)}, Collisions: {total_collisions}")
        if overlap_matrix:
            top_pairs = sorted(overlap_matrix.items(), key=lambda x: x[1], reverse=True)[:3]
            print(f" Top Conflicts: {top_pairs}")
        if hotspots:
            top_hotspots = sorted(hotspots.items(), key=lambda x: x[1], reverse=True)[:3]
            print(f" Top Hotspots: {top_hotspots}")

        # Adaptive Greediness: decay from 1.5 to 1.1 over 10 iterations.
        new_greedy = max(1.1, 1.5 - ((idx + 1) / 10.0) * 0.4)
        evaluator.greedy_h_weight = new_greedy
        print(f" Adaptive Greedy Weight for Next Iteration: {new_greedy:.3f}")

        iteration_stats.append({
            'Iteration': idx,
            'Success': successes,
            'Congestion': total_collisions,
            'Nodes': total_nodes
        })

        # Save a plot of this iteration's result. (Currently every iteration
        # is plotted; throttle here, e.g. on idx, if it becomes too slow.)
        fig, ax = plot_routing_results(current_results, obstacles, bounds, netlist=netlist)
        plot_danger_map(danger_map, ax=ax)

        # Overlay failures: mark where each failed net stopped and annotate
        # the remaining Manhattan distance to its target.
        for nid, res in current_results.items():
            if not res.is_valid and res.path:
                last_p = res.path[-1].end_port
                target_p = netlist[nid][1]
                dist = abs(last_p.x - target_p.x) + abs(last_p.y - target_p.y)
                ax.scatter(last_p.x, last_p.y, color='red', marker='x', s=100)
                ax.text(last_p.x, last_p.y, f" {nid} (rem: {dist:.0f}um)", color='red', fontsize=8)

        fig.savefig(f"examples/07_iteration_{idx:02d}.png")
        plt.close(fig)

        # Plot expansion density if data is available.
        if pf.accumulated_expanded_nodes:
            fig_d, ax_d = plot_expansion_density(pf.accumulated_expanded_nodes, bounds)
            fig_d.savefig(f"examples/07_iteration_{idx:02d}_density.png")
            plt.close(fig_d)

        pf.router.reset_metrics()

    # Profile the whole routing run.
    profiler = cProfile.Profile()
    profiler.enable()
    t0 = time.perf_counter()

    results = pf.route_all(netlist, net_widths, store_expanded=True, iteration_callback=iteration_callback, shuffle_nets=True, seed=42)

    t1 = time.perf_counter()
    profiler.disable()

    stats = pstats.Stats(profiler).sort_stats('tottime')
    stats.print_stats(20)
    print(f"Routing took {t1-t0:.4f}s")

    # 4. Check Results
    print("\n--- Iteration Summary ---")
    print(f"{'Iter':<5} | {'Success':<8} | {'Congest':<8} | {'Nodes':<10}")
    print("-" * 40)
    for s in iteration_stats:
        print(f"{s['Iteration']:<5} | {s['Success']:<8} | {s['Congestion']:<8} | {s['Nodes']:<10}")

    success_count = sum(1 for res in results.values() if res.is_valid)
    print(f"\nFinal: Routed {success_count}/{len(netlist)} nets successfully.")

    for nid, res in results.items():
        target_p = netlist[nid][1]
        if not res.is_valid:
            # A net with no path at all falls back to its start port.
            last_p = res.path[-1].end_port if res.path else netlist[nid][0]
            dist = abs(last_p.x - target_p.x) + abs(last_p.y - target_p.y)
            print(f" FAILED: {nid} (Stopped {dist:.1f}um from target)")
        else:
            # Summarize the path as a histogram of move types.
            counts = Counter(move.move_type for move in res.path)
            print(f" {nid}: {len(res.path)} segments, {dict(counts)}")

    # 5. Visualize final result
    fig, ax = plot_routing_results(results, obstacles, bounds, netlist=netlist)

    # Overlay Danger Map
    plot_danger_map(danger_map, ax=ax)

    # Overlay Expanded Nodes from last routed net (as an example)
    if pf.router.last_expanded_nodes:
        print(f"Plotting {len(pf.router.last_expanded_nodes)} expanded nodes for the last net...")
        plot_expanded_nodes(pf.router.last_expanded_nodes, ax=ax, color='blue', alpha=0.1)

    fig.savefig("examples/07_large_scale_routing.png")
    print("Saved plot to examples/07_large_scale_routing.png")
# Script entry point: run the example only when executed directly,
# not when imported as a module.
if __name__ == "__main__":
    main()