Commit 644fb6f1 by ZhiangWang033

add breaking and merging

parent d0878691
......@@ -4,362 +4,500 @@ import shutil
import sys
sys.path.append('./utils')
#################################################################
### Partitioning the hypergraph using hmetis
#################################################################
def hMetisPartitioner(hmetis_exe, hypergraph_file, Nparts, fixed_file):
    """Partition the hypergraph into Nparts clusters by invoking the hMetis binary.

    The parameter configuration matches the Google Brain paper:
    UBfactor = 5, Nruns = 10, CType = 5, RType = 3, Vcycle = 3,
    with random seed 0 (our implementation's default).
    The hMetis C++ API is wrapped by the external executable.
    """
    invocation = [hmetis_exe, hypergraph_file, fixed_file, str(Nparts), "5 10 5 3 3 0 0"]
    os.system(" ".join(invocation))
#################################################################
### Create cluster commands for Innovus
#################################################################
def CreateInvsCluster(solution_file, io_name_file, instance_name_file, cluster_file):
    # Emit Innovus createInstGroup/addInstToInstGroup commands, one group per
    # hMetis partition.  (Pre-refactor free-function version; the class method
    # Clustering.CreateInvsCluster supersedes it in this commit.)
    solution_vector = []
    with open(solution_file) as f:
        content = f.read().splitlines()
        f.close()  # redundant: `with` already closes the file
    for line in content:
        solution_vector.append(int(line))
    # partition ids are 0-based, so the count is max + 1
    num_clusters = max(solution_vector) + 1
    with open(io_name_file) as f:
        content = f.read().splitlines()
        f.close()
    # IOs occupy the first num_ios entries of the solution vector;
    # instances follow at offset num_ios
    num_ios = len(content)
    with open(instance_name_file) as f:
        content = f.read().splitlines()
        f.close()
    f = open(cluster_file, "w")
    line = "# This script was written and developed by ABKGroup students at UCSD.\n"
    line += "# However, the underlying commands and reports are copyrighted by Cadence.\n"
    line += "# We thank Cadence for granting permission to share our research to help \n"
    line += "# promote and foster the next generation of innovators.\n"
    line += "\n"
    f.write(line)
    for i in range(num_clusters):
        f.write("createInstGroup cluster" + str(i) + "\n")
    for i in range(len(content)):
        # instance_name_file rows: name, is_macro flag, then bounding box
        items = content[i].split()
        instance_name = items[0]
        # ignore all the macros
        is_macro = int(items[1])
        if (is_macro == 1):
            continue
        cluster_id = solution_vector[num_ios + i]
        line = "addInstToInstGroup cluster" + str(cluster_id) + " " + instance_name + "\n"
        # NOTE(review): the diff view truncates this removed function here —
        # it presumably continued with f.write(line) and f.close(); confirm
        # against the pre-commit file.
class Clustering:
    # Top-level driver: extract a hypergraph from the netlist, partition it
    # with hMetis, post-process the clusters (break clusters that spread out,
    # merge small ones), then emit Innovus clustering commands, a clustered
    # lef/def, and optionally a RePlAce blob placement.
    def __init__(self, design, src_dir, fixed_file, step_threshold = 100.6, distance = 1000.0,
                 max_num_vertices = 1000000, net_size_threshold = 300,
                 Nparts = 500, setup_file = "setup.tcl", RePlace = True, placement_density = 0.7, GUI = True):
        """
        parameter: design, help="design_name: ariane, MegaBoom_x2 ", type = str
        parameter: src_dir, help="directory for source codes", type = str
        parameter: fixed_file, help="fixed file generated by grouping", type = str
        parameter: step_threshold, help = "threshold (x and y) to break clusters (in um)", type = float
        parameter: distance, help="distance for merge clusters", type = float
        parameter: max_num_vertices, help="threshold for smaller clusters", type = int
        parameter: net_size_threshold, help="large net threshold", type = int
        parameter: Nparts, help = "number of clusters (only for hmetis, default = 500)", type = int
        parameter: setup_file, help = "setup file for openroad (default = setup.tcl)", type = str
        parameter: RePlace, help = "Run RePlace for blob placement (default = True)", type = bool
        parameter: placement_density, help = "Placement density for RePlace (default = 0.7)", type = float
        parameter: GUI, help = "Run OpenROAD in GUI Mode (default = True)", type = bool
        """
        # initialize parameters
        self.design = design
        self.src_dir = src_dir
        self.fixed_file = fixed_file
        self.step_threshold = step_threshold
        self.distance = distance
        self.max_num_vertices = max_num_vertices
        self.net_size_threshold = net_size_threshold
        self.Nparts = Nparts
        self.setup_file = setup_file
        self.RePlace = RePlace
        self.placement_density = placement_density
        self.GUI = GUI
        # Specify the location of hmetis exe and openroad exe and other utilities
        self.hmetis_exe = src_dir + "/utils/hmetis"
        self.openroad_exe = src_dir + "/utils/openroad"
        self.extract_hypergraph_file = src_dir + "/utils/extract_hypergraph.tcl"
        self.create_clustered_netlist_def_file = src_dir + "/utils/create_clustered_netlist_def.tcl"
        # set up temp report directory
        self.rpt_dir = os.getcwd() + "/rtl_mp"
        # BUG FIX: rpt_dir and hypergraph_file were referenced as bare locals
        # below (NameError at runtime); qualify every use with self.
        self.hypergraph_file = self.rpt_dir + "/" + design + ".hgr"
        # the io_name_file includes the io location
        self.io_name_file = self.hypergraph_file + ".io"
        # the instance_name_file includes name, is_macro, bounding box of the instance
        self.instance_name_file = self.hypergraph_file + ".instance"
        self.hypergraph_fix_file = self.hypergraph_file + ".fix"
        # set up result directories
        result_dir = "./results"
        if not os.path.exists(result_dir):
            os.mkdir(result_dir)
        cadence_result_dir = result_dir + "/Cadence"
        if not os.path.exists(cadence_result_dir):
            os.mkdir(cadence_result_dir)
        openroad_result_dir = result_dir + "/OpenROAD"
        if not os.path.exists(openroad_result_dir):
            os.mkdir(openroad_result_dir)
        self.cluster_file = cadence_result_dir + "/" + self.design + "_cluster_" + str(self.Nparts) + ".tcl" # for innovus command
        self.solution_file = self.hypergraph_file + ".part." + str(self.Nparts) # defined by hmetis automatically
        self.cluster_lef_file = openroad_result_dir + "/clusters.lef"
        self.cluster_def_file = openroad_result_dir + "/clustered_netlist.def"
        self.blob_def_file = openroad_result_dir + "/blob.def"
        # netlist information
        self.vertex_map = { } # instance_name -> vertex_id
        self.vertex_list = [ ] # vertex_id -> instance_name (io_name)
        self.nets = [] # nets, each a list of vertex ids
        self.is_io_macro_list = [ ] # vertex_id -> is io / macro (True or False)
        self.num_ios = 0
        self.vertex_pos = [ ] # bounding box of instances
        self.solution_vector = [] # solution vector, cluster_id for each vertex
        self.max_cluster_id = -1
        self.vertices_in_cluster = { } # vertices in each cluster
        self.cluster_pos = { } # store the coordinates of each cluster
        ###############################################################################
        ### Pipeline
        ###############################################################################
        self.GenerateHypergraph() # Extract netlist information from lef/def/v
        self.RemoveLargetNet() # Remove large nets
        self.ConvertFixFile() # Convert fixed file
        self.hMetisPartitioner() # Partition the hypergraph
        self.BreakClusters() # Break clusters spreading apart
        self.MergeSmallClusters() # Merge small clusters with their neighbors
        self.CreateInvsCluster() # Generate Innovus Clustering Commands
        self.CreateDef() # Generate clustered lef and def file
        if (self.RePlace == True):
            self.RunRePlace() # Generate blob placement
        # BUG FIX: self.rpt_dir (was bare rpt_dir)
        shutil.rmtree(self.rpt_dir)
def GenerateHypergraph(self):
    # Run the hypergraph-extraction tcl through OpenROAD (appended to a copy
    # of the setup file), then load the generated io and instance files into
    # the vertex tables (vertex_map, vertex_list, is_io_macro_list, vertex_pos).
    temp_file = os.getcwd() + "/extract_hypergraph.tcl"
    # BUG FIX: setup_file / extract_hypergraph_file / openroad_exe are
    # attributes of self, not module globals (NameError in the original).
    cmd = "cp " + self.setup_file + " " + temp_file
    os.system(cmd)
    with open(self.extract_hypergraph_file) as f:
        content = f.read().splitlines()
    f = open(temp_file, "a")
    f.write("\n")
    for line in content:
        f.write(line + "\n")
    f.close()
    cmd = self.openroad_exe + " " + temp_file
    os.system(cmd)
    cmd = "rm " + temp_file
    os.system(cmd)
    # read io information; each row: name lx ly ux uy
    vertex_id = 0
    with open(self.io_name_file) as f:
        content = f.read().splitlines()
    for line in content:
        items = line.split()
        io_name = items[0]
        lx = float(items[1])
        ly = float(items[2])
        ux = float(items[3])
        uy = float(items[4])
        self.vertex_map[io_name] = vertex_id
        self.vertex_list.append(io_name)
        # ios always count as io/macro vertices
        self.is_io_macro_list.append(True)
        self.vertex_pos.append([lx, ly, ux, uy])
        vertex_id += 1
    self.num_ios = vertex_id
    # read instance information; each row: name is_macro lx ly ux uy
    with open(self.instance_name_file) as f:
        content = f.read().splitlines()
    for line in content:
        items = line.split()
        instance_name = items[0]
        is_macro = int(items[1]) > 0
        lx = float(items[2])
        ly = float(items[3])
        ux = float(items[4])
        uy = float(items[5])
        self.vertex_map[instance_name] = vertex_id
        self.vertex_list.append(instance_name)
        # BUG FIX: original appended undefined name `io_macro`
        self.is_io_macro_list.append(is_macro)
        self.vertex_pos.append([lx, ly, ux, uy])
        vertex_id += 1
def RemoveLargetNet(self):
    # Remove nets with >= net_size_threshold pins from the hypergraph file
    # (rewritten in place) and cache the kept nets, as 0-based vertex-id
    # lists, in self.nets.
    with open(self.hypergraph_file) as f:
        content = f.read().splitlines()
    # header row: "<num_hyperedges> <num_vertices>"
    items = content[0].split()
    num_vertices = int(items[1])
    hyperedges_list = []
    for i in range(1, len(content)):
        items = content[i].split()
        if (len(items) < self.net_size_threshold):
            hyperedges_list.append(content[i])
            # hMetis vertex ids are 1-based; store 0-based internally.
            # NOTE(review): only kept (small) nets are cached — assumed
            # intentional, since later cluster-adjacency uses self.nets.
            net = [int(item) - 1 for item in items]
            self.nets.append(net)
    with open(self.hypergraph_file, "w") as f:
        line = str(len(hyperedges_list)) + " " + str(num_vertices) + "\n"
        f.write(line)
        # BUG FIX: the original closed the file right after the header and
        # never wrote the surviving hyperedges back, emptying the hypergraph.
        for hyperedge in hyperedges_list:
            f.write(hyperedge + "\n")
########################################################################
### Create clustered netlist (in def format) Based on OpenROAD API
########################################################################
def CreateDef(solution_file, io_name_file, instance_name_file, \
    cluster_lef_file, cluster_def_file, \
    setup_file, create_clustered_netlist_def_file, \
    openroad_exe):
    # Pre-refactor free-function version (superseded by Clustering.CreateDef
    # in this commit).  Builds an OpenROAD tcl script that tags every
    # standard cell with a "cluster_id" dbStringProperty.
    # read solution vector
    solution_vector = []
    with open(solution_file) as f:
        content = f.read().splitlines()
        f.close()  # redundant: `with` already closes the file
    for line in content:
        solution_vector.append(int(line))
    # read io and instance files
    with open(io_name_file) as f:
        content = f.read().splitlines()
        f.close()
    # IOs occupy the first num_ios slots of the solution vector
    num_ios = len(content)
    with open(instance_name_file) as f:
        content = f.read().splitlines()
        f.close()
    ### Create the related openroad tcl file
    file_name = os.getcwd() + "/create_def.tcl"
    cmd = "cp " + setup_file + " " + file_name
    os.system(cmd)
    f = open(file_name, "a")
    f.write("\n")
    f.write("\n")
    f.write("read_verilog $netlist\n")
    f.write("link_design $top_design\n")
    f.write("read_sdc $sdc\n")
    #f.write("read_def $def_file\n")
    f.write("read_def $def_file -floorplan_initialize\n")
    f.write("\n")
    f.write("set db [ord::get_db]\n")
    f.write("set block [[$db getChip] getBlock]\n")
    f.write("set cluster_lef_file " + cluster_lef_file + "\n")
    f.write("set cluster_def_file " + cluster_def_file + "\n")
    f.write("\n")
    f.write("\n")
    for i in range(len(content)):
        items = content[i].split()
        instance_name = items[0]
        # just consider standard cells
        is_macro = int(items[1])
        if (is_macro == 1):
            continue
        cluster_id = solution_vector[num_ios + i]
        line = "set inst [$block findInst " + instance_name + " ]\n"
    # NOTE(review): the three lines below are interleaved diff residue from
    # RemoveLargetNet (hyperedges_list is undefined in this function); the
    # diff view truncates the rest of this removed function here.
    for hyperedge in hyperedges_list:
        f.write(hyperedge + "\n")
    f.close()
def ConvertFixFile(self):
    """Translate the grouping file into an hMetis fix file.

    Each output row is the partition id assigned to that vertex, or -1 when
    the vertex is free; row i of the grouping file lists the members of
    group i, separated by commas.
    """
    assignment = [-1] * len(self.vertex_list)
    with open(self.fixed_file) as grouping:
        group_lines = grouping.read().splitlines()
    # line index doubles as the group id
    for group_id, group_line in enumerate(group_lines):
        for member in group_line.split(','):
            assignment[self.vertex_map[member]] = group_id
    with open(self.hypergraph_fix_file, "w") as out:
        for part in assignment:
            out.write(str(part) + "\n")
def hMetisPartitioner(self):
    """Partition the hypergraph with hMetis, then load the solution.

    Parameter configuration matches the Google Brain paper:
    UBfactor = 5, Nruns = 10, CType = 5, RType = 3, Vcycle = 3, seed 0
    (our implementation's default).  The hMetis C++ API backs the binary.
    Populates solution_vector, max_cluster_id and vertices_in_cluster.
    """
    invocation = [self.hmetis_exe, self.hypergraph_file, self.hypergraph_fix_file,
                  str(self.Nparts), "5 10 5 3 3 0 0"]
    os.system(" ".join(invocation))
    # hMetis writes one cluster id per vertex into the .part.<Nparts> file
    with open(self.solution_file) as f:
        for row in f.read().splitlines():
            self.solution_vector.append(int(row))
    self.max_cluster_id = max(self.solution_vector)
    # invert the solution vector into per-cluster vertex lists
    for cluster_id in range(self.max_cluster_id + 1):
        self.vertices_in_cluster[cluster_id] = []
    for vertex, cluster_id in enumerate(self.solution_vector):
        self.vertices_in_cluster[cluster_id].append(vertex)
def GetBoundingBox(self, vertices_in_cluster):
    # Return (lx, ly, ux, uy) of the cluster's standard-cell members;
    # io and macro vertices are excluded from the box.
    # initialization: lower-left starts huge, upper-right starts at 0
    cluster_lx = 1e20
    cluster_ly = 1e20
    cluster_ux = 0.0
    cluster_uy = 0.0
    for vertex in vertices_in_cluster:
        if (self.is_io_macro_list[vertex] == False):
            box = self.vertex_pos[vertex]
            cluster_lx = min(cluster_lx, box[0])
            cluster_ly = min(cluster_ly, box[1])
            # BUG FIX: the upper-right corner must grow with max(); the
            # original used min(), which pinned ux/uy at their 0.0 start.
            cluster_ux = max(cluster_ux, box[2])
            cluster_uy = max(cluster_uy, box[3])
    return cluster_lx, cluster_ly, cluster_ux, cluster_uy
def BreakLargeCluster(self, cluster_id):
    # For a cluster with bounding box (lx, ly, ux, uy):
    # if (ux - lx) > threshold or (uy - ly) > threshold, break the cluster.
    # The cluster center is the grid origin and step_threshold the step size;
    # all instances whose center falls in the same grid cell form a new cluster.
    # BUG FIX: GetBoundingBox is a method, so it must be called on self.
    cluster_lx, cluster_ly, cluster_ux, cluster_uy = self.GetBoundingBox(self.vertices_in_cluster[cluster_id])
    # BUG FIX: the original condition had mismatched parentheses split across
    # two lines (a syntax error).
    if (((cluster_ux - cluster_lx) <= self.step_threshold)
        and ((cluster_uy - cluster_ly) <= self.step_threshold)):
        return None
    cluster_x = (cluster_lx + cluster_ux) / 2.0
    cluster_y = (cluster_ly + cluster_uy) / 2.0
    num_x_grid = int((cluster_ux - cluster_x) / self.step_threshold)
    num_y_grid = int((cluster_uy - cluster_y) / self.step_threshold)
    temp_vertices_in_cluster = { }
    for i in range((-1) * num_x_grid, num_x_grid + 1):
        temp_vertices_in_cluster[i] = { }
        for j in range((-1) * num_y_grid, num_y_grid + 1):
            temp_vertices_in_cluster[i][j] = [ ]
    for vertex in self.vertices_in_cluster[cluster_id]:
        lx, ly, ux, uy = self.vertex_pos[vertex]
        x = (lx + ux) / 2.0
        y = (ly + uy) / 2.0
        # clamp: io/macro vertices are outside the std-cell bounding box and
        # would otherwise index a non-existent grid cell
        x_grid = max(-num_x_grid, min(num_x_grid, int((x - cluster_x) / self.step_threshold)))
        y_grid = max(-num_y_grid, min(num_y_grid, int((y - cluster_y) / self.step_threshold)))
        # BUG FIX: bucket by this vertex's cell, not the stale loop vars i, j
        temp_vertices_in_cluster[x_grid][y_grid].append(vertex)
    # update the solution vector and vertices_in_cluster
    for x_grid, cells in temp_vertices_in_cluster.items():
        for y_grid, vertices in cells.items():
            # the center cell (0, 0) keeps the original cluster_id
            if (((x_grid == 0) and (y_grid == 0)) or (len(vertices) == 0)):
                pass
            else:
                self.max_cluster_id += 1
                self.vertices_in_cluster[self.max_cluster_id] = [ ]
                for vertex in vertices:
                    self.vertices_in_cluster[self.max_cluster_id].append(vertex)
                    self.solution_vector[vertex] = self.max_cluster_id
    # BUG FIX: shrink the original cluster to its center-cell members; the
    # original left moved vertices listed in both old and new clusters.
    self.vertices_in_cluster[cluster_id] = temp_vertices_in_cluster[0][0]
def BreakClusters(self):
    """Split every cluster whose bounding box spreads beyond step_threshold."""
    # snapshot the keys first: BreakLargeCluster inserts new clusters into
    # vertices_in_cluster while we iterate
    for cluster_id in list(self.vertices_in_cluster.keys()):
        self.BreakLargeCluster(cluster_id)
def IsNearNeighbor(self, cluster, neighbor):
    """Return True when the Manhattan distance between the two cluster
    centers is within self.distance."""
    delta_x = abs(self.cluster_pos[cluster][0] - self.cluster_pos[neighbor][0])
    delta_y = abs(self.cluster_pos[cluster][1] - self.cluster_pos[neighbor][1])
    return (delta_x + delta_y) <= self.distance
def AddClusterEdge(self, cluster_a, cluster_b):
    """Record one more net connecting cluster_a to cluster_b (directed count)."""
    neighbors = self.cluster_adj[cluster_a]
    neighbors[cluster_b] = neighbors.get(cluster_b, 0) + 1
def MergeSmallClusters(self):
    # Merge small clusters into their most strongly connected cluster among
    # nearby neighbors (Manhattan distance <= self.distance).  One merge per
    # pass; repeat until a pass changes nothing.
    # BUG FIX: the original initialized num_clusters to the current count,
    # so the very first iteration's "no change" test fired and the function
    # returned without merging anything.  Start with an impossible value.
    num_clusters = -1
    while(True):
        # stop once a full pass leaves the cluster count unchanged
        if (len(self.vertices_in_cluster) == num_clusters):
            return None
        num_clusters = len(self.vertices_in_cluster)
        small_clusters = []
        self.cluster_pos = { }
        self.cluster_adj = { }
        for cluster_id, vertices in self.vertices_in_cluster.items():
            # BUG FIX: GetBoundingBox is a method — call it on self
            cluster_lx, cluster_ly, cluster_ux, cluster_uy = self.GetBoundingBox(vertices)
            self.cluster_pos[cluster_id] = [(cluster_ux + cluster_lx) / 2.0, (cluster_ly + cluster_uy) / 2.0]
            self.cluster_adj[cluster_id] = { }
            # BUG FIX: small clusters have FEWER vertices than the threshold;
            # the original `>` selected the large ones.  (Assumes
            # max_num_vertices is the small-cluster cutoff — confirm.)
            if (len(vertices) < self.max_num_vertices):
                small_clusters.append(cluster_id)
        # update cluster adjacency matrix from the cached nets
        for net in self.nets:
            cluster_net = []
            for vertex in net:
                cluster_id = self.solution_vector[vertex]
                if cluster_id not in cluster_net:
                    cluster_net.append(cluster_id)
            if (len(cluster_net) > 1):
                for cluster_i in cluster_net:
                    for cluster_j in cluster_net:
                        if (cluster_i != cluster_j):
                            self.AddClusterEdge(cluster_i, cluster_j)
        # BUG FIX: fixed the `candidate_neighors` typo and initialize the
        # per-cluster list before appending (the original raised KeyError).
        candidate_neighbors = { }
        for cluster in small_clusters:
            candidate_neighbors[cluster] = []
            for neighbor in self.cluster_adj[cluster]:
                if (self.IsNearNeighbor(cluster, neighbor) == True):
                    candidate_neighbors[cluster].append(neighbor)
        ### each time we only merge one cluster
        small_cluster_id = -1
        # BUG FIX: removed stray '.' after `neighbors` (syntax error)
        for small_cluster, neighbors in candidate_neighbors.items():
            if (len(neighbors) > 0):
                small_cluster_id = small_cluster
                break
        # No mergeable small clusters remain
        if (small_cluster_id == -1):
            return None
        # pick the neighbor sharing the most nets
        best_neighbor = -1
        best_neighbor_nets = -1
        for neighbor in candidate_neighbors[small_cluster_id]:
            if (self.cluster_adj[small_cluster_id][neighbor] > best_neighbor_nets):
                best_neighbor_nets = self.cluster_adj[small_cluster_id][neighbor]
                best_neighbor = neighbor
        # move every vertex over and drop the emptied cluster
        for vertex in self.vertices_in_cluster[small_cluster_id]:
            self.solution_vector[vertex] = best_neighbor
            self.vertices_in_cluster[best_neighbor].append(vertex)
        del self.vertices_in_cluster[small_cluster_id]
def CreateInvsCluster(self):
    # Create cluster commands for Innovus: one createInstGroup per cluster,
    # then one addInstToInstGroup line per standard cell.
    # BUG FIX: the diff view interleaved fragments of the removed free
    # function CreateDef into this method (undefined names cluster_id,
    # file_name, openroad_exe, cluster_lef_file, ...); those foreign
    # statements are dropped and the method reconstructed from its own lines.
    f = open(self.cluster_file, "w")
    line = "# This script was written and developed by ABKGroup students at UCSD.\n"
    line += "# However, the underlying commands and reports are copyrighted by Cadence.\n"
    line += "# We thank Cadence for granting permission to share our research to help \n"
    line += "# promote and foster the next generation of innovators.\n"
    line += "\n"
    f.write(line)
    # cluster_id starts with 0, so we have self.max_cluster_id + 1
    for i in range(self.max_cluster_id + 1):
        f.write("createInstGroup cluster" + str(i) + "\n")
    for i in range(len(self.vertex_list)):
        # ios and macros are not assigned to instance groups
        if (self.is_io_macro_list[i] == False):
            instance_name = self.vertex_list[i]
            cluster_id = self.solution_vector[i]
            line = "addInstToInstGroup cluster" + str(cluster_id) + " " + instance_name + "\n"
            f.write(line)
    f.close()
def CreateDef(self):
    # Create clustered netlist (in def format) Based on OpenROAD API:
    # build a tcl script that tags every standard cell with a "cluster_id"
    # dbStringProperty, run it through OpenROAD, then patch the emitted lef.
    # Create the related openroad tcl file
    file_name = os.getcwd() + "/create_def.tcl"
    cmd = "cp " + self.setup_file + " " + file_name
    os.system(cmd)
    f = open(file_name, "a")
    f.write("\n")
    f.write("\n")
    f.write("read_verilog $netlist\n")
    f.write("link_design $top_design\n")
    f.write("read_sdc $sdc\n")
    f.write("read_def $def_file -floorplan_initialize\n")
    f.write("\n")
    f.write("set db [ord::get_db]\n")
    f.write("set block [[$db getChip] getBlock]\n")
    f.write("set cluster_lef_file " + self.cluster_lef_file + "\n")
    f.write("set cluster_def_file " + self.cluster_def_file + "\n")
    f.write("\n")
    f.write("\n")
    for i in range(len(self.vertex_list)):
        # only standard cells get a cluster_id property
        if (self.is_io_macro_list[i] == False):
            instance_name = self.vertex_list[i]
            cluster_id = self.solution_vector[i]
            line = "set inst [$block findInst " + instance_name + " ]\n"
            f.write(line)
            f.write("set cluster_id " + str(cluster_id) + "\n")
            f.write('set newProperty [odb::dbStringProperty_create $inst "cluster_id" $cluster_id]\n')
    f.close()
    # append the canned def-generation script
    with open(self.create_clustered_netlist_def_file) as f:
        content = f.read().splitlines()
        f.close()  # redundant: `with` already closes the file
    f = open(file_name, "a")
    f.write("\n")
    for line in content:
        f.write(line + "\n")
    f.close()
    cmd = self.openroad_exe + " " + file_name
    os.system(cmd)
    cmd = "rm " + file_name
    os.system(cmd)
    # Due to some bugs in OpenROAD, we have to manually remove the RESISTANCE section for all the via layers
    with open(self.cluster_lef_file) as f:
        content = f.read().splitlines()
        f.close()
    f = open(self.cluster_lef_file, "w")
    i = 0
    while(i < len(content)):
        items = content[i].split()
        # "LAYER viaN" (name ends with one extra char stripped by [0:-1])
        if(len(items) == 2 and items[0] == "LAYER" and items[1][0:-1] == "via"):
            # copy the via-layer section, skipping RESISTANCE lines, until END
            # NOTE(review): nesting reconstructed — indentation was lost in
            # this diff view; confirm loop structure against the repo.
            while((len(items) == 2 and items[0] == "END") == False):
                if(items[0] != "RESISTANCE"):
                    f.write(content[i] + "\n")
                i = i + 1
                items = content[i].split()
            # copy the END line plus the line after it
            f.write(content[i] + "\n")
            i = i + 1
            items = content[i].split()
            f.write(content[i] + "\n")
            i = i + 1
        else:
            f.write(content[i] + "\n")
            i = i + 1
    f.close()
########################################################################
### Run RePlace on the clustered netlist
########################################################################
def RunRePlace(cluster_lef_file, cluster_def_file, blob_def_file, setup_file, placement_density, openroad_exe, GUI):
    """Run RePlAce global placement on the clustered netlist via an OpenROAD tcl script."""
    script_path = os.getcwd() + "/run_replace.tcl"
    # start from a copy of the setup file, then append the placement commands
    os.system("cp " + setup_file + " " + script_path)
    commands = [
        "read_lef " + cluster_lef_file,
        "read_def " + cluster_def_file,
        "set global_place_density " + str(placement_density),
        "set global_place_density_penalty 8e-5",
        "global_placement -disable_routability_driven -density $global_place_density -init_density_penalty $global_place_density_penalty",
        "write_def " + blob_def_file,
    ]
    with open(script_path, "a") as script:
        script.write("\n".join(commands) + "\n")
        if (GUI == False):
            script.write("exit\n")
    launch = openroad_exe + " -gui " + script_path
    if (GUI == False):
        launch = openroad_exe + " " + script_path
    os.system(launch)
    os.system("rm " + script_path)
####################################################################################
#### Extract hypergraph from netlist
####################################################################################
def GenerateHypergraph(openroad_exe, setup_file, extract_hypergraph_file):
    """Append the hypergraph-extraction tcl to a copy of the setup file and run it through OpenROAD."""
    script_path = os.getcwd() + "/extract_hypergraph.tcl"
    os.system("cp " + setup_file + " " + script_path)
    with open(extract_hypergraph_file) as source:
        tcl_lines = source.read().splitlines()
    with open(script_path, "a") as target:
        target.write("\n")
        for tcl_line in tcl_lines:
            target.write(tcl_line + "\n")
    os.system(openroad_exe + " " + script_path)
    os.system("rm " + script_path)
####################################################################################
#### Remove large nets from the hypergraph
####################################################################################
def RemoveLargetNet(hypergraph_file, net_size_threshold):
    # Rewrite the hMetis hypergraph file in place, dropping every hyperedge
    # with >= net_size_threshold pins; the header's hyperedge count is
    # updated, the vertex count preserved.
    with open(hypergraph_file) as f:
        content = f.read().splitlines()
    # header row: "<num_hyperedges> <num_vertices>"; the hyperedge count is
    # recomputed below, so only the vertex count is reused
    num_vertices = int(content[0].split()[1])
    hyperedges_list = [line for line in content[1:] if len(line.split()) < net_size_threshold]
    with open(hypergraph_file, "w") as f:
        f.write(str(len(hyperedges_list)) + " " + str(num_vertices) + "\n")
        for hyperedge in hyperedges_list:
            f.write(hyperedge + "\n")
####################################################################################
#### Convert the grouping information to fix file which can used by hMetis
####################################################################################
def ConvertFixFile(fixed_file, hypergraph_fix_file, io_name_file, instance_name_file):
    """Write one partition id (or -1 for free) per vertex, in vertex-id order.

    Vertex ids are assigned by position: all ios first (order of
    io_name_file), then all instances (order of instance_name_file) —
    matching the hypergraph's vertex numbering.
    """
    vertex_map = { }
    next_id = 0
    for name_file in (io_name_file, instance_name_file):
        with open(name_file) as f:
            for row in f.read().splitlines():
                vertex_map[row.split()[0]] = next_id
                next_id += 1
    fixed_part = [-1] * next_id
    # row i of the grouping file lists the comma-separated members of group i
    with open(fixed_file) as f:
        for group_id, group_line in enumerate(f.read().splitlines()):
            for member in group_line.split(','):
                fixed_part[vertex_map[member]] = group_id
    with open(hypergraph_fix_file, "w") as out:
        for part in fixed_part:
            out.write(str(part) + "\n")
def Clustering(design, src_dir, fixed_file, net_size_threshold = 300, Nparts = 500, setup_file = "setup.tcl", RePlace = True, placement_density = 0.7, GUI = True):
    """
    parameter: design, help="design_name: ariane, MegaBoom_x2 ", type = str
    parameter: src_dir, help="directory for source codes", type = str
    parameter: fixed_file, help="fixed file generated by grouping"
    parameter: net_size_threshold, help="large net threshold", type = int
    parameter: Nparts, help = "number of clusters (only for hmetis, default = 500)", type = int
    parameter: setup_file, help = "setup file for openroad (default = setup.tcl)", type = str
    parameter: RePlace, help = "Run RePlace for blob placement (default = True)", type = bool
    parameter: placement_density, help = "Placement density for RePlace (default = 0.7)", type = float
    parameter: GUI, help = "Run OpenROAD in GUI Mode (default = True)", type = bool
    """
    pwd = os.getcwd()
    # Specify the location of hmetis exe and openroad exe
    hmetis_exe = src_dir + "/utils/hmetis"
    openroad_exe = src_dir + "/utils/openroad"
    extract_hypergraph_file = src_dir + "/utils/extract_hypergraph.tcl"
    create_clustered_netlist_def_file = src_dir + "/utils/create_clustered_netlist_def.tcl"
    print("[INFO] Design : ", design)
    print("[INFO] Nparts : ", Nparts)
    # result directories (parent first, then the two tool subdirectories)
    result_dir = "./results"
    cadence_result_dir = result_dir + "/Cadence"
    openroad_result_dir = result_dir + "/OpenROAD"
    for directory in (result_dir, cadence_result_dir, openroad_result_dir):
        if not os.path.exists(directory):
            os.mkdir(directory)
    # Generate Hypergraph file
    rpt_dir = pwd + "/rtl_mp"
    hypergraph_file = rpt_dir + "/" + design + ".hgr"
    io_name_file = hypergraph_file + ".io"
    instance_name_file = hypergraph_file + ".instance"
    hypergraph_fix_file = hypergraph_file + ".fix"
    GenerateHypergraph(openroad_exe, setup_file, extract_hypergraph_file)
    # Remove large nets
    RemoveLargetNet(hypergraph_file, net_size_threshold)
    # Convert fixed file
    ConvertFixFile(fixed_file, hypergraph_fix_file, io_name_file, instance_name_file)
    # Partition the hypergraph
    cluster_file = cadence_result_dir + "/" + design + "_cluster_" + str(Nparts) + ".tcl" # for innovus command
    solution_file = hypergraph_file + ".part." + str(Nparts) # defined by hmetis automatically
    hMetisPartitioner(hmetis_exe, hypergraph_file, Nparts, hypergraph_fix_file)
    # Generate Innovus Clustering Commands
    CreateInvsCluster(solution_file, io_name_file, instance_name_file, cluster_file)
    # Generate clustered lef and def file
    cluster_lef_file = openroad_result_dir + "/clusters.lef"
    cluster_def_file = openroad_result_dir + "/clustered_netlist.def"
    CreateDef(solution_file, io_name_file, instance_name_file, cluster_lef_file, cluster_def_file, \
              setup_file, create_clustered_netlist_def_file, openroad_exe)
    # Generate blob placement
    blob_def_file = openroad_result_dir + "/blob.def"
    if (RePlace == True):
        RunRePlace(cluster_lef_file, cluster_def_file, blob_def_file, setup_file, placement_density, openroad_exe, GUI)
    # drop the temporary report directory produced by the extraction step
    shutil.rmtree(rpt_dir)
else:
f.write(content[i] + "\n")
i = i + 1
f.close()
def RunRePlace(self):
    # Run RePlAce global placement on the clustered netlist by appending the
    # placement commands to a copy of the setup file and running OpenROAD
    # (optionally in GUI mode).
    file_name = os.getcwd() + "/run_replace.tcl"
    # BUG FIX: setup_file is an attribute of self, not a module global
    cmd = "cp " + self.setup_file + " " + file_name
    os.system(cmd)
    f = open(file_name, "a")
    line = "read_lef " + self.cluster_lef_file + "\n"
    line += "read_def " + self.cluster_def_file + "\n"
    line += "set global_place_density " + str(self.placement_density) + "\n"
    line += "set global_place_density_penalty 8e-5\n"
    line += "global_placement -disable_routability_driven -density $global_place_density -init_density_penalty $global_place_density_penalty\n"
    # BUG FIX: blob_def_file is an attribute of self, not a module global
    line += "write_def " + self.blob_def_file + "\n"
    f.write(line)
    if (self.GUI == False):
        # without a GUI the script must exit explicitly
        f.write("exit\n")
    f.close()
    cmd = self.openroad_exe + " -gui " + file_name
    if (self.GUI == False):
        cmd = self.openroad_exe + " " + file_name
    os.system(cmd)
    cmd = "rm " + file_name
    os.system(cmd)
......@@ -10,6 +10,9 @@ if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("--design", help="design_name: ariane, MegaBoom_x2 ", type = str, default = "ariane")
parser.add_argument("--fixed_file", help="fixed file generated by grouping", type = str, default = "./fix_files_grouping/ariane.fix.old")
parser.add_argument("--step_threshold", help = "threshold (x and y) to break clusters (in um)", type = float, default = 100.6)
parser.add_argument("--distance", help="distance for merge clusters", type = float, default = 1000.0)
parser.add_argument("--max_num_vertices", help="threshold for samller clusters", type = int, default = 1000000)
parser.add_argument("--net_size_threshold", help = "large net threshold", type = int, default = 300)
parser.add_argument("--Nparts", help = "number of clusters (only for hmetis, default = 500)", type = int, default = 500)
parser.add_argument("--setup_file", help = "setup file for openroad (default = setup.tcl)", type = str, default = "setup.tcl")
......@@ -27,6 +30,9 @@ if __name__ == '__main__':
# Then we will remove all the IOs and Macros when we create soft blocks.
fixed_file = args.fixed_file
step_threshold = args.step_threshold
distance = args.distance
max_num_vertices = args.max_num_vertices
net_size_threshold = args.net_size_threshold
Nparts = args.Nparts
setup_file = args.setup_file
......@@ -38,7 +44,11 @@ if __name__ == '__main__':
# To use the grouping function, you need to specify the directory of src file
src_dir = "../src"
Clustering(design, src_dir, fixed_file, net_size_threshold, Nparts, setup_file, RePlace, placement_density, GUI)
Clustering(design, src_dir, fixed_file, step_threshold, distance,
max_num_vertices, net_size_threshold, Nparts, setup_file,
RePlace, placement_density, GUI)
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment