Commit 47efba15 by ZhiangWang033

update clustering implementation

parent b6ba1cf4
import os
-import igraph as ig
-import leidenalg as la
-from igraph import *
import argparse
import time
import shutil
import sys
@@ -11,7 +7,7 @@ sys.path.append('./utils')
#################################################################
### Partitioning the hypergraph using hmetis
#################################################################
-def hMetisPartitioner(hmetis_exe, hypergraph_file, Nparts):
+def hMetisPartitioner(hmetis_exe, hypergraph_file, Nparts, fixed_file):
    # The parameter configuration is the same as in the Google Brain paper
    # UBfactor = 5
    # Nruns = 10
@@ -20,69 +16,11 @@ def hMetisPartitioner(hmetis_exe, hypergraph_file, Nparts):
    # Vcycle = 3
    # The random seed is 0 by default (in our implementation)
    # We use the hMetis C++ API to implement hMetis
-    cmd = hmetis_exe + " " + hypergraph_file + " " + str(Nparts) + " 5 10 5 3 3 0 0"
+    cmd = hmetis_exe + " " + hypergraph_file + " " + fixed_file + " " + str(Nparts) + " 5 10 5 3 3 0 0"
    os.system(cmd)
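# Editor's note: a minimal usage sketch, not part of the commit. The trailing
# "5 10 5 3 3 0 0" above encodes UBfactor=5, Nruns=10, CType=5, RType=3,
# Vcycle=3, Reconst=0 and dbglvl=0 in hMetis's positional-argument order.
# The paths below are hypothetical; subprocess.run is used instead of
# os.system so a failing hMetis run raises an error instead of being
# silently ignored.
def _demo_hmetis_call():
    import subprocess
    subprocess.run(["./utils/hmetis", "ariane.hgr", "ariane.hgr.fix", "500",
                    "5", "10", "5", "3", "3", "0", "0"], check=True)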
-#################################################################
-### Partitioning the hypergraph using Leiden algorithm
-#################################################################
-def LeidenPartitioner(hypergraph_file, solution_file):
-    with open(hypergraph_file) as f:
-        content = f.read().splitlines()
-    f.close()
-    items = content[0].split()
-    num_hyperedges = int(items[0])
-    num_vertices = int(items[1])
-    edge_list = [0 for i in range(num_vertices)]
-    for i in range(num_vertices):
-        edge_list[i] = { }
-    # Clique model
-    for i in range(1, len(content)):
-        items = content[i].split()
-        hyperedge = [int(item) - 1 for item in items]
-        if(len(hyperedge) > 20):
-            continue
-        hyperedge.sort()
-        weight = 1.0 / (len(hyperedge) - 1)
-        for i in range(len(hyperedge) - 1):
-            for j in range(i + 1, len(hyperedge)):
-                src = hyperedge[i]
-                target = hyperedge[j]
-                if target in edge_list[src]:
-                    edge_list[src][target] += weight
-                else:
-                    edge_list[src][target] = weight
-    tuple_edge_list = []
-    weights = []
-    for i in range(len(edge_list)):
-        for key, value in edge_list[i].items():
-            tuple_edge_list.append((i, key))
-            weights.append(value)
-    g = Graph(directed = False)
-    g.add_vertices(num_vertices)
-    g.add_edges(tuple_edge_list)
-    g.es["weight"] = weights
-    partition = la.find_partition(g, la.ModularityVertexPartition)
-    solution_vector = partition.membership
-    num_clusters = max(solution_vector) + 1
-    print("[INFO] number of clusters : ", num_clusters)
-    solution_file = hypergraph_file + ".cluster"
-    f = open(solution_file, "w")
-    for solution in solution_vector:
-        f.write(str(solution) + "\n")
-    f.close()
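# Editor's note: a self-contained sketch (not part of the commit) of the clique
# model used by the removed partitioner: every k-pin hyperedge (k <= 20 here)
# becomes a clique whose edges each carry weight 1/(k-1), and weights from
# different hyperedges accumulate on shared vertex pairs.
def _demo_clique_expansion():
    hyperedges = [[0, 1, 2], [1, 2]]             # toy hypergraph, 0-based ids
    edge_weights = {}
    for he in hyperedges:
        w = 1.0 / (len(he) - 1)
        for i in range(len(he) - 1):
            for j in range(i + 1, len(he)):
                pair = (he[i], he[j])
                edge_weights[pair] = edge_weights.get(pair, 0.0) + w
    # (1, 2) gets 0.5 from the 3-pin net plus 1.0 from the 2-pin net
    assert abs(edge_weights[(1, 2)] - 1.5) < 1e-9
    return edge_weights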
#################################################################
### Create cluster commands for Innovus
#################################################################
def CreateInvsCluster(solution_file, io_name_file, instance_name_file, cluster_file):
@@ -108,11 +46,24 @@ def CreateInvsCluster(solution_file, io_name_file, instance_name_file, cluster_f
    f = open(cluster_file, "w")
    line = "# This script was written and developed by ABKGroup students at UCSD.\n"
    line += "# However, the underlying commands and reports are copyrighted by Cadence.\n"
    line += "# We thank Cadence for granting permission to share our research to help \n"
    line += "# promote and foster the next generation of innovators.\n"
    line += "\n"
    f.write(line)
    for i in range(num_clusters):
        f.write("createInstGroup cluster" + str(i) + "\n")
    for i in range(len(content)):
-        instance_name = content[i]
+        items = content[i].split()
+        instance_name = items[0]
+        # ignore all the macros
+        is_macro = int(items[1])
+        if (is_macro == 1):
+            continue
        cluster_id = solution_vector[num_ios + i]
        line = "addInstToInstGroup cluster" + str(cluster_id) + " " + instance_name + "\n"
        f.write(line)
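# Editor's note: a tiny self-contained sketch (not part of the commit) of the
# command generation above; instance names and cluster ids are hypothetical.
# IOs occupy the first num_ios slots of solution_vector, so instance i maps to
# solution_vector[num_ios + i], and macros (is_macro == 1) are skipped.
def _demo_invs_cluster_lines():
    num_ios = 1                                  # vertex 0 is an IO pin
    solution_vector = [0, 0, 0, 1]               # cluster id per vertex, IOs first
    instances = ["u_alu_reg_0 0",                # "<name> <is_macro>" records
                 "u_big_macro 1",
                 "u_fpu_reg_7 0"]
    lines = []
    for i, rec in enumerate(instances):
        items = rec.split()
        if int(items[1]) == 1:                   # macros never join a soft cluster
            continue
        cid = solution_vector[num_ios + i]
        lines.append("addInstToInstGroup cluster" + str(cid) + " " + items[0])
    assert lines == ["addInstToInstGroup cluster0 u_alu_reg_0",
                     "addInstToInstGroup cluster1 u_fpu_reg_7"]
    return lines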
@@ -144,7 +95,6 @@ def CreateDef(solution_file, io_name_file, instance_name_file, \
    content = f.read().splitlines()
    f.close()
    ### Create the related openroad tcl file
    file_name = os.getcwd() + "/create_def.tcl"
    cmd = "cp " + setup_file + " " + file_name
@@ -166,7 +116,13 @@ def CreateDef(solution_file, io_name_file, instance_name_file, \
f.write("\n")
f.write("\n")
for i in range(len(content)):
instance_name = content[i]
items = content[i].split()
instance_name = items[0]
# just consider standard cells
is_macro = int(items[1])
if (is_macro == 1):
continue
cluster_id = solution_vector[num_ios + i]
line = "set inst [$block findInst " + instance_name + " ]\n"
f.write(line)
@@ -267,72 +223,143 @@ def GenerateHypergraph(openroad_exe, setup_file, extract_hypergraph_file):
cmd = "rm " + temp_file
os.system(cmd)
+####################################################################################
+#### Remove large nets from the hypergraph
+####################################################################################
+def RemoveLargeNet(hypergraph_file, net_size_threshold):
+    with open(hypergraph_file) as f:
+        content = f.read().splitlines()
+    f.close()
+    items = content[0].split()
+    num_hyperedges = int(items[0])
+    num_vertices = int(items[1])
-if __name__ == '__main__':
-    parser = argparse.ArgumentParser()
-    parser.add_argument("design", help="design_name: ariane, MegaBoom_x2", type = str)
-    parser.add_argument("partitioner", help="hmetis, leiden", type = str)
-    parser.add_argument("--Nparts", help = "number of clusters (only for hmetis, default = 500)", type = int, default = 500)
-    parser.add_argument("--setup_file", help = "setup file for openroad (default = setup.tcl)", type = str, default = "setup.tcl")
-    parser.add_argument("--RePlace", help = "Run RePlace for blob placement (default = True)", type = bool, default = True)
-    parser.add_argument("--placement_density", help = "Placement density for RePlace (default = 0.7)", type = float, default = 0.7)
-    parser.add_argument("--GUI", help = "Run OpenROAD in GUI Mode (default = True)", type = bool, default = True)
-    args = parser.parse_args()
+    hyperedges_list = []
+    for i in range(1, len(content)):
+        items = content[i].split()
+        if (len(items) < net_size_threshold):
+            hyperedges_list.append(content[i])
+    f = open(hypergraph_file, "w")
+    line = str(len(hyperedges_list)) + " " + str(num_vertices) + "\n"
+    f.write(line)
+    for hyperedge in hyperedges_list:
+        f.write(hyperedge + "\n")
+    f.close()
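# Editor's note: a self-contained round-trip sketch (not part of the commit) of
# the filter above. In the .hgr format the first line is
# "<num_hyperedges> <num_vertices>" and every following line lists the 1-based
# vertex ids of one net, so dropping nets only requires rewriting the count.
def _demo_remove_large_net():
    content = ["3 4", "1 2", "1 2 3 4", "3 4"]   # toy hypergraph: 3 nets over 4 vertices
    net_size_threshold = 3                        # keep nets with fewer than 3 pins
    kept = [line for line in content[1:]
            if len(line.split()) < net_size_threshold]
    rewritten = [str(len(kept)) + " " + content[0].split()[1]] + kept
    assert rewritten == ["2 4", "1 2", "3 4"]     # the 4-pin net was removed
    return rewritten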
+####################################################################################
+#### Convert the grouping information to a fix file that can be used by hMetis
+####################################################################################
+def ConvertFixFile(fixed_file, hypergraph_fix_file, io_name_file, instance_name_file):
+    vertex_id = 0
+    vertex_map = { }
+    with open(io_name_file) as f:
+        content = f.read().splitlines()
+    f.close()
-    design = args.design
-    partitioner = args.partitioner
-    Nparts = args.Nparts
-    setup_file = args.setup_file
-    RePlace = args.RePlace
-    placement_density = args.placement_density
-    GUI = args.GUI
+    for line in content:
+        io_name = line.split()[0]
+        vertex_map[io_name] = vertex_id
+        vertex_id += 1
+    with open(instance_name_file) as f:
+        content = f.read().splitlines()
+    f.close()
+    for line in content:
+        instance_name = line.split()[0]
+        vertex_map[instance_name] = vertex_id
+        vertex_id += 1
+    fixed_part = [-1 for i in range(vertex_id)]
+    with open(fixed_file) as f:
+        content = f.read().splitlines()
+    f.close()
+    for i in range(len(content)):
+        items = content[i].split(',')
+        for item in items:
+            fixed_part[vertex_map[item]] = i
+    f = open(hypergraph_fix_file, "w")
+    for part in fixed_part:
+        f.write(str(part) + "\n")
+    f.close()
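# Editor's note: a minimal sketch (not part of the commit) of the conversion
# above, with hypothetical names. Each line of the grouping file holds the
# comma-separated members of one group; the output assigns that line's index
# as the fixed partition id, and -1 marks vertices hMetis is free to move.
def _demo_convert_fix():
    vertex_map = {"io_a": 0, "io_b": 1, "u_reg_0": 2, "u_reg_1": 3}
    groups = ["io_a,u_reg_0", "io_b"]            # one group per line, comma separated
    fixed_part = [-1] * len(vertex_map)
    for group_id, line in enumerate(groups):
        for name in line.split(','):
            fixed_part[vertex_map[name]] = group_id
    assert fixed_part == [0, 1, 0, -1]           # u_reg_1 remains movable
    return fixed_part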
+def Clustering(design, src_dir, fixed_file, net_size_threshold = 300, Nparts = 500, setup_file = "setup.tcl", RePlace = True, placement_density = 0.7, GUI = True):
+    """
+    parameter: design, help="design_name: ariane, MegaBoom_x2", type = str
+    parameter: src_dir, help="directory for source codes", type = str
+    parameter: fixed_file, help="fixed file generated by grouping"
+    parameter: net_size_threshold, help="large net threshold", type = int
+    parameter: Nparts, help = "number of clusters (only for hmetis, default = 500)", type = int
+    parameter: setup_file, help = "setup file for openroad (default = setup.tcl)", type = str
+    parameter: RePlace, help = "Run RePlace for blob placement (default = True)", type = bool
+    parameter: placement_density, help = "Placement density for RePlace (default = 0.7)", type = float
+    parameter: GUI, help = "Run OpenROAD in GUI Mode (default = True)", type = bool
+    """
    pwd = os.getcwd()
    # Specify the location of hmetis exe and openroad exe
-    hmetis_exe = pwd + "/utils/hmetis"
-    openroad_exe = pwd + "/utils/openroad"
-    extract_hypergraph_file = pwd + "/utils/extract_hypergraph.tcl"
-    create_clustered_netlist_def_file = pwd + "/utils/create_clustered_netlist_def.tcl"
+    hmetis_exe = src_dir + "/utils/hmetis"
+    openroad_exe = src_dir + "/utils/openroad"
+    extract_hypergraph_file = src_dir + "/utils/extract_hypergraph.tcl"
+    create_clustered_netlist_def_file = src_dir + "/utils/create_clustered_netlist_def.tcl"
    print("[INFO] Design : ", design)
-    print("[INFO] Partitioner : ", partitioner)
    print("[INFO] Nparts : ", Nparts)
result_dir = "./results"
if not os.path.exists(result_dir):
os.mkdir(result_dir)
cadence_result_dir = result_dir + "/Cadence"
if not os.path.exists(cadence_result_dir):
os.mkdir(cadence_result_dir)
openroad_result_dir = result_dir + "/OpenROAD"
if not os.path.exists(openroad_result_dir):
os.mkdir(openroad_result_dir)
    # Generate Hypergraph file
    rpt_dir = pwd + "/rtl_mp"
    hypergraph_file = rpt_dir + "/" + design + ".hgr"
    io_name_file = hypergraph_file + ".io"
    instance_name_file = hypergraph_file + ".instance"
+    hypergraph_fix_file = hypergraph_file + ".fix"
    GenerateHypergraph(openroad_exe, setup_file, extract_hypergraph_file)
    # Remove large nets
+    RemoveLargeNet(hypergraph_file, net_size_threshold)
+    # Convert fixed file
+    ConvertFixFile(fixed_file, hypergraph_fix_file, io_name_file, instance_name_file)
    # Partition the hypergraph
-    cluster_file = rpt_dir + "/" + design + "_cluster_" + partitioner + ".tcl" # for innovus command
-    solution_file = hypergraph_file + ".cluster"
-    if partitioner == "leiden":
-        LeidenPartitioner(hypergraph_file, solution_file)
-    elif partitioner == "hmetis":
-        cluster_file = rpt_dir + "/" + design + "_cluster_" + partitioner + "_" + str(Nparts) + ".tcl" # for innovus command
-        solution_file = hypergraph_file + ".part." + str(Nparts) # named by hMetis automatically
-        hMetisPartitioner(hmetis_exe, hypergraph_file, Nparts)
-    else:
-        print("[ERROR] The partitioner is not defined!")
-        exit()
+    cluster_file = cadence_result_dir + "/" + design + "_cluster_" + str(Nparts) + ".tcl" # for innovus command
+    solution_file = hypergraph_file + ".part." + str(Nparts) # named by hMetis automatically
+    hMetisPartitioner(hmetis_exe, hypergraph_file, Nparts, hypergraph_fix_file)
    # Generate Innovus Clustering Commands
    CreateInvsCluster(solution_file, io_name_file, instance_name_file, cluster_file)
    # Generate clustered lef and def file
-    cluster_lef_file = rpt_dir + "/clusters.lef"
-    cluster_def_file = rpt_dir + "/clustered_netlist.def"
+    cluster_lef_file = openroad_result_dir + "/clusters.lef"
+    cluster_def_file = openroad_result_dir + "/clustered_netlist.def"
    CreateDef(solution_file, io_name_file, instance_name_file, cluster_lef_file, cluster_def_file, \
              setup_file, create_clustered_netlist_def_file, openroad_exe)
    # Generate blob placement
-    blob_def_file = rpt_dir + "/blob.def"
+    blob_def_file = openroad_result_dir + "/blob.def"
    if (RePlace == True):
        RunRePlace(cluster_lef_file, cluster_def_file, blob_def_file, setup_file, placement_density, openroad_exe, GUI)
    shutil.rmtree(rpt_dir)
This source diff could not be displayed because it is too large. You can view the blob instead.
import os
import argparse
import time
import shutil
import sys
sys.path.append('../src')
from clustering import Clustering
if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument("--design", help="design_name: ariane, MegaBoom_x2", type = str, default = "ariane")
    parser.add_argument("--fixed_file", help="fixed file generated by grouping", type = str, default = "./fix_files_grouping/ariane.fix.old")
    parser.add_argument("--net_size_threshold", help = "large net threshold", type = int, default = 300)
    parser.add_argument("--Nparts", help = "number of clusters (only for hmetis, default = 500)", type = int, default = 500)
    parser.add_argument("--setup_file", help = "setup file for openroad (default = setup.tcl)", type = str, default = "setup.tcl")
parser.add_argument("--RePlace", help = "Run RePlace for blob placement (default = True)", type = bool, default = True)
parser.add_argument("--placement_density", help = "Placement density for RePlace (default = 0.7)", type = float, default = 0.7)
parser.add_argument("--GUI", help = "Run OpenROAD in GUI Mode (default = True)", type = bool, default = True)
args = parser.parse_args()
    design = args.design
    # The fixed file should be generated by our grouping script in the repo.
    # Here we should use *.fix.old as the fix file.
    # *.fix.old includes the IOs and Macros in the corresponding group, so
    # we don't need to change the hypergraph when we do partitioning.
    # We then remove all the IOs and Macros when we create soft blocks.
    fixed_file = args.fixed_file
    net_size_threshold = args.net_size_threshold
    Nparts = args.Nparts
    setup_file = args.setup_file
    RePlace = args.RePlace
    placement_density = args.placement_density
    GUI = args.GUI
    # To use the grouping function, you need to specify the directory of the src files
    src_dir = "../src"
    Clustering(design, src_dir, fixed_file, net_size_threshold, Nparts, setup_file, RePlace, placement_density, GUI)
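# Editor's note: a hypothetical invocation (the script and fix-file names are
# assumptions; adjust them to your checkout):
#
#   python run_clustering.py --design ariane \
#       --fixed_file ./fix_files_grouping/ariane.fix.old \
#       --net_size_threshold 300 --Nparts 500 --setup_file setup.tcl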
@@ -14,7 +14,7 @@ if __name__ == '__main__':
parser.add_argument("--K_in", help = "K_in", type = int, default = "1")
parser.add_argument("--K_out", help = "K_out", type = int, default = "1")
parser.add_argument("--setup_file", help = "setup file for openroad (default = setup.tcl)", type = str, default = "setup.tcl")
parser.add_argument("--global_net_threshold", help = "global net threshold", type = int, default = 100)
parser.add_argument("--global_net_threshold", help = "global net threshold", type = int, default = 300)
args = parser.parse_args()