import os
import time
import shutil
import sys
sys.path.append('./utils')
sys.path.append('../../FormatTranslators/src')
from math import sqrt
from FormatTranslators import Port
from FormatTranslators import Macro
from FormatTranslators import MacroPin


class Clustering:
    def __init__(self, design, src_dir, fixed_file,
                 step_threshold = 100.6,
                 distance = 1000.0,
                 max_num_vertices = 1000000,
                 net_size_threshold = 300,
                 Nparts = 500,
                 setup_file = "setup.tcl",
                 RePlace = True,
                 placement_density = 0.7,
                 GUI = True):
        """
        parameter: design, help="design name: ariane, MegaBoom_x2", type = str
        parameter: src_dir, help="directory for source codes", type = str
        parameter: fixed_file, help="fixed file generated by grouping", type = str
        parameter: step_threshold, help="threshold (x and y) to break clusters (in um)", type = float
        parameter: distance, help="distance for merging clusters", type = float
        parameter: max_num_vertices, help="threshold for small clusters", type = int
        parameter: net_size_threshold, help="large net threshold", type = int
        parameter: Nparts, help="number of clusters (only for hMetis, default = 500)", type = int
        parameter: setup_file, help="setup file for OpenROAD (default = setup.tcl)", type = str
        parameter: RePlace, help="run RePlace for blob placement (default = True)", type = bool
        parameter: placement_density, help="placement density for RePlace (default = 0.7)", type = float
        parameter: GUI, help="run OpenROAD in GUI mode (default = True)", type = bool
        """
        # initialize parameters
        self.design = design
        self.src_dir = src_dir
        self.fixed_file = fixed_file
        self.step_threshold = step_threshold
        self.distance = distance
        self.max_num_vertices = max_num_vertices
        self.net_size_threshold = net_size_threshold
        self.Nparts = Nparts
        self.setup_file = setup_file
        self.RePlace = RePlace
        self.placement_density = placement_density
        self.GUI = GUI

        ### Print information
        print("[INFO] step_threshold : ", self.step_threshold)
        print("[INFO] distance : ", self.distance)
        print("[INFO] max_num_vertices : ", self.max_num_vertices)

        # Specify the locations of the hmetis and openroad executables and other utilities
        self.hmetis_exe = src_dir + "/utils/hmetis"
        self.openroad_exe = src_dir + "/utils/openroad"
        self.extract_hypergraph_file = src_dir + "/utils/extract_hypergraph.tcl"
        self.create_clustered_netlist_def_file = src_dir + "/utils/create_clustered_netlist_def.tcl"

        # set up temp report directory
        rpt_dir = os.getcwd() + "/rtl_mp"
        self.hypergraph_file = rpt_dir + "/" + design + ".hgr"
        # the io_name_file includes the io locations
        self.io_name_file = self.hypergraph_file + ".io"
        # the instance_name_file includes name, is_macro, bounding box of each instance
        self.instance_name_file = self.hypergraph_file + ".instance"
        self.hypergraph_fix_file = self.hypergraph_file + ".fix"
        self.macro_pin_file = self.hypergraph_file + ".macro_pin"
        self.outline_file = self.hypergraph_file + ".outline"
        self.net_file = self.hypergraph_file + ".net"

        # set up result directories
        result_dir = "./results"
        if not os.path.exists(result_dir):
            os.mkdir(result_dir)

        pbf_result_dir = result_dir + "/Protocol_buffer_format"
        if not os.path.exists(pbf_result_dir):
            os.mkdir(pbf_result_dir)

        cadence_result_dir = result_dir + "/Cadence"
        if not os.path.exists(cadence_result_dir):
            os.mkdir(cadence_result_dir)

        openroad_result_dir = result_dir + "/OpenROAD"
        if not os.path.exists(openroad_result_dir):
            os.mkdir(openroad_result_dir)

        # for protocol buffer format
        self.pbf_file = pbf_result_dir + "/" + self.design + ".pb.txt"
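        # Illustrative layout of the files produced under ./results,
        # derived from the paths set up above and just below:
        #   results/Protocol_buffer_format/<design>.pb.txt
        #   results/Cadence/<design>_cluster_<Nparts>.tcl
        #   results/OpenROAD/clusters.lef
        #   results/OpenROAD/clustered_netlist.def
        #   results/OpenROAD/blob.def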
        # for Innovus commands
        self.cluster_file = cadence_result_dir + "/" + self.design
        self.cluster_file += "_cluster_" + str(self.Nparts) + ".tcl"
        # the solution file name is defined by hmetis automatically
        self.solution_file = self.hypergraph_file + ".part." + str(self.Nparts)
        self.cluster_lef_file = openroad_result_dir + "/clusters.lef"
        self.cluster_def_file = openroad_result_dir + "/clustered_netlist.def"
        self.blob_def_file = openroad_result_dir + "/blob.def"

        # netlist information
        self.vertex_map = { }           # instance_name -> vertex_id
        self.vertex_list = [ ]          # instance_name (io_name)
        self.nets = [ ]                 # nets
        self.is_io_macro_list = [ ]     # is io / macro (True or False)
        self.num_ios = 0
        self.vertex_pos = [ ]           # bounding box of each instance
        self.solution_vector = [ ]      # solution vector, cluster_id for each vertex
        self.max_cluster_id = -1
        self.vertices_in_cluster = { }  # vertices in each cluster
        self.cluster_pos = { }          # coordinates of each cluster
        self.soft_macros = [ ]          # list of the std cell instances in each soft macro

        ###############################################################################
        ### Functions
        ###############################################################################
        self.GenerateHypergraph()  # Extract netlist information from lef/def/v
        self.RemoveLargeNet()      # Remove large nets
        self.ConvertFixFile()      # Convert fixed file
        self.hMetisPartitioner()   # Partition the hypergraph
        self.BreakClusters()       # Break clusters spreading apart
        print("[INFO] After finishing BreakClusters(), ", end = "")
        print("num_clusters = ", len(self.vertices_in_cluster))
        self.MergeSmallClusters()  # Merge small clusters with their neighbors
        print("[INFO] After finishing MergeSmallClusters(), ", end = "")
        print("num_clusters = ", len(self.vertices_in_cluster))
        self.GenerateSoftMacros()
        ProBufFormat(self.io_name_file, self.macro_pin_file, \
                     self.instance_name_file, self.outline_file, \
                     self.net_file, self.soft_macros, \
                     self.pbf_file, self.net_size_threshold, 1.0)
        self.CreateInvsCluster()   # Generate Innovus clustering commands
        self.CreateDef()           # Generate clustered lef and def files
        if (self.RePlace == True):
            self.RunRePlace()      # Generate blob placement
        shutil.rmtree(rpt_dir)

    def GenerateHypergraph(self):
        # Extract the hypergraph from the netlist
        temp_file = os.getcwd() + "/extract_hypergraph.tcl"
        cmd = "cp " + self.setup_file + " " + temp_file
        os.system(cmd)

        with open(self.extract_hypergraph_file) as f:
            content = f.read().splitlines()

        f = open(temp_file, "a")
        f.write("\n")
        for line in content:
            f.write(line + "\n")
        f.close()

        cmd = self.openroad_exe + " " + temp_file
        os.system(cmd)

        cmd = "rm " + temp_file
        os.system(cmd)

        # read io and instance information
        vertex_id = 0
        with open(self.io_name_file) as f:
            content = f.read().splitlines()

        for line in content:
            items = line.split()
            io_name = items[0]
            lx = float(items[1])
            ly = float(items[2])
            ux = float(items[3])
            uy = float(items[4])
            self.vertex_map[io_name] = vertex_id
            self.vertex_list.append(io_name)
            self.is_io_macro_list.append(True)
            self.vertex_pos.append([lx, ly, ux, uy])
            vertex_id += 1
        self.num_ios = vertex_id

        with open(self.instance_name_file) as f:
            content = f.read().splitlines()

        for line in content:
            items = line.split()
            instance_name = items[0]
            is_macro = int(items[1]) > 0
            lx = float(items[2])
            ly = float(items[3])
            ux = float(items[4])
            uy = float(items[5])
            self.vertex_map[instance_name] = vertex_id
            self.vertex_list.append(instance_name)
            self.is_io_macro_list.append(is_macro)
            self.vertex_pos.append([lx, ly, ux, uy])
            vertex_id += 1
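    # RemoveLargeNet() below parses the standard hMetis hypergraph format,
    # which extract_hypergraph.tcl is expected to emit (illustrative):
    #   <num_hyperedges> <num_vertices>   <- header line
    #   3 7 12                            <- one line of 1-based vertex ids
    #   5 6                                  per hyperedge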
    def RemoveLargeNet(self):
        # Remove large nets from the hypergraph
        with open(self.hypergraph_file) as f:
            content = f.read().splitlines()

        items = content[0].split()
        num_hyperedges = int(items[0])
        num_vertices = int(items[1])

        # update the nets
        hyperedges_list = []
        for i in range(1, len(content)):
            items = content[i].split()
            if (len(items) < self.net_size_threshold):
                hyperedges_list.append(content[i])
                net = [int(item) - 1 for item in items]
                self.nets.append(net)

        f = open(self.hypergraph_file, "w")
        line = str(len(hyperedges_list)) + " " + str(num_vertices) + "\n"
        f.write(line)
        for hyperedge in hyperedges_list:
            f.write(hyperedge + "\n")
        f.close()

    def ConvertFixFile(self):
        # Convert the grouping information to a fix file that can be used by hMetis
        fixed_part = [-1 for i in range(len(self.vertex_list))]
        with open(self.fixed_file) as f:
            content = f.read().splitlines()

        # read the grouping information
        for i in range(len(content)):
            # i is the group_id
            vertices = content[i].split(',')
            for vertex in vertices:
                fixed_part[self.vertex_map[vertex]] = i

        f = open(self.hypergraph_fix_file, "w")
        for part in fixed_part:
            f.write(str(part) + "\n")
        f.close()

    def hMetisPartitioner(self):
        # Partition the hypergraph using hmetis
        # The parameter configuration is the same as the Google Brain paper:
        # UBfactor = 5
        # Nruns = 10
        # CType = 5
        # RType = 3
        # Vcycle = 3
        # The random seed is 0 by default (in our implementation)
        # We use the hMetis C++ API to implement hMetis
        cmd = self.hmetis_exe + " " + self.hypergraph_file + " " + self.hypergraph_fix_file + " "
        cmd += str(self.Nparts) + " 5 10 5 3 3 0 0"
        os.system(cmd)

        # read the solution vector
        with open(self.solution_file) as f:
            content = f.read().splitlines()

        for line in content:
            self.solution_vector.append(int(line))
        self.max_cluster_id = max(self.solution_vector)

        # update vertices_in_cluster
        for i in range(self.max_cluster_id + 1):
            self.vertices_in_cluster[i] = []

        for i in range(len(self.solution_vector)):
            self.vertices_in_cluster[self.solution_vector[i]].append(i)

    def GetBoundingBox(self, vertices_in_cluster):
        # get the bounding box of the cluster (std cells only)
        # initialization
        cluster_lx = 1e20
        cluster_ly = 1e20
        cluster_ux = 0.0
        cluster_uy = 0.0
        for vertex in vertices_in_cluster:
            if (self.is_io_macro_list[vertex] == False):
                box = self.vertex_pos[vertex]
                cluster_lx = min(cluster_lx, box[0])
                cluster_ly = min(cluster_ly, box[1])
                cluster_ux = max(cluster_ux, box[2])
                cluster_uy = max(cluster_uy, box[3])
        return cluster_lx, cluster_ly, cluster_ux, cluster_uy
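    # Gridding sketch for BreakLargeCluster() below (illustrative numbers):
    # with step_threshold = 100 and a cluster whose bounding box is
    # (0, 0) - (450, 120), the center is (225, 60), so
    # num_x_grid = int(225 / 100) = 2 and num_y_grid = int(60 / 100) = 0.
    # Instances are binned by their centers into the 5 x 1 grid cells
    # indexed by x_grid in [-2, 2] and y_grid = 0; each non-center,
    # non-empty cell becomes a new cluster.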
    def BreakLargeCluster(self, cluster_id):
        # For a cluster with bounding box (lx, ly, ux, uy),
        # if (ux - lx) > threshold or (uy - ly) > threshold, break the cluster.
        # When breaking the cluster, we use the center of the cluster as the
        # origin and the threshold as the step size to grid the bounding box.
        # All the instances in each grid cell (in terms of the center of the
        # instance) form a new cluster.
        cluster_lx, cluster_ly, cluster_ux, cluster_uy = self.GetBoundingBox(self.vertices_in_cluster[cluster_id])
        if (((cluster_ux - cluster_lx) <= self.step_threshold) and ((cluster_uy - cluster_ly) <= self.step_threshold)):
            return None

        cluster_x = (cluster_lx + cluster_ux) / 2.0
        cluster_y = (cluster_ly + cluster_uy) / 2.0
        num_x_grid = int((cluster_ux - cluster_x) / self.step_threshold)
        num_y_grid = int((cluster_uy - cluster_y) / self.step_threshold)

        temp_vertices_in_cluster = { }
        for i in range((-1) * num_x_grid, num_x_grid + 1):
            temp_vertices_in_cluster[i] = { }
            for j in range((-1) * num_y_grid, num_y_grid + 1):
                temp_vertices_in_cluster[i][j] = [ ]

        for vertex in self.vertices_in_cluster[cluster_id]:
            lx, ly, ux, uy = self.vertex_pos[vertex]
            x = (lx + ux) / 2.0
            y = (ly + uy) / 2.0
            x_grid = int((x - cluster_x) / self.step_threshold)
            y_grid = int((y - cluster_y) / self.step_threshold)
            # clamp to the outermost cells so that macros and ios lying
            # outside the std-cell bounding box stay in range
            x_grid = min(max(x_grid, (-1) * num_x_grid), num_x_grid)
            y_grid = min(max(y_grid, (-1) * num_y_grid), num_y_grid)
            temp_vertices_in_cluster[x_grid][y_grid].append(vertex)

        # update the solution vector and vertices_in_cluster
        for x_grid, values in temp_vertices_in_cluster.items():
            for y_grid, vertices in values.items():
                if ((x_grid == 0) and (y_grid == 0)):
                    # the center cell keeps the original cluster id
                    self.vertices_in_cluster[cluster_id] = vertices
                elif (len(vertices) == 0):
                    pass
                else:
                    self.max_cluster_id += 1
                    self.vertices_in_cluster[self.max_cluster_id] = [ ]
                    for vertex in vertices:
                        self.vertices_in_cluster[self.max_cluster_id].append(vertex)
                        self.solution_vector[vertex] = self.max_cluster_id

    def BreakClusters(self):
        # In this step, we break clusters which spread around the canvas.
        clusters = list(self.vertices_in_cluster.keys())
        for cluster in clusters:
            self.BreakLargeCluster(cluster)

    def IsNearNeighbor(self, cluster, neighbor):
        dist = abs(self.cluster_pos[cluster][0] - self.cluster_pos[neighbor][0])
        dist += abs(self.cluster_pos[cluster][1] - self.cluster_pos[neighbor][1])
        return dist <= self.distance

    def AddClusterEdge(self, cluster_a, cluster_b):
        if cluster_b not in self.cluster_adj[cluster_a]:
            self.cluster_adj[cluster_a][cluster_b] = 1
        else:
            self.cluster_adj[cluster_a][cluster_b] += 1

    def MergeSmallClusters(self):
        # Merge each small cluster into the most adjacent cluster
        # among its neighbors (in terms of Manhattan distance)
        while(True):
            # collect small clusters and cluster positions
            small_clusters = []
            self.cluster_pos = { }
            self.cluster_adj = { }
            for cluster_id, vertices in self.vertices_in_cluster.items():
                cluster_lx, cluster_ly, cluster_ux, cluster_uy = self.GetBoundingBox(vertices)
                self.cluster_pos[cluster_id] = [(cluster_lx + cluster_ux) / 2.0, (cluster_ly + cluster_uy) / 2.0]
                self.cluster_adj[cluster_id] = { }
                if (len(vertices) < self.max_num_vertices):
                    small_clusters.append(cluster_id)

            # update the cluster adjacency matrix
            for net in self.nets:
                cluster_net = []
                for vertex in net:
                    cluster_id = self.solution_vector[vertex]
                    if cluster_id not in cluster_net:
                        cluster_net.append(cluster_id)
                if (len(cluster_net) > 1):
                    for cluster_i in cluster_net:
                        for cluster_j in cluster_net:
                            if (cluster_i != cluster_j):
                                self.AddClusterEdge(cluster_i, cluster_j)

            candidate_neighbors = { }
            for cluster in small_clusters:
                candidate_neighbors[cluster] = []

            for cluster in small_clusters:
                for neighbor in self.cluster_adj[cluster]:
                    if (self.IsNearNeighbor(cluster, neighbor) == True):
                        candidate_neighbors[cluster].append(neighbor)

            ### each iteration merges only one cluster
            small_cluster_id = -1
            for small_cluster, neighbors in candidate_neighbors.items():
                if (len(neighbors) > 0):
                    small_cluster_id = small_cluster
                    break

            # no small cluster can be merged
            if (small_cluster_id == -1):
                return None

            # pick the neighbor sharing the most nets
            best_neighbor = -1
            best_neighbor_nets = -1
            for neighbor in candidate_neighbors[small_cluster_id]:
                if (self.cluster_adj[small_cluster_id][neighbor] > best_neighbor_nets):
                    best_neighbor_nets = self.cluster_adj[small_cluster_id][neighbor]
                    best_neighbor = neighbor

            # update the solution vector
            for vertex in self.vertices_in_cluster[small_cluster_id]:
                self.solution_vector[vertex] = best_neighbor
                self.vertices_in_cluster[best_neighbor].append(vertex)
            del self.vertices_in_cluster[small_cluster_id]
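    # Merge sketch (illustrative): suppose cluster 7 holds fewer than
    # max_num_vertices vertices and shares 4 nets with cluster 3 and 9 nets
    # with cluster 5, both within self.distance (Manhattan) of cluster 7's
    # center.  Cluster 7 is then absorbed into cluster 5, and the adjacency
    # data is rebuilt before the next merge; exactly one cluster is merged
    # per iteration of the while loop above.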
    def CreateInvsCluster(self):
        # Create cluster commands for Innovus
        f = open(self.cluster_file, "w")
        line = "# This script was written and developed by ABKGroup students at UCSD.\n"
        line += "# However, the underlying commands and reports are copyrighted by Cadence.\n"
        line += "# We thank Cadence for granting permission to share our research to help\n"
        line += "# promote and foster the next generation of innovators.\n"
        line += "\n"
        f.write(line)

        # cluster_id starts from 0, so we have self.max_cluster_id + 1 groups
        # (note: some ids in [0, max_cluster_id] may be empty after merging)
        for i in range(self.max_cluster_id + 1):
            f.write("createInstGroup cluster" + str(i) + "\n")

        for i in range(len(self.vertex_list)):
            if (self.is_io_macro_list[i] == False):
                instance_name = self.vertex_list[i]
                cluster_id = self.solution_vector[i]
                line = "addInstToInstGroup cluster" + str(cluster_id) + " " + instance_name + "\n"
                f.write(line)
        f.close()

    def CreateDef(self):
        # Create the clustered netlist (in def format) based on the OpenROAD API
        # Create the related openroad tcl file
        file_name = os.getcwd() + "/create_def.tcl"
        cmd = "cp " + self.setup_file + " " + file_name
        os.system(cmd)

        f = open(file_name, "a")
        f.write("\n")
        f.write("\n")
        f.write("read_verilog $netlist\n")
        f.write("link_design $top_design\n")
        f.write("read_def $def_file -floorplan_initialize\n")
        f.write("\n")
        f.write("set db [ord::get_db]\n")
        f.write("set block [[$db getChip] getBlock]\n")
        f.write("set cluster_lef_file " + self.cluster_lef_file + "\n")
        f.write("set cluster_def_file " + self.cluster_def_file + "\n")
        f.write("\n")
        f.write("\n")

        for i in range(len(self.vertex_list)):
            if (self.is_io_macro_list[i] == False):
                instance_name = self.vertex_list[i]
                cluster_id = self.solution_vector[i]
                # escape the '\', '[' and ']' characters in the names
                new_instance_name = ""
                for char in instance_name:
                    if (char == '\\') or (char == '[') or (char == ']'):
                        new_instance_name += '\\' + char
                    else:
                        new_instance_name += char
                instance_name = new_instance_name
                line = "set inst [$block findInst " + instance_name + " ]\n"
                f.write(line)
                f.write("set cluster_id " + str(cluster_id) + "\n")
                f.write('set newProperty [odb::dbStringProperty_create $inst "cluster_id" $cluster_id]\n')
        f.close()

        with open(self.create_clustered_netlist_def_file) as f:
            content = f.read().splitlines()

        f = open(file_name, "a")
        f.write("\n")
        for line in content:
            f.write(line + "\n")
        f.close()

        cmd = self.openroad_exe + " " + file_name
        os.system(cmd)

        cmd = "rm " + file_name
        os.system(cmd)

        # Due to some bugs in OpenROAD, we have to manually remove the
        # RESISTANCE section for all the via layers
        with open(self.cluster_lef_file) as f:
            content = f.read().splitlines()

        f = open(self.cluster_lef_file, "w")
        i = 0
        while(i < len(content)):
            items = content[i].split()
            if (len(items) == 2 and items[0] == "LAYER" and items[1].startswith("via")):
                while((len(items) == 2 and items[0] == "END") == False):
                    if (items[0] != "RESISTANCE"):
                        f.write(content[i] + "\n")
                    i = i + 1
                    items = content[i].split()
                f.write(content[i] + "\n")
                i = i + 1
            else:
                f.write(content[i] + "\n")
                i = i + 1
        f.close()
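    # Example of the per-instance lines CreateDef() appends to create_def.tcl
    # (hypothetical instance name u0/alu[3] in cluster 42):
    #   set inst [$block findInst u0/alu\[3\] ]
    #   set cluster_id 42
    #   set newProperty [odb::dbStringProperty_create $inst "cluster_id" $cluster_id]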
    def RunRePlace(self):
        # Run RePlace on the clustered netlist
        # Create the related openroad tcl file
        file_name = os.getcwd() + "/run_replace.tcl"
        cmd = "cp " + self.setup_file + " " + file_name
        os.system(cmd)

        f = open(file_name, "a")
        line = "read_lef " + self.cluster_lef_file + "\n"
        line += "read_def " + self.cluster_def_file + "\n"
        line += "set global_place_density " + str(self.placement_density) + "\n"
        line += "set global_place_density_penalty 8e-5\n"
        line += "global_placement -disable_routability_driven -density $global_place_density -init_density_penalty $global_place_density_penalty\n"
        line += "write_def " + self.blob_def_file + "\n"
        f.write(line)
        if (self.GUI == False):
            f.write("exit\n")
        f.close()

        cmd = self.openroad_exe + " -gui " + file_name
        if (self.GUI == False):
            cmd = self.openroad_exe + " " + file_name
        os.system(cmd)

        cmd = "rm " + file_name
        os.system(cmd)

    def GetNumClusters(self):
        return len(self.vertices_in_cluster)

    def GenerateSoftMacros(self):
        for key, value in self.vertices_in_cluster.items():
            soft_macro = []
            for vertex_id in value:
                if self.is_io_macro_list[vertex_id] == False:
                    soft_macro.append(self.vertex_list[vertex_id])
            self.soft_macros.append(soft_macro)


# Generate the clustered netlist in Protocol Buffer Format
class ProBufFormat:
    def __init__(self, io_file, macro_pin_file, inst_file,
                 outline_file, net_file, soft_macros, pbf_file,
                 net_size_threshold, aspect_ratio):
        self.io_file = io_file
        self.macro_pin_file = macro_pin_file
        self.inst_file = inst_file
        self.outline_file = outline_file
        self.net_file = net_file
        self.soft_macros = soft_macros
        self.pbf_file = pbf_file
        self.net_size_threshold = net_size_threshold
        self.aspect_ratio = aspect_ratio

        self.insts = { }             # map name to Port, Macro, Soft Macro
        self.macro_pin_map = { }     # map macro_pin to macro
        self.macro_pin_offset = { }  # map macro_pin_name to offset
        self.std_cell_pos = { }      # instance_name, bounding box
        self.std_cell_map = { }      # instance_name, soft macro name
        self.ios = [ ]               # list of io names

        # floorplan outline
        self.fp_lx = 0.0
        self.fp_ly = 0.0
        self.fp_ux = 0.0
        self.fp_uy = 0.0

        self.ReadOutlineFile()
        self.ReadIOFile()
        self.ReadInstFile()
        self.ReadMacroPinFile()
        self.ReadNetFile()
        self.Output()

    # Read outline file
    def ReadOutlineFile(self):
        with open(self.outline_file) as f:
            content = f.read().splitlines()

        items = content[0].split()
        self.fp_lx = float(items[0])
        self.fp_ly = float(items[1])
        self.fp_ux = float(items[2])
        self.fp_uy = float(items[3])

        print('*' * 80)
        print("Outline Information")
        print("[INFO] Core Size : ", self.fp_lx, self.fp_ly, self.fp_ux, self.fp_uy)
        print("\n\n")

    # Read IO file
    def ReadIOFile(self):
        with open(self.io_file) as f:
            content = f.read().splitlines()

        for line in content:
            items = line.split()
            io_name = items[0]
            lx = float(items[1])
            ly = float(items[2])
            ux = float(items[3])
            uy = float(items[4])
            side = "LEFT"
            if (lx <= self.fp_lx):
                side = "LEFT"
            elif (ux >= self.fp_ux):
                side = "RIGHT"
            elif (uy >= self.fp_uy):
                side = "TOP"
            else:
                side = "BOTTOM"
            self.insts[io_name] = Port(io_name, (lx + ux) / 2.0, (ly + uy) / 2.0, side)
            self.ios.append(io_name)

    # Read instance file
    def ReadInstFile(self):
        with open(self.inst_file) as f:
            content = f.read().splitlines()

        for line in content:
            items = line.split()
            inst_name = items[0]
            block_flag = items[1]
            lx = float(items[2])
            ly = float(items[3])
            ux = float(items[4])
            uy = float(items[5])
            orientation = items[6]
            width = ux - lx
            height = uy - ly
            x = (lx + ux) / 2.0
            y = (ly + uy) / 2.0
            if block_flag == '1':
                self.insts[inst_name] = Macro(inst_name, width, height, x, y, orientation)
            else:
                self.std_cell_pos[inst_name] = [lx, ly, ux, uy]

        # Create soft macros
        soft_macro_id = 0
        for soft_macro in self.soft_macros:
            macro_name = "Grp_" + str(soft_macro_id)
            lx = 1e20
            ly = 1e20
            ux = 0.0
            uy = 0.0
            area = 0.0
            for std_cell in soft_macro:
                bbox = self.std_cell_pos[std_cell]
                lx = min(lx, bbox[0])
                ly = min(ly, bbox[1])
                ux = max(ux, bbox[2])
                uy = max(uy, bbox[3])
                area += (bbox[2] - bbox[0]) * (bbox[3] - bbox[1])
                self.std_cell_map[std_cell] = macro_name
            x = (lx + ux) / 2.0
            y = (ly + uy) / 2.0
            # shape the soft macro so that width * height = area with the
            # given aspect ratio
            height = sqrt(area * self.aspect_ratio)
            width = area / height
            self.insts[macro_name] = Macro(macro_name, width, height, x, y)
            self.insts[macro_name].IsSoft()
            soft_macro_id += 1
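    # Soft-macro shaping sketch (illustrative numbers): with
    # aspect_ratio = 1.0 and a summed std-cell area of 400.0 um^2,
    # height = sqrt(400.0 * 1.0) = 20.0 and width = 400.0 / 20.0 = 20.0,
    # i.e. a square soft macro centered on the bounding box of its
    # std cells.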
    # Read macro pin file
    def ReadMacroPinFile(self):
        with open(self.macro_pin_file) as f:
            content = f.read().splitlines()

        for line in content:
            items = line.split()
            pin_name = items[0] + '/' + items[1]
            macro_name = items[0]
            self.macro_pin_map[pin_name] = macro_name
            x_offset = float(items[2])
            y_offset = float(items[3])
            self.macro_pin_offset[pin_name] = [x_offset, y_offset]

    # Read net file
    def ReadNetFile(self):
        with open(self.net_file) as f:
            content = f.read().splitlines()

        adj_list = { }
        for io in self.ios:
            adj_list[io] = { }

        for macro_pin in self.macro_pin_map.keys():
            adj_list[macro_pin] = { }

        for i in range(len(self.soft_macros)):
            adj_list["Grp_" + str(i)] = { }

        for line in content:
            items = line.split()
            num_pin = len(items) // 4
            if (num_pin > self.net_size_threshold):
                continue
            driver_name = items[2]
            driver_pin_name = driver_name + '/' + items[0]
            driver = None
            if (driver_name in self.ios):
                driver = driver_name
            elif (driver_pin_name in self.macro_pin_offset):
                driver = driver_pin_name
            else:
                driver = self.std_cell_map[driver_name]

            sinks_name = []
            for i in range(1, num_pin):
                inst_name = items[4 * i + 2]
                pin_name = inst_name + '/' + items[4 * i]
                if (inst_name in self.ios):
                    sinks_name.append(inst_name)
                elif (pin_name in self.macro_pin_offset):
                    sinks_name.append(pin_name)
                else:
                    sinks_name.append(self.std_cell_map[inst_name] + "/Input")

            unique_sinks = []
            for sink in sinks_name:
                if sink not in unique_sinks:
                    unique_sinks.append(sink)

            for sink in unique_sinks:
                if sink not in adj_list[driver]:
                    adj_list[driver][sink] = 1
                else:
                    adj_list[driver][sink] += 1

        # Create macro pins
        for io in self.ios:
            self.insts[io].AddSinks(adj_list[io].keys())

        for macro_pin in self.macro_pin_map.keys():
            offset = self.macro_pin_offset[macro_pin]
            inst_name = self.macro_pin_map[macro_pin]
            if (len(adj_list[macro_pin]) == 0):
                # a macro pin without sinks is an input pin
                self.insts[inst_name].AddInputPin(MacroPin(macro_pin, inst_name, \
                                                           offset[0], offset[1]))
            else:
                Pin = MacroPin(macro_pin, inst_name, offset[0], offset[1])
                Pin.AddSinks(adj_list[macro_pin].keys())
                self.insts[inst_name].AddOutputPin(Pin)

        for i in range(len(self.soft_macros)):
            # add the input pin
            inst_name = "Grp_" + str(i)
            macro_pin = inst_name + "/Input"
            self.insts[inst_name].AddInputPin(MacroPin(macro_pin, inst_name, 0.0, 0.0))
            # add one output pin per sink
            output_idx = 1
            for key, weight in adj_list[inst_name].items():
                macro_pin = inst_name + "/Output_" + str(output_idx)
                output_idx += 1
                Pin = MacroPin(macro_pin, inst_name, 0.0, 0.0)
                Pin.AddSink(key)
                # only weight connections to other soft macros
                if key not in self.ios and key not in self.macro_pin_map:
                    Pin.SpecifyWeight(weight)
                self.insts[inst_name].AddOutputPin(Pin)

    # Generate the .pb.txt file
    def Output(self):
        f = open(self.pbf_file, "w")
        for inst_name, inst in self.insts.items():
            f.write(str(inst))
            if (inst.GetType() == "MACRO"):
                for macro_pin in inst.GetPins():
                    f.write(str(macro_pin))
        f.close()
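
# A minimal usage sketch (the design name and paths below are placeholders,
# not values shipped with this script):
#
#   if __name__ == "__main__":
#       cluster = Clustering(design = "ariane",
#                            src_dir = "../src",
#                            fixed_file = "./fix_files_grouping/ariane.fix",
#                            Nparts = 500,
#                            RePlace = True,
#                            GUI = False)
#       print("[INFO] num_clusters = ", cluster.GetNumClusters())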