lvzhengyang / macroplacement / Commits

Commit 6f2cd19a
authored Jul 03, 2022 by sakundu

Merge branch 'main' of github.com:TILOS-AI-Institute/MacroPlacement into flow_chart_update
Parents: 252eefac, f5db6d18
Showing 4 changed files with 24 additions and 11 deletions (+24 -11)

CodeElements/Clustering/src/__pycache__/clustering.cpython-37.pyc   +0 -0
CodeElements/Clustering/src/clustering.py   +22 -9
CodeElements/Clustering/test/test_clustering.py   +2 -2
CodeElements/FormatTranslators/src/__pycache__/FormatTranslators.cpython-37.pyc   +0 -0
CodeElements/Clustering/src/__pycache__/clustering.cpython-37.pyc
No preview for this file type
CodeElements/Clustering/src/clustering.py
@@ -74,7 +74,10 @@ class Clustering:
         if not os.path.exists(openroad_result_dir):
             os.mkdir(openroad_result_dir)
-        self.cluster_file = cadence_result_dir + "/" + self.design + "_cluster_" + str(self.Nparts) + ".tcl"  # for innovus command
+        # for innovus command
+        self.cluster_file = cadence_result_dir + "/" + self.design
+        self.cluster_file += "_cluster_" + str(self.Nparts) + ".tcl"
         self.solution_file = self.hypergraph_file + ".part." + str(self.Nparts)  # defined by hemtis automatically
         self.cluster_lef_file = openroad_result_dir + "/clusters.lef"
         self.cluster_def_file = openroad_result_dir + "/clustered_netlist.def"
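The new two-step assignment simply keeps the concatenation under a reasonable line length. As a purely illustrative alternative (not the repository's code, and with placeholder values standing in for the constructor inputs), the same path could be built with os.path.join:

    import os

    # Placeholder values for illustration only; in clustering.py these come from
    # the Clustering constructor arguments.
    cadence_result_dir = "./cadence_result"   # assumed directory name
    design = "ariane"
    Nparts = 500

    # Produces the same path as the concatenation in the hunk above.
    cluster_file = os.path.join(cadence_result_dir, design + "_cluster_" + str(Nparts) + ".tcl")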
@@ -92,7 +95,6 @@ class Clustering:
         self.vertices_in_cluster = {}  # vertices in each cluster
         self.cluster_pos = {}  # store the coordinates of each cluster
     ###############################################################################
     ### Functions
     ###############################################################################
@@ -108,6 +110,13 @@ class Clustering:
         print("[INFO] After finishing MergeSmallClusters(), ", end="")
         print("num_clusters = ", len(self.vertices_in_cluster))
+        print(len(self.vertices_in_cluster))
+        print(len(self.cluster_pos))
+        exit()
         self.CreateInvsCluster()  # Generate Innovus Clustering Commands
         self.CreateDef()  # Generate clustered lef and def file
         if (self.RePlace == True):
@@ -230,7 +239,8 @@ class Clustering:
         # Vcycle = 3
         # The random seed is 0 by default (in our implementation)
         # We use the hMetis C++ API to implement hMetis
-        cmd = self.hmetis_exe + " " + self.hypergraph_file + " " + self.hypergraph_fix_file + " " + str(self.Nparts) + " 5 10 5 3 3 0 0"
+        cmd = self.hmetis_exe + " " + self.hypergraph_file + " " + self.hypergraph_fix_file + " "
+        cmd += str(self.Nparts) + " 5 10 5 3 3 0 0"
         os.system(cmd)
         # read solution vector
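The trailing literal " 5 10 5 3 3 0 0" packs the hMetis options into one string. Read as a sketch only, assuming the usual hMetis 1.5 positional order when a fix file is supplied (HGraphFile FixFile Nparts UBfactor Nruns CType RType Vcycle Reconst dbglvl), the same command could be assembled with named values; the "Vcycle = 3" comment in the hunk is consistent with that ordering, but the parameter names below are an assumption, not taken from clustering.py:

    import os

    def run_hmetis(hmetis_exe, hypergraph_file, fix_file, nparts,
                   ubfactor=5, nruns=10, ctype=5, rtype=3, vcycle=3, reconst=0, dbglvl=0):
        # Sketch under the hMetis 1.5 argument-order assumption described above.
        args = [hmetis_exe, hypergraph_file, fix_file, nparts,
                ubfactor, nruns, ctype, rtype, vcycle, reconst, dbglvl]
        cmd = " ".join(str(a) for a in args)
        os.system(cmd)  # hMetis writes its solution to <hypergraph_file>.part.<nparts>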
@@ -336,11 +346,11 @@ class Clustering:
     def MergeSmallClusters(self):
         # Merge small clusters to the most adjacent clusters
         # within its neighbors (in terms of Manhattan distance)
         num_clusters = len(self.vertices_in_cluster)
         while (True):
             # check if there are possible clusters to be merged
             if (len(self.vertices_in_cluster) == num_clusters):
                 return None
             # merge small clusters
             num_clusters = len(self.vertices_in_cluster)
@@ -348,10 +358,10 @@ class Clustering:
         self.cluster_pos = {}
         self.cluster_adj = {}
         for cluster_id, vertices in self.vertices_in_cluster.items():
-            cluster_lx, cluster_ly, cluster_ux, cluster_uy = GetBoundingBox(self.vertices_in_cluster[cluster_id])
+            cluster_lx, cluster_ly, cluster_ux, cluster_uy = self.GetBoundingBox(self.vertices_in_cluster[cluster_id])
             self.cluster_pos[cluster_id] = [(cluster_ux + cluster_lx)/2.0, (cluster_ly + cluster_uy)/2.0]
             self.cluster_adj[cluster_id] = {}
-            if (len(vertices) > self.max_num_vertices):
+            if (len(vertices) < self.max_num_vertices):
                 small_clusters.append(cluster_id)
         # update cluster adjacency matrix
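Two small fixes land in this hunk: GetBoundingBox is now called as a method (self.GetBoundingBox), and the comparison flips from > to <, so a cluster is collected into small_clusters only when it holds fewer than max_num_vertices vertices. A minimal sketch of that intent, with hypothetical standalone names rather than the class methods themselves:

    def cluster_center(lx, ly, ux, uy):
        # The stored cluster position is the center of the cluster's bounding box.
        return [(ux + lx)/2.0, (ly + uy)/2.0]

    def is_small_cluster(vertices, max_num_vertices):
        # After the fix, "small" means strictly fewer vertices than the threshold.
        return len(vertices) < max_num_vertices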
@@ -367,7 +377,10 @@ class Clustering:
                 if (cluster_i != cluster_j):
                     self.AddClusterEdge(cluster_i, cluster_j)
-        candidate_neighors = {}
+        candidate_neighbors = {}
+        for cluster in small_clusters:
+            candidate_neighbors[cluster] = []
         for cluster in small_clusters:
             for neighbor in self.cluster_adj[cluster]:
                 if (self.IsNearNeighbor(cluster, neighbor) == True):
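Besides correcting the candidate_neighors typo, the added loop gives every small cluster an empty candidate list up front, so later lookups start from an empty list. A compact alternative, offered only as a sketch and not as this repository's code, is collections.defaultdict, which creates the empty list on first access:

    from collections import defaultdict

    # Hypothetical drop-in for the explicit initialization loop above:
    # each small cluster's entry is created as an empty list the first time it is touched.
    candidate_neighbors = defaultdict(list)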
CodeElements/Clustering/test/test_clustering.py
@@ -12,11 +12,11 @@ if __name__ == '__main__':
     parser.add_argument("--fixed_file", help="fixed file generated by grouping", type=str, default="./fix_files_grouping/ariane.fix.old")
     parser.add_argument("--step_threshold", help="threshold (x and y) to break clusters (in um)", type=float, default=400.0)
     parser.add_argument("--distance", help="distance for merge clusters", type=float, default=200.0)
-    parser.add_argument("--max_num_vertices", help="threshold for samller clusters", type=int, default=44)
+    parser.add_argument("--max_num_vertices", help="threshold for samller clusters", type=int, default=100)
     parser.add_argument("--net_size_threshold", help="large net threshold", type=int, default=300)
     parser.add_argument("--Nparts", help="number of clusters (only for hmetis, default = 500)", type=int, default=500)
     parser.add_argument("--setup_file", help="setup file for openroad (default = setup.tcl)", type=str, default="setup.tcl")
-    parser.add_argument("--RePlace", help="Run RePlace for blob placement (default = True)", type=bool, default=True)
+    parser.add_argument("--RePlace", help="Run RePlace for blob placement (default = True)", type=bool, default=False)
     parser.add_argument("--placement_density", help="Placement density for RePlace (default = 0.7)", type=float, default=0.7)
     parser.add_argument("--GUI", help="Run OpenROAD in GUI Mode (default = True)", type=bool, default=False)
     args = parser.parse_args()
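One caveat for the --RePlace and --GUI switches above: argparse's type=bool converts any non-empty string, including "False", to True, so the default value is effectively what decides these flags. If explicit command-line control were wanted, a common pattern (a sketch under that assumption, not code from this repository) is a small string-to-bool converter:

    import argparse

    def str2bool(value):
        # Accept common textual spellings of booleans; reject anything else.
        if isinstance(value, bool):
            return value
        if value.lower() in ("yes", "true", "t", "1"):
            return True
        if value.lower() in ("no", "false", "f", "0"):
            return False
        raise argparse.ArgumentTypeError("expected a boolean value")

    # Hypothetical usage: parser.add_argument("--RePlace", type=str2bool, default=False)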
CodeElements/FormatTranslators/src/__pycache__/FormatTranslators.cpython-37.pyc
No preview for this file type