Commit e634766f authored Oct 04, 2022 by Dinple
reupload
parent 4c146b37
Showing 2 changed files with 1400 additions and 0 deletions
CodeElements/Plc_client/observation_extractor_os.py  +404  -0
CodeElements/Plc_client/placement_util_os.py  +996  -0
CodeElements/Plc_client/observation_extractor_os.py  0 → 100644
# coding=utf-8
# Copyright 2021 The Circuit Training Team Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This class extracts features from observations."""
from typing import Dict, Optional, Text, Tuple

from Plc_client import observation_config as observation_config_lib
from Plc_client import plc_client
import gin
import numpy as np


@gin.configurable
class ObservationExtractor(object):
  """Extracts observation features from plc."""

  EPSILON = 1E-6

  def __init__(self,
               plc: plc_client.PlacementCost,
               observation_config: Optional[
                   observation_config_lib.ObservationConfig] = None,
               default_location_x: float = 0.5,
               default_location_y: float = 0.5):
    self.plc = plc
    self._observation_config = (
        observation_config or observation_config_lib.ObservationConfig())
    self._default_location_x = default_location_x
    self._default_location_y = default_location_y

    self.width, self.height = self.plc.get_canvas_width_height()
    self.num_cols, self.num_rows = self.plc.get_grid_num_columns_rows()
    self.grid_width = self.width / self.num_cols
    self.grid_height = self.height / self.num_rows

    # Since there are too many I/O ports, we have to cluster them together to
    # make it manageable for the model to process. The ports that are located
    # in the same grid cell are clustered together.
    self.adj_vec, grid_cell_of_clustered_ports_vec = self.plc.get_macro_and_clustered_port_adjacency(
    )
    self.clustered_port_locations_vec = [
        self._get_clustered_port_locations(i)
        for i in grid_cell_of_clustered_ports_vec
    ]

    # Extract static features.
    self._features = self._extract_static_features()
    # done

  def _extract_static_features(self) -> Dict[Text, np.ndarray]:
    """Static features that are invariant across training steps."""
    features = dict()
    self._extract_num_macros(features)
    self._extract_technology_info(features)
    self._extract_node_types(features)
    self._extract_macro_size(features)
    self._extract_macro_and_port_adj_matrix(features)
    self._extract_canvas_size(features)
    self._extract_grid_size(features)
    self._extract_initial_node_locations(features)
    self._extract_normalized_static_features(features)
    return features
    # done

  def _extract_normalized_static_features(
      self, features: Dict[Text, np.ndarray]) -> None:
    """Normalizes static features."""
    self._add_netlist_metadata(features)
    self._normalize_adj_matrix(features)
    self._pad_adj_matrix(features)
    self._pad_macro_static_features(features)
    self._normalize_macro_size_by_canvas(features)
    self._normalize_grid_size(features)
    self._normalize_locations_by_canvas(features)
    self._replace_unplace_node_location(features)
    self._pad_macro_dynamic_features(features)
    # done

  def _extract_num_macros(self, features: Dict[Text, np.ndarray]) -> None:
    features['num_macros'] = np.asarray([len(self.plc.get_macro_indices())
                                        ]).astype(np.int32)
    # done

  def _extract_technology_info(self, features: Dict[Text, np.ndarray]) -> None:
    """Extracts Technology-related information."""
    routing_resources = {
        'horizontal_routes_per_micron':
            self.plc.get_routes_per_micron()[0],
        'vertical_routes_per_micron':
            self.plc.get_routes_per_micron()[1],
        'macro_horizontal_routing_allocation':
            self.plc.get_macro_routing_allocation()[0],
        'macro_vertical_routing_allocation':
            self.plc.get_macro_routing_allocation()[0],
    }
    for k in routing_resources:
      features[k] = np.asarray([routing_resources[k]]).astype(np.float32)
    # done

  def _extract_initial_node_locations(self,
                                      features: Dict[Text, np.ndarray]) -> None:
    """Extracts initial node locations."""
    locations_x = []
    locations_y = []
    is_node_placed = []
    for macro_idx in self.plc.get_macro_indices():
      x, y = self.plc.get_node_location(macro_idx)
      locations_x.append(x)
      locations_y.append(y)
      is_node_placed.append(1 if self.plc.is_node_placed(macro_idx) else 0)
    for x, y in self.clustered_port_locations_vec:
      locations_x.append(x)
      locations_y.append(y)
      is_node_placed.append(1)
    features['locations_x'] = np.asarray(locations_x).astype(np.float32)
    features['locations_y'] = np.asarray(locations_y).astype(np.float32)
    features['is_node_placed'] = np.asarray(is_node_placed).astype(np.int32)
    # done

  def _extract_node_types(self, features: Dict[Text, np.ndarray]) -> None:
    """Extracts node types."""
    types = []
    for macro_idx in self.plc.get_macro_indices():
      if self.plc.is_node_soft_macro(macro_idx):
        types.append(observation_config_lib.SOFT_MACRO)
      else:
        types.append(observation_config_lib.HARD_MACRO)
    for _ in range(len(self.clustered_port_locations_vec)):
      types.append(observation_config_lib.PORT_CLUSTER)
    features['node_types'] = np.asarray(types).astype(np.int32)

  def _extract_macro_size(self, features: Dict[Text, np.ndarray]) -> None:
    """Extracts macro sizes."""
    macros_w = []
    macros_h = []
    for macro_idx in self.plc.get_macro_indices():
      if self.plc.is_node_soft_macro(macro_idx):
        # Width and height of soft macros are set to zero.
        width = 0
        height = 0
      else:
        width, height = self.plc.get_node_width_height(macro_idx)
      macros_w.append(width)
      macros_h.append(height)
    for _ in range(len(self.clustered_port_locations_vec)):
      macros_w.append(0)
      macros_h.append(0)
    features['macros_w'] = np.asarray(macros_w).astype(np.float32)
    features['macros_h'] = np.asarray(macros_h).astype(np.float32)
    # done

  def _extract_macro_and_port_adj_matrix(
      self, features: Dict[Text, np.ndarray]) -> None:
    """Extracts adjacency matrix."""
    num_nodes = len(self.plc.get_macro_indices()) + len(
        self.clustered_port_locations_vec)
    assert num_nodes * num_nodes == len(self.adj_vec)
    sparse_adj_i = []
    sparse_adj_j = []
    sparse_adj_weight = []
    edge_counts = np.zeros((self._observation_config.max_num_nodes,),
                           dtype=np.int32)
    # issue with determine max_num_nodes
    for i in range(num_nodes):
      for j in range(i + 1, num_nodes):
        weight = self.adj_vec[i + num_nodes * j]
        if weight > 0:
          sparse_adj_i.append(i)
          sparse_adj_j.append(j)
          sparse_adj_weight.append(weight)
          edge_counts[i] += 1
          edge_counts[j] += 1
    features['sparse_adj_i'] = np.asarray(sparse_adj_i).astype(np.int32)
    features['sparse_adj_j'] = np.asarray(sparse_adj_j).astype(np.int32)
    features['sparse_adj_weight'] = np.asarray(sparse_adj_weight).astype(
        np.float32)
    features['edge_counts'] = edge_counts
    # if not enough edges
    # print("edge_counts ", np.sum(features['edge_counts'])) # 16624
    # done

  def _extract_canvas_size(self, features: Dict[Text, np.ndarray]) -> None:
    features['canvas_width'] = np.asarray([self.width])
    features['canvas_height'] = np.asarray([self.height])
    # done

  def _extract_grid_size(self, features: Dict[Text, np.ndarray]) -> None:
    features['grid_cols'] = np.asarray([self.num_cols]).astype(np.float32)
    features['grid_rows'] = np.asarray([self.num_rows]).astype(np.float32)
    # done

  def _get_clustered_port_locations(
      self, grid_cell_index: int) -> Tuple[float, float]:
    """Returns clustered port locations.

    This function returns an approximate location of the ports in a grid
    cell. Depending on the cell location in the canvas, the approximation
    differs.

    Args:
      grid_cell_index: The index of the grid cell where the cluster port is
        located.

    Returns:
      A tuple of float: Approximate x, y location of the port cluster in the
      grid cell in the same unit as canvas width and height (micron).
    """
    col = grid_cell_index % self.num_cols
    row = grid_cell_index // self.num_cols
    if col == 0 and row == 0:
      return 0, 0
    elif col == 0 and row == self.num_rows - 1:
      return 0, self.height
    elif col == self.num_cols - 1 and row == 0:
      return self.width, 0
    elif col == self.num_cols - 1 and row == self.num_rows - 1:
      return self.width, self.height
    elif col == 0:
      return 0, (row + 0.5) * self.grid_height
    elif col == self.num_cols - 1:
      return self.width, (row + 0.5) * self.grid_height
    elif row == 0:
      return (col + 0.5) * self.grid_width, 0
    elif row == self.num_rows - 1:
      return (col + 0.5) * self.grid_width, self.height
    else:
      return (col + 0.5) * self.grid_width, (row + 0.5) * self.grid_height

  def _add_netlist_metadata(self, features: Dict[Text, np.ndarray]) -> None:
    """Adds netlist metadata info."""
    features['normalized_num_edges'] = np.asarray([
        np.sum(features['sparse_adj_weight']) /
        self._observation_config.max_num_edges
    ]).astype(np.float32)
    features['normalized_num_hard_macros'] = np.asarray([
        np.sum(
            np.equal(features['node_types'],
                     observation_config_lib.HARD_MACRO).astype(np.float32)) /
        self._observation_config.max_num_nodes
    ]).astype(np.float32)
    features['normalized_num_soft_macros'] = np.asarray([
        np.sum(
            np.equal(features['node_types'],
                     observation_config_lib.SOFT_MACRO).astype(np.float32)) /
        self._observation_config.max_num_nodes
    ]).astype(np.float32)
    features['normalized_num_port_clusters'] = np.asarray([
        np.sum(
            np.equal(features['node_types'],
                     observation_config_lib.PORT_CLUSTER).astype(np.float32)) /
        self._observation_config.max_num_nodes
    ]).astype(np.float32)

  def _normalize_adj_matrix(self, features: Dict[Text, np.ndarray]) -> None:
    """Normalizes adj matrix weights."""
    mean_weight = np.mean(features['sparse_adj_weight'])
    features['sparse_adj_weight'] = (
        features['sparse_adj_weight'] /
        (mean_weight + ObservationExtractor.EPSILON)).astype(np.float32)

  def _pad_1d_tensor(self, tensor: np.ndarray, pad_size: int) -> np.ndarray:
    if (pad_size - tensor.shape[0]) < 0:
      print("padding not applied", pad_size, tensor.shape[0])
      return np.pad(tensor, (0, 0), mode='constant', constant_values=0)
    else:
      return np.pad(
          tensor, (0, pad_size - tensor.shape[0]),
          mode='constant',
          constant_values=0)

  def _pad_adj_matrix(self, features: Dict[Text, np.ndarray]) -> None:
    """Pads indices and weights with zero to make their shape known."""
    for var in ['sparse_adj_i', 'sparse_adj_j', 'sparse_adj_weight']:
      features[var] = self._pad_1d_tensor(
          features[var], self._observation_config.max_num_edges)

  def _pad_macro_static_features(self,
                                 features: Dict[Text, np.ndarray]) -> None:
    """Pads macro features to make their shape known."""
    for var in [
        'macros_w',
        'macros_h',
        'node_types',
    ]:
      features[var] = self._pad_1d_tensor(
          features[var], self._observation_config.max_num_nodes)

  def _pad_macro_dynamic_features(self,
                                  features: Dict[Text, np.ndarray]) -> None:
    """Pads macro features to make their shape known."""
    for var in [
        'locations_x',
        'locations_y',
        'is_node_placed',
    ]:
      features[var] = self._pad_1d_tensor(
          features[var], self._observation_config.max_num_nodes)

  def _normalize_grid_size(self, features: Dict[Text, np.ndarray]) -> None:
    features['grid_cols'] = (features['grid_cols'] /
                             self._observation_config.max_grid_size).astype(
                                 np.float32)
    features['grid_rows'] = (features['grid_rows'] /
                             self._observation_config.max_grid_size).astype(
                                 np.float32)

  def _normalize_macro_size_by_canvas(self,
                                      features: Dict[Text, np.ndarray]) -> None:
    """Normalizes macro sizes with the canvas size."""
    features['macros_w'] = (
        features['macros_w'] /
        (features['canvas_width'] + ObservationExtractor.EPSILON)).astype(
            np.float32)
    features['macros_h'] = (
        features['macros_h'] /
        (features['canvas_height'] + ObservationExtractor.EPSILON)).astype(
            np.float32)

  def _normalize_locations_by_canvas(self,
                                     features: Dict[Text, np.ndarray]) -> None:
    """Normalizes locations with the canvas size."""
    features['locations_x'] = (
        features['locations_x'] /
        (features['canvas_width'] + ObservationExtractor.EPSILON)).astype(
            np.float32)
    features['locations_y'] = (
        features['locations_y'] /
        (features['canvas_height'] + ObservationExtractor.EPSILON)).astype(
            np.float32)

  def _replace_unplace_node_location(self,
                                     features: Dict[Text, np.ndarray]) -> None:
    """Replace the location of the unplaced macros with a constant."""
    is_node_placed = np.equal(features['is_node_placed'], 1)
    features['locations_x'] = np.where(
        is_node_placed,
        features['locations_x'],
        self._default_location_x * np.ones_like(features['locations_x']),
    ).astype(np.float32)
    features['locations_y'] = np.where(
        is_node_placed,
        features['locations_y'],
        self._default_location_y * np.ones_like(features['locations_y']),
    ).astype(np.float32)

  def get_static_features(self) -> Dict[Text, np.ndarray]:
    return {
        key: self._features[key]
        for key in observation_config_lib.STATIC_OBSERVATIONS
    }

  def get_initial_features(self) -> Dict[Text, np.ndarray]:
    return {
        key: self._features[key]
        for key in observation_config_lib.INITIAL_OBSERVATIONS
    }

  def _update_dynamic_features(self, previous_node_index: int,
                               current_node_index: int,
                               mask: np.ndarray) -> None:
    """Updates the dynamic features."""
    if previous_node_index >= 0:
      x, y = self.plc.get_node_location(
          self.plc.get_macro_indices()[previous_node_index])
      self._features['locations_x'][previous_node_index] = (
          x / (self.width + ObservationExtractor.EPSILON))
      self._features['locations_y'][previous_node_index] = (
          y / (self.height + ObservationExtractor.EPSILON))
      self._features['is_node_placed'][previous_node_index] = 1
    self._features['mask'] = mask.astype(np.int32)
    self._features['current_node'] = np.asarray([current_node_index
                                                ]).astype(np.int32)

  def get_dynamic_features(self, previous_node_index: int,
                           current_node_index: int,
                           mask: np.ndarray) -> Dict[Text, np.ndarray]:
    self._update_dynamic_features(previous_node_index, current_node_index,
                                  mask)
    return {
        key: self._features[key]
        for key in observation_config_lib.DYNAMIC_OBSERVATIONS
        if key in self._features
    }

  def get_all_features(self, previous_node_index: int,
                       current_node_index: int,
                       mask: np.ndarray) -> Dict[Text, np.ndarray]:
    features = self.get_static_features()
    features.update(
        self.get_dynamic_features(
            previous_node_index=previous_node_index,
            current_node_index=current_node_index,
            mask=mask))
    return features
\ No newline at end of file
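
As a reading aid (not part of the commit), here is a minimal sketch of how this extractor is typically driven. It assumes a PlacementCost built from a netlist file and the default ObservationConfig; the file path and the grid-cell mask shape are illustrative assumptions, not values taken from this repository.

import numpy as np

from Plc_client import plc_client
from Plc_client.observation_extractor_os import ObservationExtractor

# Assumption: PlacementCost is constructed from a netlist protobuf text file.
plc = plc_client.PlacementCost('./Plc_client/test/ariane/netlist.pb.txt')
extractor = ObservationExtractor(plc=plc)

# Static features are computed once in __init__ and are already padded/normalized.
static = extractor.get_static_features()

# Dynamic features take the previously placed node, the node to place next,
# and a feasibility mask; a 0/1 mask over grid cells is assumed here.
cols, rows = plc.get_grid_num_columns_rows()
mask = np.ones(cols * rows, dtype=np.int32)
obs = extractor.get_all_features(
    previous_node_index=-1, current_node_index=0, mask=mask)
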
CodeElements/Plc_client/placement_util_os.py  0 → 100644
# coding=utf-8
# Copyright 2021 The Circuit Training Team Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A collection of non-prod utility functions for placement.
All the dependencies in this file should be non-prod.
"""

import os
import datetime
import re
import textwrap
from typing import Dict, Iterator, List, Optional, Tuple, Any, Union

from absl import logging

logging.set_verbosity(logging.INFO)

# from circuit_training.environment import plc_client
from Plc_client import plc_client_os as plc_client
import numpy as np

# Internal gfile dependencies

# done


def nodes_of_types(plc: plc_client.PlacementCost,
                   type_list: List[str]) -> Iterator[int]:
  """Yields the index of a node of certain types."""
  i = 0
  while True:
    node_type = plc.get_node_type(i)
    if not node_type:
      break
    if node_type in type_list:
      yield i
    i += 1
# done


def get_node_xy_coordinates(
    plc: plc_client.PlacementCost) -> Dict[int, Tuple[float, float]]:
  """Returns all node x,y coordinates (canvas) in a dict."""
  node_coords = dict()
  for node_index in nodes_of_types(plc, ['MACRO', 'STDCELL', 'PORT']):
    if plc.is_node_placed(node_index):
      node_coords[node_index] = plc.get_node_location(node_index)
  return node_coords
# done


def get_macro_orientations(plc: plc_client.PlacementCost) -> Dict[int, int]:
  """Returns all macros' orientations in a dict."""
  macro_orientations = dict()
  for node_index in nodes_of_types(plc, ['MACRO']):
    macro_orientations[node_index] = plc.get_macro_orientation(node_index)
  return macro_orientations
# done


def restore_node_xy_coordinates(
    plc: plc_client.PlacementCost,
    node_coords: Dict[int, Tuple[float, float]]) -> None:
  for node_index, coords in node_coords.items():
    if not plc.is_node_fixed(node_index):
      plc.update_node_coords(node_index, coords[0], coords[1])
# done


def restore_macro_orientations(plc: plc_client.PlacementCost,
                               macro_orientations: Dict[int, int]) -> None:
  for node_index, orientation in macro_orientations.items():
    plc.update_macro_orientation(node_index, orientation)
# done


def extract_attribute_from_comments(attribute: str,
                                    filenames: List[str]) -> Optional[str]:
  """Parses the files' comments section, tries to extract the attribute.

  Args:
    attribute: attribute to look for (case sensitive).
    filenames: List of protobuf files or a plc file.

  Returns:
    Attribute name string, or None if not found.
  """
  for filename in filenames:
    if filename:
      f = filename.split(',')[0]
      if f:
        with open(f, 'rt') as infile:
          for line in infile:
            if line.startswith('#'):
              match = re.search(fr'{attribute} : ([-\w]+)', line)
              if match:
                return match.group(1)
            else:
              # Do not parse the rest of the file, since all the comments are
              # at the top.
              break
  return None
# done


def get_blockages_from_comments(
    filenames: Union[str, List[str]]) -> Optional[List[List[float]]]:
  """Returns list of blockages if they exist in the file's comments section."""
  for filename in filenames:
    if not filename:
      continue
    blockages = []
    # Read the first file if filename is comma separated list.
    # Expected blockage info line format is:
    # "# Blockage : <float> <float> <float> <float> <float>"
    # where the first four float numbers correspond to minx, miny, maxx, maxy
    # of the rectangular region, and the fifth one is the blockage rate. It's
    # usually set to 1.
    try:
      with open(filename, 'rt') as infile:
        for line in infile:
          if line.startswith('# Blockage : '):
            blockages.append([float(x) for x in line.split()[3:8]])
          elif not line.startswith('#'):
            break
    except OSError:
      logging.error('could not read file %s.', filename)
    if blockages:
      return blockages
# done


def extract_sizes_from_comments(
    filenames: List[str]) -> Optional[Tuple[float, float, int, int]]:
  """Parses the file's comments section, tries to extract canvas/grid sizes.

  Args:
    filenames: A list of netlist (.pb.txt) or placement (.plc) files.

  Returns:
    Tuple of canvas_width, canvas_height, grid_cols, grid_rows.
  """
  for filename in filenames:
    if not filename:
      continue
    canvas_width, canvas_height = None, None
    grid_cols, grid_rows = None, None
    with open(filename, 'rt') as infile:
      for line in infile:
        if line.startswith('#'):
          fp_re = re.search(
              r'FP bbox: \{([\d\.]+) ([\d\.]+)\} \{([\d\.]+) ([\d\.]+)\}', line)
          if fp_re:
            canvas_width = float(fp_re.group(3))
            canvas_height = float(fp_re.group(4))
            continue
          plc_wh = re.search(r'Width : ([\d\.]+)  Height : ([\d\.]+)', line)
          if plc_wh:
            canvas_width = float(plc_wh.group(1))
            canvas_height = float(plc_wh.group(2))
            continue
          plc_cr = re.search(r'Columns : ([\d]+)  Rows : ([\d]+)', line)
          if plc_cr:
            grid_cols = int(plc_cr.group(1))
            grid_rows = int(plc_cr.group(2))
        else:
          # Do not parse the rest of the file, since all the comments are at
          # the top.
          break
    if canvas_width and canvas_height and grid_cols and grid_rows:
      return canvas_width, canvas_height, grid_cols, grid_rows
# done


def fix_port_coordinates(plc: plc_client.PlacementCost) -> None:
  """Find all ports and fix their coordinates.

  Args:
    plc: the placement cost object.
  """
  for node in nodes_of_types(plc, ['PORT']):
    # print("node to fix:", node)
    plc.fix_node_coord(node)
# done


# The routing capacities are calculated based on the public information about
# 7nm technology (https://en.wikichip.org/wiki/7_nm_lithography_process)
# with an arbitrary, yet reasonable, assumption of 18% of the tracks for
# the power grids.
def create_placement_cost(
    plc_client: None,
    netlist_file: str,
    init_placement: Optional[str] = None,
    overlap_threshold: float = 4e-3,
    congestion_smooth_range: int = 2,
    # TODO(b/211039937): Increase macro spacing to 3-5um, after matching the
    # performance for Ariane.
    macro_macro_x_spacing: float = 0.1,
    macro_macro_y_spacing: float = 0.1,
    boundary_check: bool = False,
    horizontal_routes_per_micron: float = 70.33,
    vertical_routes_per_micron: float = 74.51,
    macro_horizontal_routing_allocation: float = 51.79,
    macro_vertical_routing_allocation: float = 51.79,
) -> plc_client.PlacementCost:
  """Creates a placement_cost object.

  Args:
    netlist_file: Path to the netlist proto text file.
    init_placement: Path to the initial placement .plc file.
    overlap_threshold: Used for macro overlap detection.
    congestion_smooth_range: Smoothing factor used for congestion estimation.
      Congestion is distributed to this many neighboring columns/rows.
    macro_macro_x_spacing: Macro-to-macro x spacing in microns.
    macro_macro_y_spacing: Macro-to-macro y spacing in microns.
    boundary_check: Do a boundary check during node placement.
    horizontal_routes_per_micron: Horizontal route capacity per micron.
    vertical_routes_per_micron: Vertical route capacity per micron.
    macro_horizontal_routing_allocation: Macro horizontal routing allocation.
    macro_vertical_routing_allocation: Macro vertical routing allocation.

  Returns:
    A PlacementCost object.
  """
  if not netlist_file:
    raise ValueError('netlist_file should be provided.')
  block_name = extract_attribute_from_comments('Block',
                                               [init_placement, netlist_file])
  if not block_name:
    logging.warning(
        'block_name is not set. '
        'Please add the block_name in:\n%s\nor in:\n%s', netlist_file,
        init_placement)
  plc = plc_client.PlacementCost(netlist_file, macro_macro_x_spacing,
                                 macro_macro_y_spacing)
  blockages = get_blockages_from_comments([netlist_file, init_placement])
  if blockages:
    print(blockages)
    for blockage in blockages:
      print(*blockage)
      plc.create_blockage(*blockage)
  sizes = extract_sizes_from_comments([netlist_file, init_placement])
  print(sizes)
  if sizes:
    canvas_width, canvas_height, grid_cols, grid_rows = sizes
    if canvas_width and canvas_height and grid_cols and grid_rows:
      plc.set_canvas_size(canvas_width, canvas_height)
      plc.set_placement_grid(grid_cols, grid_rows)
  plc.set_project_name('circuit_training')
  plc.set_block_name(block_name or 'unset_block')
  plc.set_routes_per_micron(horizontal_routes_per_micron,
                            vertical_routes_per_micron)
  plc.set_macro_routing_allocation(macro_horizontal_routing_allocation,
                                   macro_vertical_routing_allocation)
  plc.set_congestion_smooth_range(congestion_smooth_range)
  plc.set_overlap_threshold(overlap_threshold)
  plc.set_canvas_boundary_check(boundary_check)
  plc.make_soft_macros_square()
  # exit(0)
  # print(plc.get_soft_macros_count())
  if init_placement:
    plc.restore_placement(init_placement)
    fix_port_coordinates(plc)
  return plc


def get_node_type_counts(plc: plc_client.PlacementCost) -> Dict[str, int]:
  """Returns the number of each type of node in the netlist.

  Args:
    plc: the placement cost object.

  Returns:
    Number of each type of node in a dict.
  """
  counts = {
      'MACRO': 0,
      'STDCELL': 0,
      'PORT': 0,
      'MACRO_PIN': 0,
      'SOFT_MACRO': 0,
      'HARD_MACRO': 0,
      'SOFT_MACRO_PIN': 0,
      'HARD_MACRO_PIN': 0
  }
  for node_index in nodes_of_types(plc,
                                   ['MACRO', 'STDCELL', 'PORT', 'MACRO_PIN']):
    node_type = plc.get_node_type(node_index)
    counts[node_type] += 1
    if node_type == 'MACRO':
      if plc.is_node_soft_macro(node_index):
        counts['SOFT_MACRO'] += 1
      else:
        counts['HARD_MACRO'] += 1
    if node_type == 'MACRO_PIN':
      ref_id = plc.get_ref_node_id(node_index)
      # print("ref_id: ", ref_id)
      if plc.is_node_soft_macro(ref_id):
        counts['SOFT_MACRO_PIN'] += 1
      else:
        counts['HARD_MACRO_PIN'] += 1
  return counts
# done


def make_blockage_text(plc: plc_client.PlacementCost) -> str:
  ret = ''
  for blockage in plc.get_blockages():
    ret += 'Blockage : {}\n'.format(' '.join([str(b) for b in blockage]))
  return ret
# done


def save_placement(plc: plc_client.PlacementCost,
                   filename: str,
                   user_comments: str = '') -> None:
  """Saves the placement file with some information in the comments section."""
  cols, rows = plc.get_grid_num_columns_rows()
  width, height = plc.get_canvas_width_height()
  hor_routes, ver_routes = plc.get_routes_per_micron()
  hor_macro_alloc, ver_macro_alloc = plc.get_macro_routing_allocation()
  smooth = plc.get_congestion_smooth_range()
  info = textwrap.dedent("""\
    Placement file for Circuit Training
    Source input file(s) : {src_filename}
    This file : {filename}
    Date : {date}
    Columns : {cols}  Rows : {rows}
    Width : {width:.3f}  Height : {height:.3f}
    Area : {area}
    Wirelength : {wl:.3f}
    Wirelength cost : {wlc:.4f}
    Congestion cost : {cong:.4f}
    Density cost : {density:.4f}
    Project : {project}
    Block : {block_name}
    Routes per micron, hor : {hor_routes:.3f}  ver : {ver_routes:.3f}
    Routes used by macros, hor : {hor_macro_alloc:.3f}  ver : {ver_macro_alloc:.3f}
    Smoothing factor : {smooth}
    Overlap threshold : {overlap_threshold}
  """.format(
      src_filename=plc.get_source_filename(),
      filename=filename,
      date=datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
      cols=cols,
      rows=rows,
      width=width,
      height=height,
      area=plc.get_area(),
      wl=plc.get_wirelength(),
      wlc=plc.get_cost(),
      cong=plc.get_congestion_cost(),
      density=plc.get_density_cost(),
      project=plc.get_project_name(),
      block_name=plc.get_block_name(),
      hor_routes=hor_routes,
      ver_routes=ver_routes,
      hor_macro_alloc=hor_macro_alloc,
      ver_macro_alloc=ver_macro_alloc,
      smooth=smooth,
      overlap_threshold=plc.get_overlap_threshold()))
  info += '\n' + make_blockage_text(plc) + '\n'
  info += '\nCounts of node types:\n'
  node_type_counts = get_node_type_counts(plc)
  for node_type in sorted(node_type_counts):
    info += '{:<15} : {:>9}\n'.format(node_type + 's',
                                      node_type_counts[node_type])
  if user_comments:
    info += '\nUser comments:\n' + user_comments + '\n'
  info += '\nnode_index x y orientation fixed'
  # print(info)
  return plc.save_placement(filename, info)
# TODO: plc.optimize_stdcells


def fd_placement_schedule(plc: plc_client.PlacementCost,
                          num_steps: Tuple[int, ...] = (100, 100, 100),
                          io_factor: float = 1.0,
                          move_distance_factors: Tuple[float, ...] = (1.0, 1.0,
                                                                      1.0),
                          attract_factor: Tuple[float,
                                                ...] = (100.0, 1.0e-3, 1.0e-5),
                          repel_factor: Tuple[float, ...] = (0.0, 1.0e6, 1.0e7),
                          use_current_loc: bool = False,
                          move_macros: bool = False) -> None:
  """A placement schedule that uses the force-directed method.

  Args:
    plc: The plc object.
    num_steps: Number of steps of the force-directed algorithm during each call.
    io_factor: I/O attract factor.
    move_distance_factors: Maximum distance relative to canvas size that a node
      can move in a single step of the force-directed algorithm.
    attract_factor: The spring constants between two connected nodes in the
      force-directed algorithm. The FD algorithm will be called as many times
      as the size of this list. Make sure that repel_factor has the same size.
    repel_factor: The repellent factor for spreading the nodes to avoid
      congestion in the force-directed algorithm.
    use_current_loc: If true, use the current location as the initial location.
    move_macros: If true, also move the macros.
  """
  assert len(num_steps) == len(move_distance_factors)
  assert len(num_steps) == len(repel_factor)
  assert len(num_steps) == len(attract_factor)
  canvas_size = max(plc.get_canvas_width_height())
  max_move_distance = [
      f * canvas_size / s for s, f in zip(num_steps, move_distance_factors)
  ]
  move_stdcells = True
  log_scale_conns = False
  use_sizes = False
  plc.optimize_stdcells(use_current_loc, move_stdcells, move_macros,
                        log_scale_conns, use_sizes, io_factor, num_steps,
                        max_move_distance, attract_factor, repel_factor)
# not tested


def get_ordered_node_indices(mode: str,
                             plc: plc_client.PlacementCost,
                             exclude_fixed_nodes: bool = True) -> List[int]:
  """Returns an ordering of node indices according to the specified mode.

  Args:
    mode: node ordering mode
    plc: placement cost object
    exclude_fixed_nodes: Whether fixed nodes should be excluded.

  Returns:
    Node indices sorted according to the mode.
  """
  macro_indices = plc.get_macro_indices()
  hard_macro_indices = [
      m for m in macro_indices if not plc.is_node_soft_macro(m)
  ]
  soft_macro_indices = [m for m in macro_indices if plc.is_node_soft_macro(m)]

  def macro_area(idx):
    w, h = plc.get_node_width_height(idx)
    return w * h

  if mode == 'descending_size_macro_first':
    ordered_indices = (
        sorted(hard_macro_indices, key=macro_area)[::-1] +
        sorted(soft_macro_indices, key=macro_area)[::-1])
  elif mode == 'random':
    np.random.shuffle(macro_indices)
    ordered_indices = macro_indices
  elif mode == 'random_macro_first':
    np.random.shuffle(hard_macro_indices)
    ordered_indices = hard_macro_indices + soft_macro_indices
  else:
    raise ValueError('{} is an unsupported node placement mode.'.format(mode))

  if exclude_fixed_nodes:
    ordered_indices = [m for m in ordered_indices if not plc.is_node_fixed(m)]
  return ordered_indices
# done


def extract_parameters_from_comments(
    filename: str) -> Tuple[float, float, int, int]:
  """Parses the file's comments section, tries to extract canvas/grid sizes.

  Args:
    filename: protobuf file or a plc file.

  Returns:
    Tuple of canvas_width, canvas_height, grid_cols, grid_rows.
  """
  filename0 = filename.split(',')[0]
  canvas_width, canvas_height = None, None
  grid_cols, grid_rows = None, None
  with open(filename0, 'r') as infile:
    for line in infile:
      if line.startswith('#'):
        fp_re = re.search(
            r'FP bbox: \{([\d\.]+) ([\d\.]+)\} \{([\d\.]+) ([\d\.]+)\}', line)
        if fp_re:
          # NOTE: the first two groups hold the origin coordinates, which are
          # not used.
          canvas_width = float(fp_re.group(3))
          canvas_height = float(fp_re.group(4))
          continue
        plc_wh = re.search(r'Width : ([\d\.]+)  Height : ([\d\.]+)', line)
        if plc_wh:
          canvas_width = float(plc_wh.group(1))
          canvas_height = float(plc_wh.group(2))
          continue
        plc_cr = re.search(r'Columns : ([\d]+)  Rows : ([\d]+)', line)
        if plc_cr:
          grid_cols = int(plc_cr.group(1))
          grid_rows = int(plc_cr.group(2))
      else:
        # Do not parse the rest of the file, since all the comments are at
        # the top.
        break
  return canvas_width, canvas_height, grid_cols, grid_rows
# done


def get_routing_resources() -> Dict[str, float]:
  """Currently we only use default parameter settings.

  In the future, for a specific project, the resources may need to be tuned.

  Returns:
    Routing resources.
  """
  return {
      'horizontal_routes_per_micron': 57.031,
      'vertical_routes_per_micron': 56.818,
      'macro_horizontal_routing_allocation': 39.583,
      'macro_vertical_routing_allocation': 30.303,
  }
# done


def nodes_of_types(plc: plc_client.PlacementCost, type_list: List[str]):
  """Yields the index of a node of certain types."""
  i = 0
  while True:
    node_type = plc.get_node_type(i)
    if not node_type:
      break
    if node_type in type_list:
      yield i
    i += 1
# done


def num_nodes_of_type(plc, node_type):
  """Returns the number of nodes of a particular type."""
  count = 0
  for _ in nodes_of_types(plc, [node_type]):
    count += 1
  return count
# not tested


def extract_blockages_from_tcl(filename: str,
                               block_name: str,
                               canvas_width: float,
                               canvas_height: float,
                               is_rectilinear: bool = False):
  """Reads blockage information from a given tcl file."""
  # Assumptions: project is viperlite or viperfish.
  # This is not a TCL parser, it just reads in a line of the format:
  # dict set ::clockstrap <block name> <blockage index> <corner> <float number>
  # corner is expected to be one of lly, ury.
  blockage_info = dict()
  try:
    with open(filename, 'r') as infile:
      for line in infile:
        if line.startswith('dict set ::clockstrap '):
          block, index, corner, value = line.split()[3:7]
          if block != block_name:
            continue
          blockage_info[corner + index] = float(value)
  except gfile.FileError:
    logging.error('could not read file %s', filename)
    return []
  blockages = []
  if is_rectilinear:
    # Use blockage to model rectilinear floorplan.
    index = 0
    while ('llx' + str(index) in blockage_info and
           'lly' + str(index) in blockage_info and
           'urx' + str(index) in blockage_info and
           'ury' + str(index) in blockage_info):
      minx = blockage_info['llx' + str(index)]
      maxx = blockage_info['urx' + str(index)]
      miny = blockage_info['lly' + str(index)]
      maxy = blockage_info['ury' + str(index)]
      if minx < 0:
        raise ValueError(f'Illegal blockage at index {index}: llx {minx} < 0')
      if maxx > canvas_width:
        raise ValueError(
            f'Illegal blockage at index {index}: urx {maxx} > canvas '
            f'width {canvas_width}')
      if miny < 0:
        raise ValueError(f'Illegal blockage at index {index}: lly {miny} < 0')
      if maxy > canvas_height:
        raise ValueError(
            f'Illegal blockage at index {index}: ury {maxy} > canvas '
            f'height {canvas_height}')
      blockages.append([minx, miny, maxx, maxy, 1])
      index += 1
  else:
    # Fully horizontal or vertical blockage.
    # Horizontal straps.
    index = 0
    while ('lly' + str(index) in blockage_info and
           'ury' + str(index) in blockage_info):
      minx = 0.0
      maxx = canvas_width
      miny = blockage_info['lly' + str(index)]
      maxy = blockage_info['ury' + str(index)]
      blockages.append([minx, miny, maxx, maxy, 1])
      index += 1
    # We don't have any vertical straps, now. Should we still support it?
    # Vertical straps.
    index = 0
    while ('llx' + str(index) in blockage_info and
           'urx' + str(index) in blockage_info):
      minx = blockage_info['llx' + str(index)]
      maxx = blockage_info['urx' + str(index)]
      miny = 0.0
      maxy = canvas_height
      blockages.append([minx, miny, maxx, maxy, 1])
      index += 1
  return blockages
# done


def get_ascii_picture(vect: List[Any],
                      cols: int,
                      rows: int,
                      scale: float = 10) -> str:
  """Returns an ascii picture for the input as a human readable matrix."""
  ret_str = '   '
  for c in range(cols):
    ret_str += '|' + str(int(c / 10) % 10)
  ret_str += '|\n   '
  for c in range(cols):
    ret_str += '|' + str(c % 10)
  ret_str += '|\n  -' + '-' * 2 * cols + '\n'
  for r in range(rows - 1, -1, -1):
    ret_str += format('%3d' % r)
    for c in range(cols):
      mindex = r * cols + c
      val = int(scale * vect[mindex])
      if val > scale:
        ret_str += '|!'
      elif val == scale:
        ret_str += '|#'
      elif val == 0:
        ret_str += '| '
      else:
        ret_str += '|' + str(val)
    ret_str += '|\n'
  ret_str += '  -' + '-' * 2 * cols + '\n'
  return ret_str
# done


def get_hard_macro_density_map(plc: plc_client.PlacementCost) -> List[float]:
  """Returns the placement density map for hard macros only."""
  # Unplaces all standard cells and soft macros, so that grid cell density
  # only contains hard macros.
  placements_to_restore = dict()
  for node_index in nodes_of_types(plc, ['STDCELL']):
    if plc.is_node_placed(node_index):
      placements_to_restore[node_index] = plc.get_node_location(node_index)
      plc.unplace_node(node_index)
  for node_index in nodes_of_types(plc, ['MACRO']):
    if plc.is_node_soft_macro(node_index) and plc.is_node_placed(node_index):
      placements_to_restore[node_index] = plc.get_node_location(node_index)
      plc.unplace_node(node_index)
  hard_macro_density = plc.get_grid_cells_density()
  check_boundary = plc.get_canvas_boundary_check()
  # Restores placements, but original placement may be illegal (outside canvas
  # area), ignore those cases.
  plc.set_canvas_boundary_check(False)
  for node_index, coords in placements_to_restore.items():
    plc.update_node_coords(node_index, coords[0], coords[1])
  plc.set_canvas_boundary_check(check_boundary)
  return hard_macro_density
# done


def save_placement_with_info(plc: plc_client.PlacementCost,
                             filename: str,
                             user_comments: str = '') -> None:
  """Saves the placement file with some information in the comments section."""
  cols, rows = plc.get_grid_num_columns_rows()
  width, height = plc.get_canvas_width_height()
  hor_routes, ver_routes = plc.get_routes_per_micron()
  hor_macro_alloc, ver_macro_alloc = plc.get_macro_routing_allocation()
  smooth = plc.get_congestion_smooth_range()
  init_placement_config = ''
  # Do not change the format of the comments section before updating
  # extract_parameters_from_comments and extract_netlist_file_from_comments
  # functions.
  info = textwrap.dedent("""\
    Placement file for Circuit Training
    Source input file(s) : {src_filename}
    This file : {filename}
    Original initial placement : {init_placement_config}
    Date : {date}
    Columns : {cols}  Rows : {rows}
    Width : {width:.3f}  Height : {height:.3f}
    Area (stdcell+macros) : {area}
    Wirelength : {wl:.3f}
    Wirelength cost : {wlc:.4f}
    Congestion cost : {cong:.4f}
    Density cost : {density:.4f}
    Fake net cost : {fake_net:.4f}
    90% Congestion metric: {cong90}
    Project : {project}
    Block : {block_name}
    Routes per micron, hor : {hor_routes:.3f}  ver : {ver_routes:.3f}
    Routes used by macros, hor : {hor_macro_alloc:.3f}  ver : {ver_macro_alloc:.3f}
    Smoothing factor : {smooth}
    Use incremental cost : {incr_cost}
    To view this file (most options are default):
      viewer_binary \
      --netlist_file {src_filename} \
      --canvas_width {width} --canvas_height {height} \
      --grid_cols {cols} --grid_rows={rows} \
      --init_placement {filename} \
      --project {project} \
      --block_name {block_name} \
      --congestion_smooth_range {smooth} \
      --overlap_threshold {overlap_threshold} \
      --noboundary_check
    or you can simply run:
      viewer_binary \
      --init_placement {filename}
  """.format(
      src_filename=plc.get_source_filename(),
      filename=filename,
      init_placement_config=init_placement_config,
      date=datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
      cols=cols,
      rows=rows,
      width=width,
      height=height,
      area=plc.get_area(),
      wl=plc.get_wirelength(),
      wlc=plc.get_cost(),
      cong=plc.get_congestion_cost(),
      cong90=plc.get_congestion_cost_threshold(0.9),
      density=plc.get_density_cost(),
      fake_net=plc.get_fake_net_cost(),
      project=plc.get_project_name(),
      block_name=plc.get_block_name(),
      hor_routes=hor_routes,
      ver_routes=ver_routes,
      hor_macro_alloc=hor_macro_alloc,
      ver_macro_alloc=ver_macro_alloc,
      smooth=smooth,
      incr_cost=plc.get_use_incremental_cost(),
      overlap_threshold=plc.get_overlap_threshold()))
  info += '\n' + make_blockage_text(plc) + '\n'
  info += '\nCounts of node types:\n'
  node_type_counts = get_node_type_counts(plc)
  for node_type in sorted(node_type_counts):
    info += '{:<15} : {:>9}\n'.format(node_type + 's',
                                      node_type_counts[node_type])
  info += '\nHard Macro Placements:\n'
  info += get_ascii_picture(get_hard_macro_density_map(plc), cols, rows)
  info += '\nOverall Placement Density:\n'
  info += get_ascii_picture(plc.get_grid_cells_density(), cols, rows)
  info += '\nHorizontal Routing Congestion:\n'
  info += get_ascii_picture(plc.get_horizontal_routing_congestion(), cols, rows)
  info += '\nVertical Routing Congestion:\n'
  info += get_ascii_picture(plc.get_vertical_routing_congestion(), cols, rows)
  if user_comments:
    info += '\nUser comments:\n' + user_comments + '\n'
  info += '\nnode_index x y orientation fixed'
  return plc.save_placement(filename, info)
# done


def create_placement_cost_using_common_arguments(
    plc_client: None,
    netlist_file: str,
    init_placement: Optional[str] = None,
    canvas_width: Optional[float] = None,
    canvas_height: Optional[float] = None,
    grid_cols: Optional[int] = None,
    grid_rows: Optional[int] = None,
    project: Optional[str] = None,
    block_name: Optional[str] = None,
    congestion_smooth_range: Optional[int] = None,
    overlap_threshold: Optional[float] = None,
    use_incremental_cost: Optional[bool] = None,
    boundary_check: Optional[bool] = None,
    blockages: Optional[List[List[float]]] = None,
    fix_ports: Optional[bool] = True) -> plc_client.PlacementCost:
  """Creates a placement_cost object using the common arguments."""
  if not project:
    logging.info('Reading project name from file.')
    project = extract_attribute_from_comments('Project',
                                              [init_placement, netlist_file])
  if init_placement and not block_name:
    logging.info('Reading block name from file.')
    block_name = extract_attribute_from_comments('Block',
                                                 [init_placement, netlist_file])
  if not block_name:
    logging.warning('block_name is not set. Please add the block_name in:\n%s',
                    init_placement)
  plc = plc_client.PlacementCost(netlist_file)
  # Create blockages.
  if blockages is None:
    # Try to read blockages from input files. To avoid file I/O, pass blockages,
    # or an empty list if there are none.
    logging.info('Reading blockages from file.')
    for filename in [netlist_file, init_placement]:
      if filename is None:
        continue
      blockages = get_blockages_from_comments([filename])
      # Only read blockages from one file.
      if blockages:
        break
  if blockages:
    for blockage in blockages:
      plc.create_blockage(*blockage)
  # Give precedence to command line parameters for canvas/grid sizes.
  canvas_size_set = False
  if canvas_width and canvas_height:
    plc.set_canvas_size(canvas_width, canvas_height)
    canvas_size_set = True
  grid_size_set = False
  if grid_cols and grid_rows:
    grid_size_set = True
    plc.set_placement_grid(grid_cols, grid_rows)
  # Extract and set canvas, grid sizes if they are not already set.
  if not canvas_size_set or not grid_size_set:
    logging.info('Reading netlist sizes from file.')
    for filename in [netlist_file, init_placement]:
      if filename is None:
        continue
      sizes = extract_parameters_from_comments(filename)
      canvas_width, canvas_height, grid_cols, grid_rows = sizes
      if canvas_width and canvas_height and not canvas_size_set:
        plc.set_canvas_size(canvas_width, canvas_height)
      if grid_cols and grid_rows and not grid_size_set:
        plc.set_placement_grid(grid_cols, grid_rows)
  routing_resources = get_routing_resources()
  plc.set_project_name(project or 'unset_project')
  plc.set_block_name(block_name or 'unset_block')
  plc.set_routes_per_micron(routing_resources['horizontal_routes_per_micron'],
                            routing_resources['vertical_routes_per_micron'])
  plc.set_macro_routing_allocation(
      routing_resources['macro_horizontal_routing_allocation'],
      routing_resources['macro_vertical_routing_allocation'])
  plc.set_congestion_smooth_range(congestion_smooth_range)
  plc.set_overlap_threshold(overlap_threshold)
  plc.set_canvas_boundary_check(boundary_check)
  # Set macros to initial locations.
  if init_placement:
    logging.info('Reading init_placement from file %s', init_placement)
    # I/O is forbidden in forked child processes.
    # Reads init placement from file only if init_locations are not provided.
    plc.restore_placement(init_placement)
    if fix_ports:
      fix_port_coordinates(plc)
  plc.set_use_incremental_cost(use_incremental_cost)
  return plc
# done, but need verify


def get_node_locations(plc: plc_client.PlacementCost) -> Dict[int, int]:
  """Returns all node grid locations (macros and stdcells) in a dict."""
  node_locations = dict()
  for i in nodes_of_types(plc, ['MACRO', 'STDCELL']):
    node_locations[i] = plc.get_grid_cell_of_node(i)
  return node_locations


def get_node_ordering_by_size(plc: plc_client.PlacementCost) -> List[int]:
  """Returns the list of nodes (macros and stdcells) ordered by area."""
  node_areas = dict()
  for i in nodes_of_types(plc, ['MACRO', 'STDCELL']):
    if plc.is_node_fixed(i):
      continue
    w, h = plc.get_node_width_height(i)
    node_areas[i] = w * h
  return sorted(node_areas, key=node_areas.get, reverse=True)
# not tested


def grid_locations_near(plc: plc_client.PlacementCost,
                        start_grid_index: int) -> Iterator[int]:
  """Yields node indices closest to the start_grid_index."""
  # Starting from the start_grid_index, it goes around the area from closest
  # (manhattan distance) to the farthest. For example, if the start grid index
  # is at 0, the order of the next grid cells will be like:
  #           24
  #        22 12 23
  #     20 10  4 11 21
  #  18  8  2  0  3  9 19
  #     16  6  1  7 17
  #        14  5 15
  #           13
  cols, rows = plc.get_grid_num_columns_rows()
  start_col, start_row = start_grid_index % cols, int(start_grid_index / cols)
  # TODO(mustafay): This may be improved, but it's not crucial now.
  for distance in range(cols + rows):
    for row_offset in range(-distance, distance + 1):
      for col_offset in range(-distance, distance + 1):
        if abs(row_offset) + abs(col_offset) != distance:
          continue
        new_col = start_col + col_offset
        new_row = start_row + row_offset
        if new_col < 0 or new_row < 0 or new_col >= cols or new_row >= rows:
          continue
        yield int(new_col + new_row * cols)
# not tested


def place_near(plc: plc_client.PlacementCost, node_index: int,
               location: int) -> bool:
  """Places a node (legally) closest to the given location.

  Args:
    plc: placement_cost object.
    node_index: index of a node.
    location: target grid cell location. (row * num_cols + col)

  Returns:
    True on success, False if this node was not placed on any grid legally.
  """
  for loc in grid_locations_near(plc, location):
    if plc.can_place_node(node_index, loc):
      plc.place_node(node_index, loc)
      return True
  return False
# not tested


def disconnect_high_fanout_nets(plc: plc_client.PlacementCost,
                                max_allowed_fanouts: int = 500) -> None:
  high_fanout_nets = []
  for i in nodes_of_types(plc, ['PORT', 'STDCELL', 'MACRO_PIN']):
    num_fanouts = len(plc.get_fan_outs_of_node(i))
    if num_fanouts > max_allowed_fanouts:
      print('Disconnecting node: {} with {} fanouts.'.format(
          plc.get_node_name(i), num_fanouts))
      high_fanout_nets.append(i)
  plc.disconnect_nets(high_fanout_nets)
# not tested


def legalize_placement(plc: plc_client.PlacementCost) -> bool:
  """Places the nodes to legal positions snapping to grid cells."""
  # Unplace all except i/o's.
  fix_port_coordinates(plc)
  # First save each node's locations on the grid.
  # Note that the orientations are not changed by this utility, we do not
  # need saving/restoring existing orientations.
  node_locations = get_node_locations(plc)
  previous_xy_coords = get_node_xy_coordinates(plc)
  total_macro_displacement = 0
  total_macros = 0
  plc.unplace_all_nodes()
  # Starting with the biggest, place them trying to be as close as possible
  # to the original position.
  ordered_nodes = get_node_ordering_by_size(plc)
  for node in ordered_nodes:
    if not place_near(plc, node, node_locations[node]):
      print('Could not place node')
      return False
    if node in previous_xy_coords and not plc.is_node_soft_macro(node):
      x, y = plc.get_node_location(node)
      px, py = previous_xy_coords[node]
      print('x/y displacement: dx = {}, dy = {}, macro: {}'.format(
          x - px, y - py, plc.get_node_name(node)))
      total_macro_displacement += abs(x - px) + abs(y - py)
      total_macros += 1
  print('Total macro displacement: {}, avg: {}'.format(
      total_macro_displacement, total_macro_displacement / total_macros))
  return True


def main():
  """Run Command:

  python3 -m Plc_client.placement_util_os
  """
  test_netlist_dir = './Plc_client/test/' + 'ariane'
  netlist_file = os.path.join(test_netlist_dir, 'netlist.pb.txt')
  init_placement = os.path.join(test_netlist_dir, 'initial.plc')
  plc = create_placement_cost(
      plc_client=plc_client,
      netlist_file=netlist_file,
      init_placement=init_placement)
  # plc = create_placement_cost_using_common_arguments(netlist_file=netlist_file, init_placement=init_placement,
  #     grid_cols=10, grid_rows=10, congestion_smooth_range=2.0, overlap_threshold=0.004, use_incremental_cost=False)
  print(make_blockage_text(plc))
  # save_placement(plc, "save_test", 'this is a comment')
  # plc.nodes_of_types()

  # node_xy_coordinates
  NODE_XY_DICT = {}
  for i in nodes_of_types(plc, ['MACRO', 'macro', 'STDCELL', 'PORT']):
    NODE_XY_DICT[i] = (100, 100)
  restore_node_xy_coordinates(plc, NODE_XY_DICT)
  # print(get_node_xy_coordinates(plc))

  # macro_orientation
  MACRO_ORIENTATION = {}
  for i in nodes_of_types(plc, ['MACRO', 'macro']):
    MACRO_ORIENTATION[i] = "S"
  restore_macro_orientations(plc, MACRO_ORIENTATION)
  # print(get_macro_orientations(plc))

  fix_port_coordinates(plc)

  # write out new plc
  save_placement(plc, "save_test", 'this is a comment')

  # needs more testing
  print(get_node_locations(plc))

  # num_nodes_of_type
  print("num_nodes_of_type 'MACRO':", num_nodes_of_type(plc, "MACRO"))

  # get_hard_macro_density_map
  print("get_hard_macro_density_map:\n", get_hard_macro_density_map(plc))
  print("get_hard_macro_density_map in ASCII:\n",
        get_ascii_picture(get_hard_macro_density_map(plc),
                          *plc.get_grid_num_columns_rows()))
  print()


if __name__ == '__main__':
  main()
\ No newline at end of file
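
As a reading aid (not part of the commit), here is a minimal sketch of the typical flow through these utilities, mirroring main() above. It assumes the open-source plc_client_os backend and an Ariane test netlist with an initial .plc file; the paths and the output filename are illustrative assumptions.

import os

from Plc_client import placement_util_os as placement_util
from Plc_client import plc_client_os as plc_client

# Assumption: a test netlist and an initial placement exist at these paths.
test_dir = './Plc_client/test/ariane'
plc = placement_util.create_placement_cost(
    plc_client=plc_client,
    netlist_file=os.path.join(test_dir, 'netlist.pb.txt'),
    init_placement=os.path.join(test_dir, 'initial.plc'))

# Inspect the netlist and write the placement back out with a comments header.
print(placement_util.get_node_type_counts(plc))
placement_util.save_placement(plc, 'save_test.plc', 'example comment')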