support running DP in circuit training #93

Open · wants to merge 2 commits into master
67 changes: 67 additions & 0 deletions dreamplace/Params.py
@@ -169,3 +169,70 @@ def solution_file_suffix(self):
return "def"
else: # Bookshelf
return "pl"

def get_bin_size(width, height, num_bins=128*128):
"""
@brief find the power two bin size closest to the canvas ratio.
"""
num_bins_x = math.sqrt(num_bins * width / height)
num_bins_x = int(math.pow(2, round(math.log(num_bins_x) / math.log(2))))
num_bins_x = max(min(num_bins_x, num_bins), 1) # constrain num_bins_x between 1 and num_bins
num_bins_y = int(num_bins / num_bins_x)

return num_bins_x, num_bins_y

def get_dreamplace_params(
iteration,
target_density,
learning_rate,
canvas_width=None,
canvas_height=None,
num_bins_x=None,
num_bins_y=None,
gpu=False,
result_dir='results',
legalize_flag=False,
stop_overflow=0.1,
routability_opt_flag=False):
""""
@brief return the parameters to config Dreamplace in circuit training.
"""
params = Params()
params.circuit_training_mode = True

# set number of bins
if num_bins_x and num_bins_y:
params.num_bins_x = num_bins_x
params.num_bins_y = num_bins_y
elif canvas_width and canvas_height:
# extract #bins from canvas info
params.num_bins_x, params.num_bins_y = get_bin_size(canvas_width,
canvas_height)
    else:
        # no bin information provided; fall back to the default 128 x 128 grid
        params.num_bins_x = 128
        params.num_bins_y = 128

params.global_place_stages = [{
'num_bins_x': params.num_bins_x,
'num_bins_y': params.num_bins_y,
'iteration': iteration,
'learning_rate': learning_rate,
'wirelength': 'weighted_average',
'optimizer': 'nesterov',
}]
params.legalize_flag = legalize_flag
params.detailed_place_flag = False
params.target_density = target_density
params.density_weight = 8e-5
params.gpu = gpu
params.result_dir = result_dir
params.stop_overflow = stop_overflow

# disable regioning
params.regioning = False

    # routability-related flags
params.routability_opt_flag = routability_opt_flag
params.adjust_nctugr_area_flag = False

return params
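For context, a minimal usage sketch of the new Params.py helpers follows; the canvas size and hyperparameter values are illustrative and the import path is an assumption, not something specified by this PR.

# Hypothetical usage of the helpers added above (illustrative values).
from dreamplace.Params import get_bin_size, get_dreamplace_params  # assumed import path

# Square canvas: the power-of-two split of the default 128*128 bin budget is (128, 128).
num_bins_x, num_bins_y = get_bin_size(width=500.0, height=500.0)

# Build a Params object preconfigured for circuit training.
params = get_dreamplace_params(
    iteration=1000,
    target_density=0.9,
    learning_rate=0.01,
    num_bins_x=num_bins_x,
    num_bins_y=num_bins_y,
    gpu=False,
    result_dir='results/ct_run',
    legalize_flag=False,
    stop_overflow=0.1,
    routability_opt_flag=False)

assert params.circuit_training_mode  # flag introduced by this PR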
64 changes: 36 additions & 28 deletions dreamplace/PlaceDB.py
Expand Up @@ -87,6 +87,8 @@ def __init__(self):
self.bin_size_y = None
self.num_bins_x = None
self.num_bins_y = None

self.num_non_movable_macros = None

self.num_movable_pins = None

@@ -439,25 +441,26 @@ def print_row(self, row_id):
"""
logging.debug("row %d %s" % (row_id, self.rows[row_id]))

#def flatten_nested_map(self, net2pin_map):
# """
# @brief flatten an array of array to two arrays like CSV format
# @param net2pin_map array of array
# @return a pair of (elements, cumulative column indices of the beginning element of each row)
# """
# # flat netpin map, length of #pins
# flat_net2pin_map = np.zeros(len(pin2net_map), dtype=np.int32)
# # starting index in netpin map for each net, length of #nets+1, the last entry is #pins
# flat_net2pin_start_map = np.zeros(len(net2pin_map)+1, dtype=np.int32)
# count = 0
# for i in range(len(net2pin_map)):
# flat_net2pin_map[count:count+len(net2pin_map[i])] = net2pin_map[i]
# flat_net2pin_start_map[i] = count
# count += len(net2pin_map[i])
# assert flat_net2pin_map[-1] != 0
# flat_net2pin_start_map[len(net2pin_map)] = len(pin2net_map)

# return flat_net2pin_map, flat_net2pin_start_map
    ## Re-enabled (previously commented out) for circuit training.
    def flatten_nested_map(self, pin2net_map, net2pin_map):
        """
        @brief flatten an array of arrays into two flat arrays (CSR-like layout)
        @param pin2net_map map from each pin to its net; only its length (#pins) is used here
        @param net2pin_map array of arrays listing the pins of each net
        @return a pair of (flattened elements, cumulative start indices of each net)
        """
# flat netpin map, length of #pins
flat_net2pin_map = np.zeros(len(pin2net_map), dtype=np.int32)
# starting index in netpin map for each net, length of #nets+1, the last entry is #pins
flat_net2pin_start_map = np.zeros(len(net2pin_map)+1, dtype=np.int32)
count = 0
for i in range(len(net2pin_map)):
flat_net2pin_map[count:count+len(net2pin_map[i])] = net2pin_map[i]
flat_net2pin_start_map[i] = count
count += len(net2pin_map[i])
assert flat_net2pin_map[-1] != 0
flat_net2pin_start_map[len(net2pin_map)] = len(pin2net_map)

return flat_net2pin_map, flat_net2pin_start_map

def read(self, params):
"""
@@ -582,7 +585,9 @@ def __call__(self, params):
"""
tt = time.time()

self.read(params)
        # In circuit training mode the design is not read from local files; the caller populates the placement database directly.
if not params.circuit_training_mode:
self.read(params)
self.initialize(params)

logging.info("reading benchmark takes %g seconds" % (time.time()-tt))
@@ -679,14 +684,17 @@ def initialize(self, params):

# shift and scale
# adjust shift_factor and scale_factor if not set
params.shift_factor[0] = self.xl
params.shift_factor[1] = self.yl
logging.info("set shift_factor = (%g, %g), as original row bbox = (%g, %g, %g, %g)"
% (params.shift_factor[0], params.shift_factor[1], self.xl, self.yl, self.xh, self.yh))
if params.scale_factor == 0.0 or self.site_width != 1.0:
params.scale_factor = 1.0 / self.site_width
logging.info("set scale_factor = %g, as site_width = %g" % (params.scale_factor, self.site_width))
self.scale(params.shift_factor, params.scale_factor)
if not params.circuit_training_mode:
params.shift_factor[0] = self.xl
params.shift_factor[1] = self.yl
logging.info("set shift_factor = (%g, %g), as original row bbox = (%g, %g, %g, %g)"
% (params.shift_factor[0], params.shift_factor[1], self.xl, self.yl, self.xh, self.yh))
if params.scale_factor == 0.0 or self.site_width != 1.0:
params.scale_factor = 1.0 / self.site_width
logging.info("set scale_factor = %g, as site_width = %g" % (params.scale_factor, self.site_width))
self.scale(params.shift_factor, params.scale_factor)
else:
params.scale_factor = 1.0

content = """
================================= Benchmark Statistics =================================
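To make the re-enabled flatten_nested_map easier to review, here is a small self-contained sketch of the CSR-style flattening it performs; the toy netlist below is made up for illustration.

import numpy as np

# Toy netlist: 3 nets over 5 pins. net2pin_map[i] lists the pins of net i;
# pin2net_map is only used for its length (#pins) in flatten_nested_map.
net2pin_map = [np.array([0, 1]), np.array([2]), np.array([3, 4])]
pin2net_map = np.array([0, 0, 1, 2, 2])

# Standalone equivalent of PlaceDB.flatten_nested_map(pin2net_map, net2pin_map):
flat_net2pin_map = np.zeros(len(pin2net_map), dtype=np.int32)
flat_net2pin_start_map = np.zeros(len(net2pin_map) + 1, dtype=np.int32)
count = 0
for i, pins in enumerate(net2pin_map):
    flat_net2pin_map[count:count + len(pins)] = pins
    flat_net2pin_start_map[i] = count
    count += len(pins)
flat_net2pin_start_map[len(net2pin_map)] = len(pin2net_map)

print(flat_net2pin_map.tolist())        # [0, 1, 2, 3, 4]
print(flat_net2pin_start_map.tolist())  # [0, 2, 3, 5]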
4 changes: 4 additions & 0 deletions dreamplace/params.json
@@ -219,5 +219,9 @@
"deterministic_flag" : {
"descripton" : "whether require run-to-run determinism, may have efficiency overhead",
"default" : 0
},
"circuit_training_mode" : {
"descripton" : "whether use Dreamplace for circuit training",
"default" : 0
}
}
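Finally, a hedged sketch of how the new circuit_training_mode flag ties the pieces together; the import path is assumed and nothing below is prescribed by the PR itself.

# Hypothetical: toggling the flag added to params.json (default 0).
from dreamplace.Params import Params  # assumed import path

params = Params()
print(getattr(params, 'circuit_training_mode', 0))  # 0: normal flow, read the design from disk

params.circuit_training_mode = True
# With the flag set, PlaceDB.__call__ skips self.read(params), and
# PlaceDB.initialize() leaves shift_factor untouched and forces scale_factor = 1.0,
# so the caller is expected to fill the placement database itself.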