Various Fixes
T3C42 committed Dec 7, 2024
1 parent 8ab7428 commit d7a91ab
Showing 4 changed files with 19 additions and 8 deletions.
8 changes: 4 additions & 4 deletions .github/workflows/branch_ci.yml
@@ -1,9 +1,9 @@
name: Mt-KaHyPar Fast CI

-on:
-  push:
-    branches-ignore:
-      - master
+#on:
+# push:
+# branches-ignore:
+# - master

jobs:
mt_kahypar_compiler_version_test:
2 changes: 1 addition & 1 deletion CMakeLists.txt
@@ -78,7 +78,7 @@ option(KAHYPAR_ENABLE_STEINER_TREE_METRIC
"Enables the Steiner tree metric. Can be turned off for faster compilation." OFF)

option(KAHYPAR_PYTHON
"Build the Python interface. Can be turned off in case Python is not available." ON)
"Build the Python interface. Can be turned off in case Python is not available." OFF)

option(MT_KAHYPAR_DISABLE_BOOST
"Whether to exclude components requiring Boost::program_options. Will result in no binary target and the C and Python interface not being able to load configuration files." OFF)
6 changes: 6 additions & 0 deletions mt-kahypar/dynamic/strategies/localFM.h
@@ -32,6 +32,12 @@ namespace mt_kahypar::dyn {
//use local_fm to refine partitioned_hypergraph_s
void local_fm(ds::StaticHypergraph& hypergraph, Context& context, const HypernodeID& hn) {

+//TODO
+//context.refinement.fm.algorithm = FMAlgorithm::kway_fm;
+
+//TODO
+//context.refinement.fm.multitry_rounds = 1;
+
//GainCachePtr::deleteGainCache(_gain_cache);
//TODO maybe
GainCachePtr::resetGainCache(_gain_cache);
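
For orientation, here is a minimal sketch of what this function would look like with the hunk's commented-out TODO settings applied, assembled only from names visible in this diff and the sibling file localFM_factor.h; it is a sketch of one possible configuration, not the committed code:

// Sketch only: local_fm with the TODO settings from the hunk above enabled.
// _gain_cache, _fm, partitioned_hypergraph_s are the members used elsewhere in this strategy;
// partitioned_hypergraph is the handle passed to the refiner, set up earlier (not shown in the diff).
void local_fm(ds::StaticHypergraph& hypergraph, Context& context, const HypernodeID& hn) {
  // Force the k-way FM algorithm and a single multitry round, as the TODOs suggest.
  context.refinement.fm.algorithm = FMAlgorithm::kway_fm;
  context.refinement.fm.multitry_rounds = 1;

  // Drop stale gains caused by the structural change instead of rebuilding the cache from scratch.
  GainCachePtr::resetGainCache(_gain_cache);

  Metrics best_Metrics = {mt_kahypar::metrics::quality(*partitioned_hypergraph_s, Objective::km1),
                          mt_kahypar::metrics::imbalance(*partitioned_hypergraph_s, context)};
  _fm->refine(partitioned_hypergraph, {hn}, best_Metrics, std::numeric_limits<double>::max());
}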
11 changes: 8 additions & 3 deletions mt-kahypar/dynamic/strategies/localFM_factor.h
@@ -17,6 +17,7 @@ namespace mt_kahypar::dyn {
gain_cache_t _gain_cache;
std::unique_ptr<IRefiner> _fm;
std::unique_ptr<IRebalancer> _rebalancer;
+parallel::scalable_vector<HypernodeID> nodes_to_partition;

void repartition(ds::StaticHypergraph& hypergraph_s, Context& context) {
std::cout << "Repartitioning" << std::endl;
@@ -32,7 +33,7 @@
}

//use local_fm to refine partitioned_hypergraph_s
-void local_fm(ds::StaticHypergraph& hypergraph, Context& context, const HypernodeID& hn) {
+void local_fm(ds::StaticHypergraph& hypergraph, Context& context) {

//GainCachePtr::deleteGainCache(_gain_cache);
//TODO maybe
@@ -47,11 +48,13 @@
Metrics best_Metrics = {mt_kahypar::metrics::quality(*partitioned_hypergraph_s, Objective::km1),
mt_kahypar::metrics::imbalance(*partitioned_hypergraph_s, context)};

-_fm->refine(partitioned_hypergraph, {hn}, best_Metrics, std::numeric_limits<double>::max());
+_fm->refine(partitioned_hypergraph, nodes_to_partition, best_Metrics, std::numeric_limits<double>::max());
}

PartitionID add_node_to_partitioned_hypergraph(ds::StaticHypergraph& hypergraph, Context& context, const HypernodeID& hn) {

+nodes_to_partition.push_back(hn);

//compute for each block the number of nodes it is connected to
std::vector<std::tuple<int,int>> block_connectivities(context.partition.k, std::make_tuple(0,0));
for ( PartitionID p = 0; p < context.partition.k; ++p ) {
@@ -81,6 +84,7 @@

//on first call, initialize partitioned_hypergraph_s
if (!partitioned_hypergraph_s) {
+nodes_to_partition = parallel::scalable_vector<HypernodeID>(changes_size);
repartition(hypergraph, context);
}

@@ -110,7 +114,8 @@
if (skipped_changes >= step_size) {
skipped_changes = 0;
step_size *= 2;
-local_fm(hypergraph, context, hn);
+local_fm(hypergraph, context);
+nodes_to_partition = parallel::scalable_vector<HypernodeID>(changes_size);
} else {
skipped_changes++;
}
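
Taken together, the hunks in this file switch the strategy from refining around the single most recent node to refining a growing batch: every added node is appended to nodes_to_partition, and once skipped_changes reaches step_size the whole batch is refined and the interval doubles. A rough sketch of the resulting control flow follows; the function name process_change and the helper assign_to_best_block are illustrative placeholders standing in for the connectivity heuristic shown above, and the member names are taken from the diff:

// Sketch of the batched refinement pattern introduced here (not verbatim repo code).
PartitionID process_change(ds::StaticHypergraph& hypergraph, Context& context, const HypernodeID& hn) {
  // Remember every node added since the last refinement round.
  nodes_to_partition.push_back(hn);

  // Place hn greedily by block connectivity (hypothetical helper, see block_connectivities above).
  const PartitionID block = assign_to_best_block(hn, context);

  if (skipped_changes >= step_size) {
    // Refine the whole batch at once, then wait twice as long before the next round.
    skipped_changes = 0;
    step_size *= 2;
    local_fm(hypergraph, context);  // runs _fm->refine(...) over nodes_to_partition
    // Start a new batch. Note this size-constructs the vector with changes_size
    // default-initialized IDs (mirroring the diff) rather than merely reserving capacity.
    nodes_to_partition = parallel::scalable_vector<HypernodeID>(changes_size);
  } else {
    skipped_changes++;
  }
  return block;
}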
