Commit f737ca45 by Bin Cheng

Record and restore postorder information in breaking alias sccs.

gcc/
	PR tree-optimization/95638
	* tree-loop-distribution.c (pg_edge_callback_data): New field.
	(loop_distribution::break_alias_scc_partitions): Record and restore
	postorder information.  Fix memory leak.

gcc/testsuite/
	PR tree-optimization/95638
	* g++.dg/tree-ssa/pr95638.C: New test.

(cherry picked from commit 2c0069fafb53ccb7a45a6815025dfcbd2882a36e)
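The second SCC-finding pass in break_alias_scc_partitions, run with alias dependence edges skipped, overwrites the postorder numbers stored on the partition-graph vertices.  The patch therefore records them in the new vertices_post array before that pass and writes them back once the SCCs have been merged, so the final sort by postorder works on uncorrupted values.  Below is a minimal sketch of that record/restore pattern; the vertex type and helper functions are hypothetical stand-ins for illustration, not GCC's internal graph structures or API.

// Minimal sketch of the record/restore pattern applied by the fix.
// `vertex`, `recompute_postorder` and `merge_sccs` are hypothetical
// stand-ins, not GCC internals.
#include <cstddef>
#include <cstdio>
#include <vector>

struct vertex
{
  int post;   // postorder number consulted later when sorting
};

// Stand-in for the second SCC pass: it clobbers every postorder number.
static void
recompute_postorder (std::vector<vertex> &vertices)
{
  for (auto &v : vertices)
    v.post = -1;
}

static void
merge_sccs (std::vector<vertex> &vertices)
{
  /* Record the postorder information which will be corrupted by the
     next pass.  */
  std::vector<int> vertices_post (vertices.size ());
  for (std::size_t i = 0; i < vertices.size (); ++i)
    vertices_post[i] = vertices[i].post;

  recompute_postorder (vertices);
  /* ... merge partitions belonging to the same SCC here ...  */

  /* Restore the postorder information corrupted by the pass above.  */
  for (std::size_t i = 0; i < vertices.size (); ++i)
    vertices[i].post = vertices_post[i];
}

int
main ()
{
  std::vector<vertex> vertices = { {0}, {1}, {2} };
  merge_sccs (vertices);
  for (const vertex &v : vertices)
    std::printf ("%d\n", v.post);   // prints 0 1 2: original order survives
  return 0;
}

In the patch itself the scratch array is allocated with XNEWVEC next to vertices_component and freed together with it.  The new test case, gcc/testsuite/g++.dg/tree-ssa/pr95638.C: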
// PR tree-optimization/95638
// { dg-do run }
// { dg-options "-O2 -std=c++14" }

#include <cassert>
#include <cstdio>
#include <cstdint>
#include <memory>
#include <type_traits>
#include <utility>

template <typename Derived>
class intrusive_ref_counter
{
public:
  intrusive_ref_counter() = default;

  intrusive_ref_counter(intrusive_ref_counter const&) :
    _counter(0)
  {
  }

  friend void intrusive_ptr_add_ref(const intrusive_ref_counter<Derived>* p)
  {
    ++p->_counter;
  }

  friend void intrusive_ptr_release(const intrusive_ref_counter<Derived>* p)
  {
    if (--p->_counter == 0)
      {
        delete static_cast<const Derived*>(p);
      }
  }

private:
  mutable int _counter = 0;
};

template <typename T>
class intrusive_ptr
{
public:
  intrusive_ptr() = default;

  intrusive_ptr(T* p): px(p)
  {
    if (px != 0) intrusive_ptr_add_ref(px);
  }

  intrusive_ptr(intrusive_ptr const & rhs): px(rhs.px)
  {
    if (px != 0) intrusive_ptr_add_ref(px);
  }

  ~intrusive_ptr()
  {
    if (px != 0) intrusive_ptr_release(px);
  }

  intrusive_ptr(intrusive_ptr && rhs) : px(rhs.px)
  {
    rhs.px = 0;
  }

  explicit operator bool() const
  {
    return px != 0;
  }

private:
  T* px = nullptr;
};

template <typename T, uint32_t MaxSize = 1>
class Storage
{
public:
  Storage() = default;

  Storage(Storage&& other)
  {
    for (int i = 0; i < other._size; ++i)
      {
        new (data() + i) T(std::move(other.data()[i]));
        ++_size;
      }
  }

  ~Storage()
  {
    for (int i = 0; i < _size; ++i)
      {
        data()[i].~T();
      }
  }

  void push_back(const T& value)
  {
    assert(_size < MaxSize);
    new (data() + _size) T(value);
    ++_size;
  }

  T& operator[](size_t idx)
  {
    assert(idx < _size);
    return data()[idx];
  }

private:
  T* data()
  {
    return reinterpret_cast<T*>(&_data);
  }

private:
  uint32_t _size = 0;
  std::aligned_storage_t<sizeof(T[MaxSize])> _data;
};

struct Item: intrusive_ref_counter<Item>
{
};

using Collection = Storage<intrusive_ptr<Item>>;

struct Holder
{
  __attribute__ ((noinline)) Holder(Collection collection) : collection(std::move(collection)) {}

  int64_t dummy = 0; // this is needed to reproduce the issue.
  Collection collection;
};

int main()
{
  Collection collection;
  collection.push_back(intrusive_ptr<Item>(new Item()));
  Holder holder(std::move(collection));
  auto item = holder.collection[0];
  if (!item)
    __builtin_abort ();
  return 0;
}
gcc/tree-loop-distribution.c
@@ -2145,6 +2145,8 @@ struct pg_edge_callback_data
   bitmap sccs_to_merge;
   /* Array contains component information for all vertices.  */
   int *vertices_component;
+  /* Array contains postorder information for all vertices.  */
+  int *vertices_post;
   /* Vector to record all data dependence relations which are needed
      to break strong connected components by runtime alias checks.  */
   vec<ddr_p> *alias_ddrs;
@@ -2401,7 +2403,7 @@ loop_distribution::break_alias_scc_partitions (struct graph *rdg,
                                                vec<struct partition *> *partitions,
                                                vec<ddr_p> *alias_ddrs)
 {
-  int i, j, k, num_sccs, num_sccs_no_alias;
+  int i, j, k, num_sccs, num_sccs_no_alias = 0;
 
   /* Build partition dependence graph.  */
   graph *pg = build_partition_graph (rdg, partitions, false);
@@ -2452,6 +2454,7 @@ loop_distribution::break_alias_scc_partitions (struct graph *rdg,
   cbdata.sccs_to_merge = sccs_to_merge;
   cbdata.alias_ddrs = alias_ddrs;
   cbdata.vertices_component = XNEWVEC (int, pg->n_vertices);
+  cbdata.vertices_post = XNEWVEC (int, pg->n_vertices);
   /* Record the component information which will be corrupted by next
      graph scc finding call.  */
   for (i = 0; i < pg->n_vertices; ++i)
@@ -2460,6 +2463,11 @@ loop_distribution::break_alias_scc_partitions (struct graph *rdg,
   /* Collect data dependences for runtime alias checks to break SCCs.  */
   if (bitmap_count_bits (sccs_to_merge) != (unsigned) num_sccs)
     {
+      /* Record the postorder information which will be corrupted by next
+         graph SCC finding call.  */
+      for (i = 0; i < pg->n_vertices; ++i)
+        cbdata.vertices_post[i] = pg->vertices[i].post;
+
       /* Run SCC finding algorithm again, with alias dependence edges
          skipped.  This is to topologically sort partitions according to
          compilation time known dependence.  Note the topological order
@@ -2490,11 +2498,6 @@ loop_distribution::break_alias_scc_partitions (struct graph *rdg,
               if (cbdata.vertices_component[k] != i)
                 continue;
 
-              /* Update to the minimal postorder number of vertices in scc so
-                 that merged partition is sorted correctly against others.  */
-              if (pg->vertices[j].post > pg->vertices[k].post)
-                pg->vertices[j].post = pg->vertices[k].post;
-
               partition_merge_into (NULL, first, partition, FUSE_SAME_SCC);
               (*partitions)[k] = NULL;
               partition_free (partition);
@@ -2505,6 +2508,14 @@ loop_distribution::break_alias_scc_partitions (struct graph *rdg,
                 first->type = PTYPE_SEQUENTIAL;
             }
         }
+
+      /* Restore the postorder information if it's corrupted in finding SCC
+         with alias dependence edges skipped.  */
+      if (num_sccs_no_alias > 0)
+        for (i = 0; i < pg->n_vertices; ++i)
+          pg->vertices[i].post = cbdata.vertices_post[i];
+
       free (cbdata.vertices_component);
+      free (cbdata.vertices_post);
     }
 
   sort_partitions_by_post_order (pg, partitions);