Open rockingdice opened 1 year ago
I'm using apple-clang on mac os. c++17
here's a screenshot of the crash moment:
This is the moment that freed the entry:
which will trigger the heap-use-after-free problem.
It's very likely the same problem as I mentioned:
possible related threads:
I've tried to modify this line:
data_.reserve(4);
to
data_.reserve(400);
And it won't lead to the issue anymore, so reallocating the vector is probably the problem. The code from:
class sparse_map {
using Entry = std::pair<KeyT, ValueT>;
using iterator = typename std::vector<Entry>::iterator;
public:
explicit sparse_map() : data_{} {
data_.reserve(4); <<<<<<<<<<<< this line
}
Hi, thanks for reporting! Do you happen to have a small(ish) testcase that reproduces the problem?
As I mentioned, the project is too big, and I may not have enough time to make one... but what I did is trivial:
I put four separate entities in the game, and they are moving towards the same goal so that they will get together eventually.
Keep relocating their positions in the multimap ph-tree in the update function for each frame:
When they are near enough, the problem will arise. You can modify the
data_.reserve(4);
to
data_.reserve(1);
It will happen as soon as two entities are in the same entry, which will definitely trigger the problem in my case.
To test the asan problem, you need to turn on this option in the Xcode:
I haven't managed to reproduce it. Could you do (some) of the following?
PhThreeMultimapBoxD
is that correct? Are you using any converters? Btw, running the unit tests with ASAN can be done with bazel test ... --config=asan
.
Could you try running the following test with ASAN and see whether it fails?
#include "phtree/phtree.h"
#include "phtree/phtree_multimap.h"
#include <include/gtest/gtest.h>
#include <random>
#include <vector>
using namespace improbable::phtree;
namespace phtree_test_issue_153 {
// Number of entries that share the same coordinate; generateCube() inserts
// each unique box this many times.
static constexpr size_t NUM_DUPL = 1;
// Bounds of the coordinate range used for random box generation.
static constexpr double WORLD_MIN = -100;
static constexpr double WORLD_MAX = 100;
// Edge length of every generated box (max = min + BOX_LEN per dimension).
static constexpr double BOX_LEN = 1;

// Key type used by these tests: an axis-aligned box in DIM dimensions.
template <dimension_t DIM>
using TestKey = PhBoxD<DIM>;
class DoubleRng {
public:
DoubleRng(double minIncl, double maxExcl) : eng(), rnd{minIncl, maxExcl} {}
double next() {
return rnd(eng);
}
private:
std::default_random_engine eng;
std::uniform_real_distribution<double> rnd;
};
/*
 * Fills `points` with N random boxes in [WORLD_MIN, WORLD_MAX)^DIM.
 * Each unique coordinate is inserted exactly NUM_DUPL times; accidental
 * (unintended) duplicates are avoided by re-drawing.
 */
template <dimension_t DIM>
void generateCube(std::vector<TestKey<DIM>>& points, size_t N) {
    assert(N % NUM_DUPL == 0);
    DoubleRng coord_rng(WORLD_MIN, WORLD_MAX);
    std::unordered_map<TestKey<DIM>, size_t> seen{};
    points.reserve(N);

    const size_t num_unique = N / NUM_DUPL;
    size_t idx = 0;
    while (idx < num_unique) {
        // Draw a random box with edge length BOX_LEN in every dimension.
        TestKey<DIM> candidate{};
        for (dimension_t d = 0; d < DIM; ++d) {
            candidate.min()[d] = coord_rng.next();
            candidate.max()[d] = candidate.min()[d] + BOX_LEN;
        }
        // Re-draw if this coordinate was produced before; duplicates are
        // created deliberately below, never by accident here.
        if (seen.find(candidate) != seen.end()) {
            continue;
        }
        seen.emplace(candidate, idx);
        // Insert the intentional duplicates of this coordinate.
        for (size_t dup = 0; dup < NUM_DUPL; ++dup) {
            points.push_back(TestKey<DIM>(candidate));
        }
        ++idx;
    }
    ASSERT_EQ(seen.size(), num_unique);
    ASSERT_EQ(points.size(), N);
}
TEST(PhTreeTestIssue153, TestIssue153) {
    /*
     * This used to cause a heap-use-after-free problem in sparse_map.
     * Five boxes repeatedly move half-way towards a common target so that
     * they eventually converge and end up in the same tree node.
     */
    std::vector<PhBoxD<2>> boxes{
        {{1, 1}, {1, 1}},
        {{3, 3}, {3, 3}},
        {{1, 3}, {1, 3}},
        {{3, 1}, {3, 1}},
        {{0, 0}, {0, 0}}};
    auto tree = PhTreeMultiMapBoxD<2, size_t>();
    for (size_t id = 0; id < boxes.size(); ++id) {
        tree.emplace(boxes[id], id);
    }

    const PhBoxD<2> target{{2, 2}, {2, 2}};
    for (size_t round = 0; round < 100; ++round) {
        for (size_t id = 0; id < boxes.size(); ++id) {
            const PhBoxD<2>& cur = boxes[id];
            // Midpoint between the current box and the target, per corner.
            PhBoxD<2> next{
                {(cur.min()[0] + target.min()[0]) / 2, (cur.min()[1] + target.min()[1]) / 2},
                {(cur.max()[0] + target.max()[0]) / 2, (cur.max()[1] + target.max()[1]) / 2}};
            // Exactly one entry must be found and moved per relocate() call.
            ASSERT_EQ(1, tree.relocate(cur, next, id));
            boxes[id] = next;
        }
    }
}
/*
 * Try move in ever smaller steps: every round halves each box's distance
 * to a fixed attractor point.
 */
TEST(PhTreeTestIssue153, TestIssue153_2) {
    int N = 10;
    const dimension_t DIM = 2;
    std::vector<TestKey<DIM>> points;
    generateCube(points, N);

    auto tree = PhTreeMultiMapBoxD<DIM, size_t>();
    for (size_t id = 0; id < points.size(); ++id) {
        tree.emplace(points[id], id);
    }

    // Degenerate (zero-size) attractor box that all entries move towards.
    TestKey<DIM> target{};
    for (dimension_t d = 0; d < DIM; ++d) {
        target.min()[d] = 2.;
        target.max()[d] = 2.;
    }

    for (size_t round = 0; round < 100; ++round) {
        for (size_t id = 0; id < points.size(); ++id) {
            TestKey<DIM>& current = points[id];
            TestKey<DIM> next{};
            for (dimension_t d = 0; d < DIM; ++d) {
                next.min()[d] = (current.min()[d] + target.min()[d]) / 2;
                next.max()[d] = (current.max()[d] + target.max()[d]) / 2;
            }
            // Exactly one entry must be found and moved per relocate() call.
            ASSERT_EQ(1, tree.relocate(current, next, id));
            points[id] = next;
        }
    }
}
/*
 * Try moving in very small (but const-sized) steps: each entry advances
 * 1/R_MAX of its distance to a common target per round.
 */
TEST(PhTreeTestIssue153, TestIssue153_Linear) {
    // size_t avoids signed/unsigned comparison in the loops below.
    constexpr size_t N = 10;
    constexpr size_t R_MAX = 1000000;
    const dimension_t DIM = 2;
    std::vector<TestKey<DIM>> points;
    generateCube(points, N);

    // Common target box that all entries move towards.
    TestKey<DIM> x{};
    for (dimension_t d = 0; d < DIM; ++d) {
        x.min()[d] = 2.;
        x.max()[d] = 2. + BOX_LEN;
    }

    std::vector<PhPointD<DIM>> moves;
    moves.reserve(points.size());  // one step vector per entry, no reallocation
    auto tree = PhTreeMultiMapBoxD<DIM, size_t>();
    for (size_t i = 0; i < points.size(); ++i) {
        tree.emplace(points[i], i);
        // Per-entry step: 1/R_MAX of the distance to the target, so the
        // entry arrives at the target after R_MAX rounds.
        PhPointD<DIM>& m = moves.emplace_back();
        for (dimension_t d = 0; d < DIM; ++d) {
            m[d] = (x.min()[d] - points[i].min()[d]) / R_MAX;
        }
    }

    for (size_t r = 0; r < R_MAX; ++r) {
        for (size_t i = 0; i < points.size(); ++i) {
            TestKey<DIM>& bOld = points[i];
            TestKey<DIM> bNew{};
            const PhPointD<DIM>& mov = moves[i];  // reference: avoid copying per step
            for (dimension_t d = 0; d < DIM; ++d) {
                bNew.min()[d] = bOld.min()[d] + mov[d];
                bNew.max()[d] = bOld.max()[d] + mov[d];
            }
            // Exactly one entry must be found and moved per relocate() call.
            ASSERT_EQ(1, tree.relocate(bOld, bNew, i));
            points[i] = bNew;
        }
    }
}
} // namespace phtree_test_issue_153
Sorry for the late reply. I've been swamped these days... I may try them when I'm free, but no promise on the time, sadly :(
@rockingdice No worries :-)
I cannot make a repro — it's a very big project. I don't know if the logs are helpful: