8198423: Improve metaspace chunk allocation

Reviewed-by: goetz, coleenp
This commit is contained in:
Thomas Stuefe 2018-03-06 19:24:13 +01:00
parent a06129a432
commit 1b402fdb6d
8 changed files with 1530 additions and 519 deletions

View File

@ -48,9 +48,14 @@ size_t Metachunk::overhead() {
// Metachunk methods
Metachunk::Metachunk(size_t word_size,
Metachunk::Metachunk(ChunkIndex chunktype, bool is_class, size_t word_size,
VirtualSpaceNode* container)
: Metabase<Metachunk>(word_size),
_chunk_type(chunktype),
_is_class(is_class),
_sentinel(CHUNK_SENTINEL),
_origin(origin_normal),
_use_count(0),
_top(NULL),
_container(container)
{
@ -58,6 +63,7 @@ Metachunk::Metachunk(size_t word_size,
set_is_tagged_free(false);
#ifdef ASSERT
mangle(uninitMetaWordVal);
verify();
#endif
}
@ -83,15 +89,16 @@ size_t Metachunk::free_word_size() const {
void Metachunk::print_on(outputStream* st) const {
st->print_cr("Metachunk:"
" bottom " PTR_FORMAT " top " PTR_FORMAT
" end " PTR_FORMAT " size " SIZE_FORMAT,
p2i(bottom()), p2i(_top), p2i(end()), word_size());
" end " PTR_FORMAT " size " SIZE_FORMAT " (%s)",
p2i(bottom()), p2i(_top), p2i(end()), word_size(),
chunk_size_name(get_chunk_type()));
if (Verbose) {
st->print_cr(" used " SIZE_FORMAT " free " SIZE_FORMAT,
used_word_size(), free_word_size());
}
}
#ifndef PRODUCT
#ifdef ASSERT
void Metachunk::mangle(juint word_value) {
// Overwrite the payload of the chunk and not the links that
// maintain list of chunks.
@ -99,16 +106,44 @@ void Metachunk::mangle(juint word_value) {
size_t size = word_size() - overhead();
Copy::fill_to_words(start, size, word_value);
}
#endif // PRODUCT
void Metachunk::verify() {
#ifdef ASSERT
// Cannot walk through the blocks unless the blocks have
// headers with sizes.
assert(bottom() <= _top &&
_top <= (MetaWord*)end(),
"Chunk has been smashed");
#endif
return;
assert(is_valid_sentinel(), "Chunk " PTR_FORMAT ": sentinel invalid", p2i(this));
const ChunkIndex chunk_type = get_chunk_type();
assert(is_valid_chunktype(chunk_type), "Chunk " PTR_FORMAT ": Invalid chunk type.", p2i(this));
if (chunk_type != HumongousIndex) {
assert(word_size() == get_size_for_nonhumongous_chunktype(chunk_type, is_class()),
"Chunk " PTR_FORMAT ": wordsize " SIZE_FORMAT " does not fit chunk type %s.",
p2i(this), word_size(), chunk_size_name(chunk_type));
}
assert(is_valid_chunkorigin(get_origin()), "Chunk " PTR_FORMAT ": Invalid chunk origin.", p2i(this));
assert(bottom() <= _top && _top <= (MetaWord*)end(),
"Chunk " PTR_FORMAT ": Chunk top out of chunk bounds.", p2i(this));
// For non-humongous chunks, starting address shall be aligned
// to its chunk size. Humongous chunks start address is
// aligned to specialized chunk size.
const size_t required_alignment =
(chunk_type != HumongousIndex ? word_size() : get_size_for_nonhumongous_chunktype(SpecializedIndex, is_class())) * sizeof(MetaWord);
assert(is_aligned((address)this, required_alignment),
"Chunk " PTR_FORMAT ": (size " SIZE_FORMAT ") not aligned to " SIZE_FORMAT ".",
p2i(this), word_size() * sizeof(MetaWord), required_alignment);
}
#endif // ASSERT
// Helper, returns a descriptive name for the given index.
// Helper, returns a descriptive name for the given index.
const char* chunk_size_name(ChunkIndex index) {
  if (index == SpecializedIndex) {
    return "specialized";
  }
  if (index == SmallIndex) {
    return "small";
  }
  if (index == MediumIndex) {
    return "medium";
  }
  if (index == HumongousIndex) {
    return "humongous";
  }
  return "Invalid index";
}

View File

@ -94,16 +94,84 @@ class Metabase VALUE_OBJ_CLASS_SPEC {
// | | | |
// +--------------+ <- bottom --+ --+
// ChunkIndex defines the type of chunk.
// Chunk types differ by size: specialized < small < medium, chunks
// larger than medium are humongous chunks of varying size.
enum ChunkIndex {
  ZeroIndex = 0,
  SpecializedIndex = ZeroIndex,
  SmallIndex = SpecializedIndex + 1,
  MediumIndex = SmallIndex + 1,
  HumongousIndex = MediumIndex + 1,
  NumberOfFreeLists = 3,
  NumberOfInUseLists = 4
};

// Utility functions.
// Returns the fixed word size of a non-humongous chunk of the given type.
size_t get_size_for_nonhumongous_chunktype(ChunkIndex chunk_type, bool is_class);
// Maps a chunk word size back to its chunk type.
ChunkIndex get_chunk_type_by_size(size_t size, bool is_class);

// Returns a descriptive name for a chunk type.
const char* chunk_size_name(ChunkIndex index);

// Verify chunk type: true only for the four defined chunk kinds.
inline bool is_valid_chunktype(ChunkIndex index) {
  switch (index) {
    case SpecializedIndex:
    case SmallIndex:
    case MediumIndex:
    case HumongousIndex:
      return true;
    default:
      return false;
  }
}

// True for any valid chunk type except humongous.
inline bool is_valid_nonhumongous_chunktype(ChunkIndex index) {
  return index != HumongousIndex && is_valid_chunktype(index);
}
enum ChunkOrigin {
  // Chunk normally born (via take_from_committed)
  origin_normal = 1,
  // Chunk was born as padding chunk
  origin_pad = 2,
  // Chunk was born as leftover chunk in VirtualSpaceNode::retire
  origin_leftover = 3,
  // Chunk was born as result of a merge of smaller chunks
  origin_merge = 4,
  // Chunk was born as result of a split of a larger chunk
  origin_split = 5,
  origin_minimum = origin_normal,
  origin_maximum = origin_split,
  origins_count = origin_maximum + 1
};

// A chunk origin is valid iff it lies in the closed range
// [origin_minimum, origin_maximum]; the origin values are contiguous.
inline bool is_valid_chunkorigin(ChunkOrigin origin) {
  return origin >= origin_minimum && origin <= origin_maximum;
}
class Metachunk : public Metabase<Metachunk> {
friend class MetachunkTest;
// The VirtualSpaceNode containing this chunk.
VirtualSpaceNode* _container;
VirtualSpaceNode* const _container;
// Current allocation top.
MetaWord* _top;
// A 32bit sentinel for debugging purposes.
enum { CHUNK_SENTINEL = 0x4d4554EF, // "MET"
CHUNK_SENTINEL_INVALID = 0xFEEEEEEF
};
uint32_t _sentinel;
const ChunkIndex _chunk_type;
const bool _is_class;
// Whether the chunk is free (in freelist) or in use by some class loader.
bool _is_tagged_free;
ChunkOrigin _origin;
int _use_count;
MetaWord* initial_top() const { return (MetaWord*)this + overhead(); }
MetaWord* top() const { return _top; }
@ -120,7 +188,7 @@ class Metachunk : public Metabase<Metachunk> {
// Size of the Metachunk header, including alignment.
static size_t overhead();
Metachunk(size_t word_size , VirtualSpaceNode* container);
Metachunk(ChunkIndex chunktype, bool is_class, size_t word_size, VirtualSpaceNode* container);
MetaWord* allocate(size_t word_size);
@ -143,12 +211,23 @@ class Metachunk : public Metabase<Metachunk> {
bool contains(const void* ptr) { return bottom() <= ptr && ptr < _top; }
#ifndef PRODUCT
void mangle(juint word_value);
#endif
void print_on(outputStream* st) const;
void verify();
bool is_valid_sentinel() const { return _sentinel == CHUNK_SENTINEL; }
void remove_sentinel() { _sentinel = CHUNK_SENTINEL_INVALID; }
int get_use_count() const { return _use_count; }
void inc_use_count() { _use_count ++; }
ChunkOrigin get_origin() const { return _origin; }
void set_origin(ChunkOrigin orig) { _origin = orig; }
ChunkIndex get_chunk_type() const { return _chunk_type; }
bool is_class() const { return _is_class; }
DEBUG_ONLY(void mangle(juint word_value);)
DEBUG_ONLY(void verify();)
};
// Metablock is the unit of allocation from a Chunk.

File diff suppressed because it is too large Load Diff

View File

@ -89,6 +89,7 @@ class Metaspace : public CHeapObj<mtClass> {
friend class MetaspaceShared;
friend class CollectedHeap;
friend class PrintCLDMetaspaceInfoClosure;
friend class MetaspaceAllocationTest;
public:
enum MetadataType {
@ -176,6 +177,11 @@ class Metaspace : public CHeapObj<mtClass> {
return mdtype == ClassType ? chunk_manager_class() : chunk_manager_metadata();
}
// convenience function
static ChunkManager* get_chunk_manager(bool is_class) {
return is_class ? chunk_manager_class() : chunk_manager_metadata();
}
static const MetaspaceTracer* tracer() { return _tracer; }
static void freeze() {
assert(DumpSharedSpaces, "sanity");

View File

@ -45,7 +45,6 @@ void InternalVMTests::run() {
run_unit_test(TestReserveMemorySpecial_test);
run_unit_test(TestVirtualSpace_test);
run_unit_test(TestMetaspaceAux_test);
run_unit_test(TestVirtualSpaceNode_test);
run_unit_test(GCTimer_test);
run_unit_test(ObjectMonitor_test);
run_unit_test(DirectivesParser_test);

View File

@ -1,59 +0,0 @@
/*
* Copyright (c) 2016, 2017 Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
#include "precompiled.hpp"
// The test function is only available in debug builds
#ifdef ASSERT
#include "unittest.hpp"
// Forward declaration; the test body lives in metaspace.cpp where the
// ChunkManager class is visible.
void ChunkManager_test_list_index();
TEST(ChunkManager, list_index) {
// The ChunkManager is only available in metaspace.cpp,
// so the test code is located in that file.
ChunkManager_test_list_index();
}
// Test hooks implemented in metaspace.cpp; they create, run and destroy
// an opaque test-state object for the chunk-return tests.
extern void* setup_chunkmanager_returntests();
extern void teardown_chunkmanager_returntests(void*);
extern void run_chunkmanager_returntests(void* p, float phase_length_factor);
// Fixture owning the opaque chunk-return test state across a test run.
class ChunkManagerReturnTest : public ::testing::Test {
protected:
// Opaque state created by setup_chunkmanager_returntests().
void* _test;
virtual void SetUp() {
_test = setup_chunkmanager_returntests();
}
virtual void TearDown() {
teardown_chunkmanager_returntests(_test);
}
};
// Run the chunk-return tests with phase length factors 0.0, 0.5 and 1.0.
TEST_VM_F(ChunkManagerReturnTest, test00) { run_chunkmanager_returntests(_test, 0.0f); }
TEST_VM_F(ChunkManagerReturnTest, test05) { run_chunkmanager_returntests(_test, 0.5f); }
TEST_VM_F(ChunkManagerReturnTest, test10) { run_chunkmanager_returntests(_test, 1.0f); }
#endif // ASSERT

View File

@ -41,11 +41,16 @@ class MetachunkTest {
};
TEST(Metachunk, basic) {
size_t size = 2 * 1024 * 1024;
void* memory = malloc(size);
const ChunkIndex chunk_type = MediumIndex;
const bool is_class = false;
const size_t word_size = get_size_for_nonhumongous_chunktype(chunk_type, is_class);
// Allocate the chunk with correct alignment.
void* memory = malloc(word_size * BytesPerWord * 2);
ASSERT_TRUE(NULL != memory) << "Failed to malloc 2MB";
Metachunk* metachunk = ::new (memory) Metachunk(size / BytesPerWord, NULL);
void* p_placement = align_up(memory, word_size * BytesPerWord);
Metachunk* metachunk = ::new (p_placement) Metachunk(chunk_type, is_class, word_size, NULL);
EXPECT_EQ((MetaWord*) metachunk, metachunk->bottom());
EXPECT_EQ((uintptr_t*) metachunk + metachunk->size(), metachunk->end());

View File

@ -0,0 +1,265 @@
/*
* Copyright (c) 2018, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2018, SAP.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
#include "precompiled.hpp"
#include "memory/allocation.inline.hpp"
#include "memory/metaspace.hpp"
#include "runtime/mutex.hpp"
#include "runtime/os.hpp"
#include "utilities/align.hpp"
#include "utilities/debug.hpp"
#include "utilities/globalDefinitions.hpp"
#include "utilities/ostream.hpp"
#include "unittest.hpp"
#define NUM_PARALLEL_METASPACES 50
#define MAX_PER_METASPACE_ALLOCATION_WORDSIZE (512 * K)
//#define DEBUG_VERBOSE true
#ifdef DEBUG_VERBOSE
// Snapshot of the number of free chunks held by a chunk manager, broken
// down by chunk type; filled in by
// test_metaspace_retrieve_chunkmanager_statistics().
struct chunkmanager_statistics_t {
int num_specialized_chunks;
int num_small_chunks;
int num_medium_chunks;
int num_humongous_chunks;
};
extern void test_metaspace_retrieve_chunkmanager_statistics(Metaspace::MetadataType mdType, chunkmanager_statistics_t* out);
// Queries the chunk manager for mdType and prints its free chunk counts
// (specialized / small / medium / humongous) to the given stream.
static void print_chunkmanager_statistics(outputStream* st, Metaspace::MetadataType mdType) {
  chunkmanager_statistics_t counters;
  test_metaspace_retrieve_chunkmanager_statistics(mdType, &counters);
  st->print_cr("free chunks: %d / %d / %d / %d",
               counters.num_specialized_chunks,
               counters.num_small_chunks,
               counters.num_medium_chunks,
               counters.num_humongous_chunks);
}
#endif
// Word sizes of the three fixed (non-humongous) chunk types, retrieved
// from the running VM via test_metaspace_retrieve_chunk_geometry().
struct chunk_geometry_t {
size_t specialized_chunk_word_size;
size_t small_chunk_word_size;
size_t medium_chunk_word_size;
};
extern void test_metaspace_retrieve_chunk_geometry(Metaspace::MetadataType mdType, chunk_geometry_t* out);
// Stress-test fixture for metaspace chunk allocation and reclamation:
// maintains up to NUM_PARALLEL_METASPACES metaspaces and alternates
// between filling random spaces and deleting random spaces, to exercise
// splitting, merging and reuse of free chunks in the ChunkManager.
class MetaspaceAllocationTest : public ::testing::Test {
protected:

  // Per-slot state: the Metaspace, its lock, and the number of words
  // allocated from it so far.
  struct {
    size_t allocated;
    Mutex* lock;
    Metaspace* space;
    bool is_empty() const { return allocated == 0; }
    bool is_full() const { return allocated >= MAX_PER_METASPACE_ALLOCATION_WORDSIZE; }
  } _spaces[NUM_PARALLEL_METASPACES];

  // Chunk word sizes reported by the VM in SetUp().
  chunk_geometry_t _chunk_geometry;

  virtual void SetUp() {
    ::memset(_spaces, 0, sizeof(_spaces));
    test_metaspace_retrieve_chunk_geometry(Metaspace::NonClassType, &_chunk_geometry);
  }

  virtual void TearDown() {
    for (int i = 0; i < NUM_PARALLEL_METASPACES; i ++) {
      if (_spaces[i].space != NULL) {
        delete _spaces[i].space;
        delete _spaces[i].lock;
      }
    }
  }

  // Creates the Metaspace for slot i (and, on first use, its lock).
  void create_space(int i) {
    assert(i >= 0 && i < NUM_PARALLEL_METASPACES, "Sanity");
    assert(_spaces[i].space == NULL && _spaces[i].allocated == 0, "Sanity");
    if (_spaces[i].lock == NULL) {
      _spaces[i].lock = new Mutex(Monitor::native, "gtest-MetaspaceAllocationTest-lock", false, Monitor::_safepoint_check_never);
      ASSERT_TRUE(_spaces[i].lock != NULL);
    }
    // Let every ~10th space be an anonymous one to test different allocation patterns.
    const Metaspace::MetaspaceType msType = (os::random() % 100 < 10) ?
      Metaspace::AnonymousMetaspaceType : Metaspace::StandardMetaspaceType;
    _spaces[i].space = new Metaspace(_spaces[i].lock, msType);
    _spaces[i].allocated = 0;
    ASSERT_TRUE(_spaces[i].space != NULL);
  }

  // Returns the index of a random space where index is [0..metaspaces) and which is
  // empty, non-empty or full.
  // Returns -1 if no matching space exists.
  enum fillgrade { fg_empty, fg_non_empty, fg_full };
  int get_random_matching_space(int metaspaces, fillgrade fg) {
    const int start_index = os::random() % metaspaces;
    int i = start_index;
    do {
      if (fg == fg_empty && _spaces[i].is_empty()) {
        return i;
      } else if ((fg == fg_full && _spaces[i].is_full()) ||
                 (fg == fg_non_empty && !_spaces[i].is_full() && !_spaces[i].is_empty())) {
        return i;
      }
      i ++;
      if (i == metaspaces) {
        i = 0;
      }
    } while (i != start_index);
    return -1;
  }

  // Convenience wrappers around get_random_matching_space().
  int get_random_empty_space(int metaspaces) { return get_random_matching_space(metaspaces, fg_empty); }
  int get_random_non_empty_space(int metaspaces) { return get_random_matching_space(metaspaces, fg_non_empty); }
  int get_random_full_space(int metaspaces) { return get_random_matching_space(metaspaces, fg_full); }

  // Runs the stress test: alternate allocation and deletion phases over
  // `metaspaces` parallel spaces for `phases` phases, doing up to
  // `allocs_per_phase` allocations in each allocation phase.
  void do_test(Metaspace::MetadataType mdType, int metaspaces, int phases, int allocs_per_phase,
               float probability_for_large_allocations // 0.0-1.0
  ) {
    // Alternate between breathing in (allocating n blocks for a random Metaspace) and
    // breathing out (deleting a random Metaspace). The intent is to stress the coalescing
    // and splitting of free chunks.
    int phases_done = 0;
    bool allocating = true;
    while (phases_done < phases) {
      bool force_switch = false;
      if (allocating) {
        // Allocate space from metaspace, with a preference for completely empty spaces. This
        // should provide a good mixture of metaspaces in the virtual space.
        int index = get_random_empty_space(metaspaces);
        if (index == -1) {
          index = get_random_non_empty_space(metaspaces);
        }
        if (index == -1) {
          // All spaces are full, switch to freeing.
          force_switch = true;
        } else {
          // create space if it does not yet exist.
          if (_spaces[index].space == NULL) {
            create_space(index);
          }
          // Allocate a bunch of blocks from it. Mostly small stuff but mix in large allocations
          // to force humongous chunk allocations.
          int allocs_done = 0;
          while (allocs_done < allocs_per_phase && !_spaces[index].is_full()) {
            size_t size = 0;
            int r = os::random() % 1000;
            if ((float)r < probability_for_large_allocations * 1000.0) {
              // Large allocation: [medium, 2 * medium) words, forcing a humongous chunk.
              size = (os::random() % _chunk_geometry.medium_chunk_word_size) + _chunk_geometry.medium_chunk_word_size;
            } else {
              size = os::random() % 64;
            }
            MetaWord* const p = _spaces[index].space->allocate(size, mdType);
            if (p == NULL) {
              // We very probably did hit the metaspace "until-gc" limit.
#ifdef DEBUG_VERBOSE
              tty->print_cr("OOM for " SIZE_FORMAT " words. ", size);
#endif
              // Just switch to deallocation and resume tests.
              force_switch = true;
              break;
            } else {
              _spaces[index].allocated += size;
              allocs_done ++;
            }
          }
        }
      } else {
        // freeing: find a metaspace and delete it, with preference for completely filled spaces.
        int index = get_random_full_space(metaspaces);
        if (index == -1) {
          index = get_random_non_empty_space(metaspaces);
        }
        if (index == -1) {
          force_switch = true;
        } else {
          assert(_spaces[index].space != NULL && _spaces[index].allocated > 0, "Sanity");
          delete _spaces[index].space;
          _spaces[index].space = NULL;
          _spaces[index].allocated = 0;
        }
      }
      if (force_switch) {
        allocating = !allocating;
      } else {
        // periodically switch between allocating and freeing, but prefer allocation because
        // we want to intermingle allocations of multiple metaspaces.
        allocating = os::random() % 5 < 4;
      }
      phases_done ++;
#ifdef DEBUG_VERBOSE
      int metaspaces_in_use = 0;
      size_t total_allocated = 0;
      for (int i = 0; i < metaspaces; i ++) {
        if (_spaces[i].allocated > 0) {
          total_allocated += _spaces[i].allocated;
          metaspaces_in_use ++;
        }
      }
      // phases_done is a signed int, so print it with %d (was %u).
      tty->print("%d:\tspaces: %d total words: " SIZE_FORMAT "\t\t\t", phases_done, metaspaces_in_use, total_allocated);
      print_chunkmanager_statistics(tty, mdType);
#endif
    }
#ifdef DEBUG_VERBOSE
    tty->print_cr("Test finished. ");
    MetaspaceAux::print_metaspace_map(tty, mdType);
    print_chunkmanager_statistics(tty, mdType);
#endif
  }
};
// Sanity-check the chunk geometry reported by the VM in SetUp(): sizes
// are nonzero and strictly increasing, and each chunk size is an exact
// multiple of the next smaller one.
TEST_F(MetaspaceAllocationTest, chunk_geometry) {
ASSERT_GT(_chunk_geometry.specialized_chunk_word_size, (size_t) 0);
ASSERT_GT(_chunk_geometry.small_chunk_word_size, _chunk_geometry.specialized_chunk_word_size);
ASSERT_EQ(_chunk_geometry.small_chunk_word_size % _chunk_geometry.specialized_chunk_word_size, (size_t)0);
ASSERT_GT(_chunk_geometry.medium_chunk_word_size, _chunk_geometry.small_chunk_word_size);
ASSERT_EQ(_chunk_geometry.medium_chunk_word_size % _chunk_geometry.small_chunk_word_size, (size_t)0);
}
// Single metaspace, non-class metadata, small allocations only.
TEST_VM_F(MetaspaceAllocationTest, single_space_nonclass) {
do_test(Metaspace::NonClassType, 1, 1000, 100, 0);
}
// Single metaspace, class metadata.
TEST_VM_F(MetaspaceAllocationTest, single_space_class) {
do_test(Metaspace::ClassType, 1, 1000, 100, 0);
}
// Many parallel metaspaces, non-class metadata.
TEST_VM_F(MetaspaceAllocationTest, multi_space_nonclass) {
do_test(Metaspace::NonClassType, NUM_PARALLEL_METASPACES, 100, 1000, 0.0);
}
// Many parallel metaspaces, class metadata.
TEST_VM_F(MetaspaceAllocationTest, multi_space_class) {
do_test(Metaspace::ClassType, NUM_PARALLEL_METASPACES, 100, 1000, 0.0);
}
TEST_VM_F(MetaspaceAllocationTest, multi_space_nonclass_2) {
// many metaspaces, with humongous chunks mixed in.
do_test(Metaspace::NonClassType, NUM_PARALLEL_METASPACES, 100, 1000, .006f);
}