/* * Copyright (c) 1997, 2022, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. *
*/
// The virtual machine must never call one of the implicitly declared
// global allocation or deletion functions.  (Such calls may result in
// link-time or run-time errors.)  For convenience and documentation of
// intended use, classes in the virtual machine may be derived from one
// of the following allocation classes, some of which define allocation
// and deletion functions.
// Note: std::malloc and std::free should never be called directly.
//
// For objects allocated in the resource area (see resourceArea.hpp).
// - ResourceObj
//
// For objects allocated in the C-heap (managed by: free & malloc and tracked with NMT)
// - CHeapObj
//
// For objects allocated on the stack.
// - StackObj
//
// For classes used as name spaces.
// - AllStatic
//
// For classes in Metaspace (class data)
// - MetaspaceObj
//
// The printable subclasses are used for debugging and define virtual
// member functions for printing. Classes that avoid allocating the
// vtbl entries in the objects should therefore not be the printable
// subclasses.
//
// The following macros and function should be used to allocate memory
// directly in the resource area or in the C-heap. The _OBJ variants
// of the NEW/FREE_C_HEAP macros are used for alloc/dealloc simple
// objects which are not inherited from CHeapObj; note constructor and
// destructor are not called. The preferable way to allocate objects
// is using the new operator.
//
// WARNING: The array variant must only be used for a homogeneous array
// where all objects are of the exact type specified. If subtypes are
// stored in the array then one must pay attention to calling destructors
// as needed.
//
//   NEW_RESOURCE_ARRAY*
//   REALLOC_RESOURCE_ARRAY*
//   FREE_RESOURCE_ARRAY*
//   NEW_RESOURCE_OBJ*
//   NEW_C_HEAP_ARRAY*
//   REALLOC_C_HEAP_ARRAY*
//   FREE_C_HEAP_ARRAY*
//   NEW_C_HEAP_OBJ*
//   FREE_C_HEAP_OBJ
//
//   char* AllocateHeap(size_t size, MEMFLAGS flags, const NativeCallStack& stack, AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM);
//   char* AllocateHeap(size_t size, MEMFLAGS flags, AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM);
//   char* ReallocateHeap(char *old, size_t size, MEMFLAGS flag, AllocFailType alloc_failmode = AllocFailStrategy::EXIT_OOM);
//   void FreeHeap(void* p);
//
// X-macro catalogue of the NMT (Native Memory Tracking) memory categories.
// Each entry is f(enum_tag, human_readable_name); clients instantiate the
// list by supplying a macro for f (e.g. to declare enum values or name
// tables).  The extraction had stray mid-line backslashes on the #define
// and mtInternal lines, which are invalid line continuations; the macro is
// restored to its proper one-entry-per-line form.
#define MEMORY_TYPES_DO(f)                                                           \
  /* Memory type by sub systems. It occupies lower byte. */                          \
  f(mtJavaHeap,       "Java Heap")   /* Java heap                                 */ \
  f(mtClass,          "Class")       /* Java classes                              */ \
  f(mtThread,         "Thread")      /* thread objects                            */ \
  f(mtThreadStack,    "Thread Stack")                                                \
  f(mtCode,           "Code")        /* generated code                            */ \
  f(mtGC,             "GC")                                                          \
  f(mtGCCardSet,      "GCCardSet")   /* G1 card set remembered set                */ \
  f(mtCompiler,       "Compiler")                                                    \
  f(mtJVMCI,          "JVMCI")                                                       \
  f(mtInternal,       "Internal")    /* memory used by VM, but does not belong to */ \
                                     /* any of above categories, and not used by  */ \
                                     /* NMT                                       */ \
  f(mtOther,          "Other")       /* memory not used by VM                     */ \
  f(mtSymbol,         "Symbol")                                                      \
  f(mtNMT,            "Native Memory Tracking")  /* memory used by NMT            */ \
  f(mtClassShared,    "Shared class space")      /* class data sharing            */ \
  f(mtChunk,          "Arena Chunk") /* chunk that holds content of arenas        */ \
  f(mtTest,           "Test")        /* Test type for verifying NMT               */ \
  f(mtTracing,        "Tracing")                                                     \
  f(mtLogging,        "Logging")                                                     \
  f(mtStatistics,     "Statistics")                                                  \
  f(mtArguments,      "Arguments")                                                   \
  f(mtModule,         "Module")                                                      \
  f(mtSafepoint,      "Safepoint")                                                   \
  f(mtSynchronizer,   "Synchronization")                                             \
  f(mtServiceability, "Serviceability")                                              \
  f(mtMetaspace,      "Metaspace")                                                   \
  f(mtStringDedup,    "String Deduplication")                                        \
  f(mtObjectMonitor,  "Object Monitors")                                             \
  f(mtNone,           "Unknown")                                                     \
  //end
/* * Memory types
*/ enumclass MEMFLAGS : uint8_t {
MEMORY_TYPES_DO(MEMORY_TYPE_DECLARE_ENUM)
mt_number_of_types // number of memory types (mtDontTrack // is not included as validate type)
}; // Extra insurance that MEMFLAGS truly has the same size as uint8_t.
STATIC_ASSERT(sizeof(MEMFLAGS) == sizeof(uint8_t));
// Generate a file-scope short alias for one MEMFLAGS enumerator, so code can
// write e.g. mtGC instead of MEMFLAGS::mtGC.  The human_readable argument is
// unused here; it is part of the f(tag, name) entry shape that
// MEMORY_TYPES_DO supplies.
#define MEMORY_TYPE_SHORTNAME(type, human_readable) \
constexpr MEMFLAGS type = MEMFLAGS::type;
// Generate short aliases for the enum values. E.g. mtGC instead of MEMFLAGS::mtGC.
MEMORY_TYPES_DO(MEMORY_TYPE_SHORTNAME)
// Make an int version of the sentinel end value.
constexpr int mt_number_of_types = static_cast<int>(MEMFLAGS::mt_number_of_types);
// Base class for objects stored in Metaspace. // Calling delete will result in fatal error. // // Do not inherit from something with a vptr because this class does // not introduce one. This class is used to allocate both shared read-only // and shared read-write classes. //
class ClassLoaderData; class MetaspaceClosure;
class MetaspaceObj { // There are functions that all subtypes of MetaspaceObj are expected // to implement, so that templates which are defined for this class hierarchy // can work uniformly. Within the sub-hierarchy of Metadata, these are virtuals. // Elsewhere in the hierarchy of MetaspaceObj, type(), size(), and/or on_stack() // can be static if constant. // // The following functions are required by MetaspaceClosure: // void metaspace_pointers_do(MetaspaceClosure* it) { <walk my refs> } // int size() const { return align_up(sizeof(<This>), wordSize) / wordSize; } // MetaspaceObj::Type type() const { return <This>Type; } // // The following functions are required by MetadataFactory::free_metadata(): // bool on_stack() { return false; } // void deallocate_contents(ClassLoaderData* loader_data);
friendclass VMStructs; // When CDS is enabled, all shared metaspace objects are mapped // into a single contiguous memory block, so we can use these // two pointers to quickly determine if something is in the // shared metaspace. // When CDS is not enabled, both pointers are set to NULL. staticvoid* _shared_metaspace_base; // (inclusive) low address staticvoid* _shared_metaspace_top; // (exclusive) high address
public:
// Returns true if the pointer points to a valid MetaspaceObj. A valid // MetaspaceObj is MetaWord-aligned and contained within either // non-shared or shared metaspace. staticbool is_valid(const MetaspaceObj* p);
#if INCLUDE_CDS staticbool is_shared(const MetaspaceObj* p) { // If no shared metaspace regions are mapped, _shared_metaspace_{base,top} will // both be NULL and all values of p will be rejected quickly. return (((void*)p) < _shared_metaspace_top &&
((void*)p) >= _shared_metaspace_base);
} bool is_shared() const { return MetaspaceObj::is_shared(this); } #else staticbool is_shared(const MetaspaceObj* p) { returnfalse; } bool is_shared() const { returnfalse; } #endif
static MetaspaceObj::Type array_type(size_t elem_size) { switch (elem_size) { case 1: return TypeArrayU1Type; case 2: return TypeArrayU2Type; case 4: return TypeArrayU4Type; case 8: return TypeArrayU8Type; default: return TypeArrayOtherType;
}
}
void* operatornew(size_t size, ClassLoaderData* loader_data,
size_t word_size,
Type type, JavaThread* thread) throw(); // can't use TRAPS from this header file. void* operatornew(size_t size, ClassLoaderData* loader_data,
size_t word_size,
Type type) throw(); voidoperatordelete(void* p) { ShouldNotCallThis(); }
// Declare a *static* method with the same signature in any subclass of MetaspaceObj // that should be read-only by default. See symbol.hpp for an example. This function // is used by the templates in metaspaceClosure.hpp staticbool is_read_only_by_default() { returnfalse; }
};
// Base class for classes that constitute name spaces.
//---------------------------------------------------------------------- // Base class for objects allocated in the resource area. class ResourceObj { public: void* operatornew(size_t size) throw() { return resource_allocate_bytes(size);
}
//---------------------------------------------------------------------- // Base class for objects allocated in the resource area per default. // Optionally, objects may be allocated on the C heap with // new (AnyObj::C_HEAP) Foo(...) or in an Arena with new (&arena). // AnyObj's can be allocated within other objects, but don't use // new or delete (allocation_type is unknown). If new is used to allocate, // use delete to deallocate. class AnyObj { public: enum allocation_type { STACK_OR_EMBEDDED = 0, RESOURCE_AREA, C_HEAP, ARENA, allocation_mask = 0x3 }; staticvoid set_allocation_type(address res, allocation_type type) NOT_DEBUG_RETURN; #ifdef ASSERT private: // When this object is allocated on stack the new() operator is not // called but garbage on stack may look like a valid allocation_type. // Store negated 'this' pointer when new() is called to distinguish cases. // Use second array's element for verification value to distinguish garbage.
uintptr_t _allocation_t[2]; bool is_type_set() const; void initialize_allocation_info(); public:
allocation_type get_allocation_type() const; bool allocated_on_stack_or_embedded() const { return get_allocation_type() == STACK_OR_EMBEDDED; } bool allocated_on_res_area() const { return get_allocation_type() == RESOURCE_AREA; } bool allocated_on_C_heap() const { return get_allocation_type() == C_HEAP; } bool allocated_on_arena() const { return get_allocation_type() == ARENA; } protected:
AnyObj(); // default constructor
AnyObj(const AnyObj& r); // default copy constructor
AnyObj& operator=(const AnyObj& r); // default copy assignment
~AnyObj(); #endif// ASSERT
// One of the following macros must be used when allocating an array
// or object to determine whether it should reside in the C heap or in
// the resource area.

// Deallocate an object in the C heap WITHOUT running its destructor;
// pair with NEW_C_HEAP_OBJ.  (The extraction had fused the #define onto
// the end of the comment line, swallowing the macro; restored here.)
#define FREE_C_HEAP_OBJ(objname)\
  FreeHeap((char*)objname);
//------------------------------ReallocMark--------------------------------- // Code which uses REALLOC_RESOURCE_ARRAY should check an associated // ReallocMark, which is declared in the same scope as the reallocated // pointer. Any operation that could __potentially__ cause a reallocation // should check the ReallocMark. class ReallocMark: public StackObj { protected:
NOT_PRODUCT(int _nesting;)
// Helper class to allocate arrays that may become large. // Uses the OS malloc for allocations smaller than ArrayAllocatorMallocLimit // and uses mapped memory for larger allocations. // Most OS mallocs do something similar but Solaris malloc does not revert // to mapped memory for large allocations. By default ArrayAllocatorMallocLimit // is set so that we always use malloc except for Solaris where we set the // limit to get mapped memory. template <class E> class ArrayAllocator : public AllStatic { private: staticbool should_use_malloc(size_t length);
// Uses mmapped memory for all allocations. All allocations are initially // zero-filled. No pre-touching. template <class E> class MmapArrayAllocator : public AllStatic { private: static size_t size_for(size_t length);
// NOTE(review): The lines below were extraction residue (a German website
// disclaimer), not part of this source file.  Preserved here as a comment so
// the header remains valid C++; safe to delete once confirmed against the
// upstream file.
//
//   "Die Informationen auf dieser Webseite wurden nach bestem Wissen
//    sorgfältig zusammengestellt. Es wird jedoch weder Vollständigkeit,
//    noch Richtigkeit, noch Qualität der bereitgestellten Informationen
//    zugesichert.  Bemerkung: Die farbliche Syntaxdarstellung ist noch
//    experimentell."