8301076: Replace NULL with nullptr in share/prims/

Reviewed-by: kbarrett, dholmes
Author: Johan Sjölen
Date: 2023-01-31 14:22:44 +00:00
parent 90ec19efed
commit b76a52f210
47 changed files with 2037 additions and 2037 deletions
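The change is mechanical: every use of the NULL macro under share/prims/ becomes the C++11 nullptr keyword, and comments that said "NULL" now say "null" where they mean a null reference or null pointer rather than the macro itself. As a reminder of why the keyword is preferred, here is a minimal standalone sketch (not part of the patch; the report() overloads are hypothetical): NULL is just an integer constant, so it can select the wrong overload or make a call ambiguous, while nullptr has its own type, std::nullptr_t, and converts only to pointer types.

#include <cstddef>   // NULL
#include <iostream>

// Hypothetical overload pair, used only to illustrate the pitfall.
void report(int v)         { std::cout << "int overload: " << v << '\n'; }
void report(const char* p) { std::cout << "pointer overload\n"; }

int main() {
  // report(NULL);  // selects the int overload, or is ambiguous if NULL is 0L
  report(nullptr);  // always the pointer overload: nullptr never converts to int
  report(0);        // the int overload, stated explicitly
  return 0;
}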


@ -1,5 +1,5 @@
/*
* Copyright (c) 2020, 2022, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2020, 2023, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -43,7 +43,7 @@ void ForeignGlobals::parse_register_array(objArrayOop jarray, StorageType type_i
}
inline const char* null_safe_string(const char* str) {
return str == nullptr ? "NULL" : str;
return str == nullptr ? "null" : str;
}
#endif // SHARE_PRIMS_FOREIGN_GLOBALS_INLINE_HPP
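For context, the helper above is a formatting guard: it substitutes a literal string when its argument is a null pointer, so a "%s" format directive never receives nullptr; this hunk also changes the substituted text from "NULL" to "null". A minimal usage sketch, assuming a hypothetical caller (the register-name variable below is made up, not taken from the patch):

#include <cstdio>

// Same shape as the inline helper shown in the hunk above.
inline const char* null_safe_string(const char* str) {
  return str == nullptr ? "null" : str;
}

int main() {
  const char* reg_name = nullptr;  // hypothetical optional name
  std::printf("register: %s\n", null_safe_string(reg_name));  // prints "register: null"
  return 0;
}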


@ -1,5 +1,5 @@
/*
* Copyright (c) 2003, 2022, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2003, 2023, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -158,7 +158,7 @@ static bool is_decipherable_compiled_frame(JavaThread* thread, frame* fr, Compil
PcDesc* pc_desc = nm->pc_desc_at(fr->pc());
// Did we find a useful PcDesc?
if (pc_desc != NULL &&
if (pc_desc != nullptr &&
pc_desc->scope_decode_offset() != DebugInformationRecorder::serialized_null) {
return true;
}
@ -172,7 +172,7 @@ static bool is_decipherable_compiled_frame(JavaThread* thread, frame* fr, Compil
PcDesc* pc_desc = nm->pc_desc_near(fr->pc() + 1);
// Now do we have a useful PcDesc?
if (pc_desc == NULL ||
if (pc_desc == nullptr ||
pc_desc->scope_decode_offset() == DebugInformationRecorder::serialized_null) {
// No debug information is available for this PC.
//
@ -207,7 +207,7 @@ static bool is_decipherable_compiled_frame(JavaThread* thread, frame* fr, Compil
// Determine if 'fr' is a walkable interpreted frame. Returns false
// if it is not. *method_p, and *bci_p are not set when false is
// returned. *method_p is non-NULL if frame was executing a Java
// returned. *method_p is non-null if frame was executing a Java
// method. *bci_p is != -1 if a valid BCI in the Java method could
// be found.
// Note: this method returns true when a valid Java method is found
@ -273,13 +273,13 @@ static bool is_decipherable_interpreted_frame(JavaThread* thread,
// Check the return value of find_initial_Java_frame and the value of
// 'method_p' to decide on how use the results returned by this method.
//
// If 'method_p' is not NULL, an initial Java frame has been found and
// If 'method_p' is not null, an initial Java frame has been found and
// the stack can be walked starting from that initial frame. In this case,
// 'method_p' points to the Method that the initial frame belongs to and
// the initial Java frame is returned in initial_frame_p.
//
// find_initial_Java_frame() returns true if a Method has been found (i.e.,
// 'method_p' is not NULL) and the initial frame that belongs to that Method
// 'method_p' is not null) and the initial frame that belongs to that Method
// is decipherable.
//
// A frame is considered to be decipherable:
@ -292,7 +292,7 @@ static bool is_decipherable_interpreted_frame(JavaThread* thread,
// Note that find_initial_Java_frame() can return false even if an initial
// Java method was found (e.g., there is no PCDesc available for the method).
//
// If 'method_p' is NULL, it was not possible to find a Java frame when
// If 'method_p' is null, it was not possible to find a Java frame when
// walking the stack starting from 'fr'. In this case find_initial_Java_frame
// returns false.
@ -305,10 +305,10 @@ static bool find_initial_Java_frame(JavaThread* thread,
// It is possible that for a frame containing a compiled method
// we can capture the method but no bci. If we get no
// bci the frame isn't walkable but the method is usable.
// Therefore we init the returned Method* to NULL so the
// Therefore we init the returned Method* to null so the
// caller can make the distinction.
*method_p = NULL;
*method_p = nullptr;
// On the initial call to this method the frame we get may not be
// recognizable to us. This should only happen if we are in a JRT_LEAF
@ -333,11 +333,11 @@ static bool find_initial_Java_frame(JavaThread* thread,
}
if (candidate.is_entry_frame()) {
// jcw is NULL if the java call wrapper could not be found
// jcw is null if the java call wrapper could not be found
JavaCallWrapper* jcw = candidate.entry_frame_call_wrapper_if_safe(thread);
// If initial frame is frame from StubGenerator and there is no
// previous anchor, there are no java frames associated with a method
if (jcw == NULL || jcw->is_first_frame()) {
if (jcw == nullptr || jcw->is_first_frame()) {
return false;
}
}
@ -363,7 +363,7 @@ static bool find_initial_Java_frame(JavaThread* thread,
// it see if we can find such a frame because only frames with codeBlobs
// are possible Java frames.
if (fr->cb() == NULL) {
if (fr->cb() == nullptr) {
// See if we can find a useful frame
int loop_count;
@ -376,9 +376,9 @@ static bool find_initial_Java_frame(JavaThread* thread,
for (loop_count = 0; loop_max == 0 || loop_count < loop_max; loop_count++) {
if (!candidate.safe_for_sender(thread)) return false;
candidate = candidate.sender(&map);
if (candidate.cb() != NULL) break;
if (candidate.cb() != nullptr) break;
}
if (candidate.cb() == NULL) return false;
if (candidate.cb() == nullptr) return false;
}
// We have a frame known to be in the codeCache
@ -393,11 +393,11 @@ static bool find_initial_Java_frame(JavaThread* thread,
for (loop_count = 0; loop_max == 0 || loop_count < loop_max; loop_count++) {
if (candidate.is_entry_frame()) {
// jcw is NULL if the java call wrapper couldn't be found
// jcw is null if the java call wrapper couldn't be found
JavaCallWrapper *jcw = candidate.entry_frame_call_wrapper_if_safe(thread);
// If initial frame is frame from StubGenerator and there is no
// previous anchor, there are no java frames associated with a method
if (jcw == NULL || jcw->is_first_frame()) {
if (jcw == nullptr || jcw->is_first_frame()) {
return false;
}
}
@ -443,7 +443,7 @@ static bool find_initial_Java_frame(JavaThread* thread,
// is_decipherable_compiled_frame may modify candidate's pc
*initial_frame_p = candidate;
assert(nm->pc_desc_at(candidate.pc()) != NULL, "debug information must be available if the frame is decipherable");
assert(nm->pc_desc_at(candidate.pc()) != nullptr, "debug information must be available if the frame is decipherable");
return true;
}
@ -457,7 +457,7 @@ static bool find_initial_Java_frame(JavaThread* thread,
// since once we find a frame in the code cache they
// all should be there.
if (candidate.cb() == NULL) return false;
if (candidate.cb() == nullptr) return false;
}
@ -478,13 +478,13 @@ static void forte_fill_call_trace_given_top(JavaThread* thd,
int count;
count = 0;
assert(trace->frames != NULL, "trace->frames must be non-NULL");
assert(trace->frames != nullptr, "trace->frames must be non-null");
// Walk the stack starting from 'top_frame' and search for an initial Java frame.
find_initial_Java_frame(thd, &top_frame, &initial_Java_frame, &method, &bci);
// Check if a Java Method has been found.
if (method == NULL) return;
if (method == nullptr) return;
if (!Method::is_valid_method(method)) {
trace->num_frames = ticks_GC_active; // -2
@ -579,7 +579,7 @@ void AsyncGetCallTrace(ASGCT_CallTrace *trace, jint depth, void* ucontext) {
Thread* raw_thread = Thread::current_or_null_safe();
JavaThread* thread;
if (trace->env_id == NULL || raw_thread == NULL || !raw_thread->is_Java_thread() ||
if (trace->env_id == nullptr || raw_thread == nullptr || !raw_thread->is_Java_thread() ||
(thread = JavaThread::cast(raw_thread))->is_exiting()) {
// bad env_id, thread has exited or thread is exiting
trace->num_frames = ticks_thread_exit; // -8
@ -710,8 +710,8 @@ void Forte::register_stub(const char* name, address start, address end) {
assert(pointer_delta(end, start, sizeof(jbyte)) < INT_MAX,
"Code size exceeds maximum range");
collector_func_load((char*)name, NULL, NULL, start,
pointer_delta(end, start, sizeof(jbyte)), 0, NULL);
collector_func_load((char*)name, nullptr, nullptr, start,
pointer_delta(end, start, sizeof(jbyte)), 0, nullptr);
#endif // !_WINDOWS
}
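
The hunks above center on the contract spelled out in the comments: find_initial_Java_frame() only reports success when a decipherable Java frame is found, and AsyncGetCallTrace() encodes failures as negative num_frames values (ticks_GC_active is -2, ticks_thread_exit is -8). For orientation, here is a hedged sketch of the profiler side of that contract. ASGCT is not a formal JNI export, so the caller declares the structs itself; only the env_id, num_frames and frames fields are confirmed by the code above, the ASGCT_CallFrame layout is the commonly assumed one, and the sample() helper is hypothetical.

#include <jni.h>

// Assumed frame layout; a profiler declares these shapes itself.
typedef struct {
  jint lineno;          // BCI for Java frames, negative for native frames (assumption)
  jmethodID method_id;  // method executing in this frame
} ASGCT_CallFrame;

typedef struct {
  JNIEnv* env_id;           // JNIEnv of the sampled thread
  jint num_frames;          // > 0: frame count; < 0: error code (e.g. -8 = thread exiting)
  ASGCT_CallFrame* frames;  // caller-provided buffer
} ASGCT_CallTrace;

extern "C" void AsyncGetCallTrace(ASGCT_CallTrace* trace, jint depth, void* ucontext);

// Hypothetical helper, typically driven from a SIGPROF handler.
static void sample(JNIEnv* env, void* ucontext, ASGCT_CallFrame* buf, jint depth) {
  ASGCT_CallTrace trace;
  trace.env_id = env;
  trace.frames = buf;
  trace.num_frames = 0;
  AsyncGetCallTrace(&trace, depth, ucontext);
  if (trace.num_frames <= 0) {
    return;  // no decipherable Java frames (GC active, exiting thread, unknown frame, ...)
  }
  // frames[0 .. num_frames-1] now describe the Java stack, innermost frame first.
}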


@ -197,9 +197,9 @@ bool jfieldIDWorkaround::is_valid_jfieldID(Klass* k, jfieldID id) {
} else {
JNIid* result = (JNIid*) id;
#ifdef ASSERT
return result != NULL && result->is_static_field_id();
return result != nullptr && result->is_static_field_id();
#else
return result != NULL;
return result != nullptr;
#endif
}
}
@ -241,7 +241,7 @@ bool jfieldIDWorkaround::klass_hash_ok(Klass* k, jfieldID id) {
if ((k->identity_hash() & klass_mask) == klass_hash)
return true;
k = k->super();
} while (k != NULL);
} while (k != nullptr);
return false;
}
@ -278,16 +278,16 @@ JNI_ENTRY(jclass, jni_DefineClass(JNIEnv *env, const char *name, jobject loaderR
HOTSPOT_JNI_DEFINECLASS_ENTRY(
env, (char*) name, loaderRef, (char*) buf, bufLen);
jclass cls = NULL;
jclass cls = nullptr;
DT_RETURN_MARK(DefineClass, jclass, (const jclass&)cls);
// Class resolution will get the class name from the .class stream if the name is null.
TempNewSymbol class_name = name == NULL ? NULL :
TempNewSymbol class_name = name == nullptr ? nullptr :
SystemDictionary::class_name_symbol(name, vmSymbols::java_lang_NoClassDefFoundError(),
CHECK_NULL);
ResourceMark rm(THREAD);
ClassFileStream st((u1*)buf, bufLen, NULL, ClassFileStream::verify);
ClassFileStream st((u1*)buf, bufLen, nullptr, ClassFileStream::verify);
Handle class_loader (THREAD, JNIHandles::resolve(loaderRef));
Handle protection_domain;
ClassLoadInfo cl_info(protection_domain);
@ -312,7 +312,7 @@ DT_RETURN_MARK_DECL(FindClass, jclass
JNI_ENTRY(jclass, jni_FindClass(JNIEnv *env, const char *name))
HOTSPOT_JNI_FINDCLASS_ENTRY(env, (char *)name);
jclass result = NULL;
jclass result = nullptr;
DT_RETURN_MARK(FindClass, jclass, (const jclass&)result);
// This should be ClassNotFoundException imo.
@ -326,10 +326,10 @@ JNI_ENTRY(jclass, jni_FindClass(JNIEnv *env, const char *name))
Klass* k = thread->security_get_caller_class(0);
// default to the system loader when no context
Handle loader(THREAD, SystemDictionary::java_system_loader());
if (k != NULL) {
if (k != nullptr) {
// Special handling to make sure JNI_OnLoad and JNI_OnUnload are executed
// in the correct class context.
if (k->class_loader() == NULL &&
if (k->class_loader() == nullptr &&
k->name() == vmSymbols::jdk_internal_loader_NativeLibraries()) {
JavaValue result(T_OBJECT);
JavaCalls::call_static(&result, k,
@ -337,10 +337,10 @@ JNI_ENTRY(jclass, jni_FindClass(JNIEnv *env, const char *name))
vmSymbols::void_class_signature(),
CHECK_NULL);
// When invoked from JNI_OnLoad, NativeLibraries::getFromClass returns
// a non-NULL Class object. When invoked from JNI_OnUnload,
// it will return NULL to indicate no context.
// a non-null Class object. When invoked from JNI_OnUnload,
// it will return null to indicate no context.
oop mirror = result.get_oop();
if (mirror != NULL) {
if (mirror != nullptr) {
Klass* fromClass = java_lang_Class::as_Klass(mirror);
loader = Handle(THREAD, fromClass->class_loader());
protection_domain = Handle(THREAD, fromClass->protection_domain());
@ -353,7 +353,7 @@ JNI_ENTRY(jclass, jni_FindClass(JNIEnv *env, const char *name))
result = find_class_from_class_loader(env, class_name, true, loader,
protection_domain, true, thread);
if (log_is_enabled(Debug, class, resolve) && result != NULL) {
if (log_is_enabled(Debug, class, resolve) && result != nullptr) {
trace_class_resolution(java_lang_Class::as_Klass(JNIHandles::resolve_non_null(result)));
}
@ -366,12 +366,12 @@ DT_RETURN_MARK_DECL(FromReflectedMethod, jmethodID
JNI_ENTRY(jmethodID, jni_FromReflectedMethod(JNIEnv *env, jobject method))
HOTSPOT_JNI_FROMREFLECTEDMETHOD_ENTRY(env, method);
jmethodID ret = NULL;
jmethodID ret = nullptr;
DT_RETURN_MARK(FromReflectedMethod, jmethodID, (const jmethodID&)ret);
// method is a handle to a java.lang.reflect.Method object
oop reflected = JNIHandles::resolve_non_null(method);
oop mirror = NULL;
oop mirror = nullptr;
int slot = 0;
if (reflected->klass() == vmClasses::reflect_Constructor_klass()) {
@ -387,7 +387,7 @@ JNI_ENTRY(jmethodID, jni_FromReflectedMethod(JNIEnv *env, jobject method))
// Make sure class is initialized before handing id's out to methods
k1->initialize(CHECK_NULL);
Method* m = InstanceKlass::cast(k1)->method_with_idnum(slot);
ret = m==NULL? NULL : m->jmethod_id(); // return NULL if reflected method deleted
ret = m==nullptr? nullptr : m->jmethod_id(); // return null if reflected method deleted
return ret;
JNI_END
@ -397,7 +397,7 @@ DT_RETURN_MARK_DECL(FromReflectedField, jfieldID
JNI_ENTRY(jfieldID, jni_FromReflectedField(JNIEnv *env, jobject field))
HOTSPOT_JNI_FROMREFLECTEDFIELD_ENTRY(env, field);
jfieldID ret = NULL;
jfieldID ret = nullptr;
DT_RETURN_MARK(FromReflectedField, jfieldID, (const jfieldID&)ret);
// field is a handle to a java.lang.reflect.Field object
@ -414,7 +414,7 @@ JNI_ENTRY(jfieldID, jni_FromReflectedField(JNIEnv *env, jobject field))
if (modifiers & JVM_ACC_STATIC) {
intptr_t offset = InstanceKlass::cast(k1)->field_offset( slot );
JNIid* id = InstanceKlass::cast(k1)->jni_id_for(offset);
assert(id != NULL, "corrupt Field object");
assert(id != nullptr, "corrupt Field object");
debug_only(id->set_is_static_field_id();)
// A jfieldID for a static field is a JNIid specifying the field holder and the offset within the Klass*
ret = jfieldIDWorkaround::to_static_jfieldID(id);
@ -437,7 +437,7 @@ DT_RETURN_MARK_DECL(ToReflectedMethod, jobject
JNI_ENTRY(jobject, jni_ToReflectedMethod(JNIEnv *env, jclass cls, jmethodID method_id, jboolean isStatic))
HOTSPOT_JNI_TOREFLECTEDMETHOD_ENTRY(env, cls, (uintptr_t) method_id, isStatic);
jobject ret = NULL;
jobject ret = nullptr;
DT_RETURN_MARK(ToReflectedMethod, jobject, (const jobject&)ret);
methodHandle m (THREAD, Method::resolve_jmethod_id(method_id));
@ -458,19 +458,19 @@ DT_RETURN_MARK_DECL(GetSuperclass, jclass
JNI_ENTRY(jclass, jni_GetSuperclass(JNIEnv *env, jclass sub))
HOTSPOT_JNI_GETSUPERCLASS_ENTRY(env, sub);
jclass obj = NULL;
jclass obj = nullptr;
DT_RETURN_MARK(GetSuperclass, jclass, (const jclass&)obj);
oop mirror = JNIHandles::resolve_non_null(sub);
// primitive classes return NULL
if (java_lang_Class::is_primitive(mirror)) return NULL;
// primitive classes return null
if (java_lang_Class::is_primitive(mirror)) return nullptr;
// Rules of Class.getSuperClass as implemented by KLass::java_super:
// arrays return Object
// interfaces return NULL
// interfaces return null
// proper classes return Klass::super()
Klass* k = java_lang_Class::as_Klass(mirror);
if (k->is_interface()) return NULL;
if (k->is_interface()) return nullptr;
// return mirror for superclass
Klass* super = k->java_super();
@ -480,7 +480,7 @@ JNI_ENTRY(jclass, jni_GetSuperclass(JNIEnv *env, jclass sub))
: k->super() ) );
assert(super == super2,
"java_super computation depends on interface, array, other super");
obj = (super == NULL) ? NULL : (jclass) JNIHandles::make_local(THREAD, super->java_mirror());
obj = (super == nullptr) ? nullptr : (jclass) JNIHandles::make_local(THREAD, super->java_mirror());
return obj;
JNI_END
@ -498,7 +498,7 @@ JNI_ENTRY_NO_PRESERVE(jboolean, jni_IsAssignableFrom(JNIEnv *env, jclass sub, jc
}
Klass* sub_klass = java_lang_Class::as_Klass(sub_mirror);
Klass* super_klass = java_lang_Class::as_Klass(super_mirror);
assert(sub_klass != NULL && super_klass != NULL, "invalid arguments to jni_IsAssignableFrom");
assert(sub_klass != nullptr && super_klass != nullptr, "invalid arguments to jni_IsAssignableFrom");
jboolean ret = sub_klass->is_subtype_of(super_klass) ?
JNI_TRUE : JNI_FALSE;
@ -576,7 +576,7 @@ JNI_ENTRY_NO_PRESERVE(void, jni_ExceptionDescribe(JNIEnv *env))
Handle ex(thread, thread->pending_exception());
thread->clear_pending_exception();
jio_fprintf(defaultStream::error_stream(), "Exception ");
if (thread != NULL && thread->threadObj() != NULL) {
if (thread != nullptr && thread->threadObj() != nullptr) {
ResourceMark rm(THREAD);
jio_fprintf(defaultStream::error_stream(),
"in thread \"%s\" ", thread->name());
@ -613,7 +613,7 @@ JNI_ENTRY_NO_PRESERVE(void, jni_ExceptionClear(JNIEnv *env))
// The jni code might be using this API to clear java thrown exception.
// So just mark jvmti thread exception state as exception caught.
JvmtiThreadState *state = JavaThread::current()->jvmti_thread_state();
if (state != NULL && state->is_exception_detected()) {
if (state != nullptr && state->is_exception_detected()) {
state->set_exception_caught();
}
thread->clear_pending_exception();
@ -655,13 +655,13 @@ JNI_ENTRY(jobject, jni_PopLocalFrame(JNIEnv *env, jobject result))
Handle result_handle(thread, JNIHandles::resolve(result));
JNIHandleBlock* old_handles = thread->active_handles();
JNIHandleBlock* new_handles = old_handles->pop_frame_link();
if (new_handles != NULL) {
// As a sanity check we only release the handle blocks if the pop_frame_link is not NULL.
if (new_handles != nullptr) {
// As a sanity check we only release the handle blocks if the pop_frame_link is not null.
// This way code will still work if PopLocalFrame is called without a corresponding
// PushLocalFrame call. Note that we set the pop_frame_link to NULL explicitly, otherwise
// PushLocalFrame call. Note that we set the pop_frame_link to null explicitly, otherwise
// the release_block call will release the blocks.
thread->set_active_handles(new_handles);
old_handles->set_pop_frame_link(NULL); // clear link we won't release new_handles below
old_handles->set_pop_frame_link(nullptr); // clear link we won't release new_handles below
JNIHandleBlock::release_block(old_handles, thread); // may block
result = JNIHandles::make_local(thread, result_handle());
}
@ -737,7 +737,7 @@ JNI_LEAF(jobjectRefType, jni_GetObjectRefType(JNIEnv *env, jobject obj))
HOTSPOT_JNI_GETOBJECTREFTYPE_ENTRY(env, obj);
jobjectRefType ret = JNIInvalidRefType;
if (obj != NULL) {
if (obj != nullptr) {
ret = JNIHandles::handle_type(thread, obj);
}
@ -768,7 +768,7 @@ class JNI_ArgumentPusher : public SignatureIterator {
: SignatureIterator(method->signature(),
Fingerprinter(methodHandle(Thread::current(), method)).fingerprint())
{
_arguments = NULL;
_arguments = nullptr;
}
public:
@ -895,7 +895,7 @@ static void jni_invoke_static(JNIEnv *env, JavaValue* result, jobject receiver,
static void jni_invoke_nonstatic(JNIEnv *env, JavaValue* result, jobject receiver, JNICallType call_type, jmethodID method_id, JNI_ArgumentPusher *args, TRAPS) {
oop recv = JNIHandles::resolve(receiver);
if (recv == NULL) {
if (recv == nullptr) {
THROW(vmSymbols::java_lang_NullPointerException());
}
Handle h_recv(THREAD, recv);
@ -960,7 +960,7 @@ DT_RETURN_MARK_DECL(AllocObject, jobject
JNI_ENTRY(jobject, jni_AllocObject(JNIEnv *env, jclass clazz))
HOTSPOT_JNI_ALLOCOBJECT_ENTRY(env, clazz);
jobject ret = NULL;
jobject ret = nullptr;
DT_RETURN_MARK(AllocObject, jobject, (const jobject&)ret);
instanceOop i = InstanceKlass::allocate_instance(JNIHandles::resolve_non_null(clazz), CHECK_NULL);
@ -974,7 +974,7 @@ DT_RETURN_MARK_DECL(NewObjectA, jobject
JNI_ENTRY(jobject, jni_NewObjectA(JNIEnv *env, jclass clazz, jmethodID methodID, const jvalue *args))
HOTSPOT_JNI_NEWOBJECTA_ENTRY(env, clazz, (uintptr_t) methodID);
jobject obj = NULL;
jobject obj = nullptr;
DT_RETURN_MARK(NewObjectA, jobject, (const jobject&)obj);
instanceOop i = InstanceKlass::allocate_instance(JNIHandles::resolve_non_null(clazz), CHECK_NULL);
@ -992,7 +992,7 @@ DT_RETURN_MARK_DECL(NewObjectV, jobject
JNI_ENTRY(jobject, jni_NewObjectV(JNIEnv *env, jclass clazz, jmethodID methodID, va_list args))
HOTSPOT_JNI_NEWOBJECTV_ENTRY(env, clazz, (uintptr_t) methodID);
jobject obj = NULL;
jobject obj = nullptr;
DT_RETURN_MARK(NewObjectV, jobject, (const jobject&)obj);
instanceOop i = InstanceKlass::allocate_instance(JNIHandles::resolve_non_null(clazz), CHECK_NULL);
@ -1010,7 +1010,7 @@ DT_RETURN_MARK_DECL(NewObject, jobject
JNI_ENTRY(jobject, jni_NewObject(JNIEnv *env, jclass clazz, jmethodID methodID, ...))
HOTSPOT_JNI_NEWOBJECT_ENTRY(env, clazz, (uintptr_t) methodID);
jobject obj = NULL;
jobject obj = nullptr;
DT_RETURN_MARK(NewObject, jobject, (const jobject&)obj);
instanceOop i = InstanceKlass::allocate_instance(JNIHandles::resolve_non_null(clazz), CHECK_NULL);
@ -1040,11 +1040,11 @@ JNI_ENTRY_NO_PRESERVE(jboolean, jni_IsInstanceOf(JNIEnv *env, jobject obj, jclas
HOTSPOT_JNI_ISINSTANCEOF_ENTRY(env, obj, clazz);
jboolean ret = JNI_TRUE;
if (obj != NULL) {
if (obj != nullptr) {
ret = JNI_FALSE;
Klass* k = java_lang_Class::as_Klass(
JNIHandles::resolve_non_null(clazz));
if (k != NULL) {
if (k != nullptr) {
ret = JNIHandles::resolve_non_null(obj)->is_a(k) ? JNI_TRUE : JNI_FALSE;
}
}
@ -1061,13 +1061,13 @@ static jmethodID get_method_id(JNIEnv *env, jclass clazz, const char *name_str,
// The class should have been loaded (we have an instance of the class
// passed in) so the method and signature should already be in the symbol
// table. If they're not there, the method doesn't exist.
const char *name_to_probe = (name_str == NULL)
const char *name_to_probe = (name_str == nullptr)
? vmSymbols::object_initializer_name()->as_C_string()
: name_str;
TempNewSymbol name = SymbolTable::probe(name_to_probe, (int)strlen(name_to_probe));
TempNewSymbol signature = SymbolTable::probe(sig, (int)strlen(sig));
if (name == NULL || signature == NULL) {
if (name == nullptr || signature == nullptr) {
THROW_MSG_0(vmSymbols::java_lang_NoSuchMethodError(), name_str);
}
@ -1092,15 +1092,15 @@ static jmethodID get_method_id(JNIEnv *env, jclass clazz, const char *name_str,
if (klass->is_instance_klass()) {
m = InstanceKlass::cast(klass)->find_method(name, signature);
} else {
m = NULL;
m = nullptr;
}
} else {
m = klass->lookup_method(name, signature);
if (m == NULL && klass->is_instance_klass()) {
if (m == nullptr && klass->is_instance_klass()) {
m = InstanceKlass::cast(klass)->lookup_method_in_ordered_interfaces(name, signature);
}
}
if (m == NULL || (m->is_static() != is_static)) {
if (m == nullptr || (m->is_static() != is_static)) {
ResourceMark rm(THREAD);
THROW_MSG_0(vmSymbols::java_lang_NoSuchMethodError(), err_msg("%s%s.%s%s", is_static ? "static " : "", klass->signature_name(), name_str, sig));
}
@ -1548,7 +1548,7 @@ JNI_ENTRY(ResultType, \
va_start(args, methodID); \
JavaValue jvalue(Tag); \
JNI_ArgumentPusherVaArg ap(methodID, args); \
jni_invoke_static(env, &jvalue, NULL, JNI_STATIC, methodID, &ap, CHECK_0); \
jni_invoke_static(env, &jvalue, nullptr, JNI_STATIC, methodID, &ap, CHECK_0); \
va_end(args); \
ret = jvalue.get_##ResultType(); \
return ret;\
@ -1604,7 +1604,7 @@ JNI_ENTRY(ResultType, \
/* Make sure class is initialized before trying to invoke its method */ \
Klass* k = java_lang_Class::as_Klass(JNIHandles::resolve_non_null(cls)); \
k->initialize(CHECK_0); \
jni_invoke_static(env, &jvalue, NULL, JNI_STATIC, methodID, &ap, CHECK_0); \
jni_invoke_static(env, &jvalue, nullptr, JNI_STATIC, methodID, &ap, CHECK_0); \
va_end(args); \
ret = jvalue.get_##ResultType(); \
return ret;\
@ -1657,7 +1657,7 @@ JNI_ENTRY(ResultType, \
\
JavaValue jvalue(Tag); \
JNI_ArgumentPusherArray ap(methodID, args); \
jni_invoke_static(env, &jvalue, NULL, JNI_STATIC, methodID, &ap, CHECK_0); \
jni_invoke_static(env, &jvalue, nullptr, JNI_STATIC, methodID, &ap, CHECK_0); \
ret = jvalue.get_##ResultType(); \
return ret;\
JNI_END
@ -1708,7 +1708,7 @@ JNI_ENTRY(void, jni_CallStaticVoidMethod(JNIEnv *env, jclass cls, jmethodID meth
va_start(args, methodID);
JavaValue jvalue(T_VOID);
JNI_ArgumentPusherVaArg ap(methodID, args);
jni_invoke_static(env, &jvalue, NULL, JNI_STATIC, methodID, &ap, CHECK);
jni_invoke_static(env, &jvalue, nullptr, JNI_STATIC, methodID, &ap, CHECK);
va_end(args);
JNI_END
@ -1719,7 +1719,7 @@ JNI_ENTRY(void, jni_CallStaticVoidMethodV(JNIEnv *env, jclass cls, jmethodID met
JavaValue jvalue(T_VOID);
JNI_ArgumentPusherVaArg ap(methodID, args);
jni_invoke_static(env, &jvalue, NULL, JNI_STATIC, methodID, &ap, CHECK);
jni_invoke_static(env, &jvalue, nullptr, JNI_STATIC, methodID, &ap, CHECK);
JNI_END
@ -1729,7 +1729,7 @@ JNI_ENTRY(void, jni_CallStaticVoidMethodA(JNIEnv *env, jclass cls, jmethodID met
JavaValue jvalue(T_VOID);
JNI_ArgumentPusherArray ap(methodID, args);
jni_invoke_static(env, &jvalue, NULL, JNI_STATIC, methodID, &ap, CHECK);
jni_invoke_static(env, &jvalue, nullptr, JNI_STATIC, methodID, &ap, CHECK);
JNI_END
@ -1754,7 +1754,7 @@ JNI_ENTRY(jfieldID, jni_GetFieldID(JNIEnv *env, jclass clazz,
// table. If they're not there, the field doesn't exist.
TempNewSymbol fieldname = SymbolTable::probe(name, (int)strlen(name));
TempNewSymbol signame = SymbolTable::probe(sig, (int)strlen(sig));
if (fieldname == NULL || signame == NULL) {
if (fieldname == nullptr || signame == nullptr) {
ResourceMark rm;
THROW_MSG_0(vmSymbols::java_lang_NoSuchFieldError(), err_msg("%s.%s %s", k->external_name(), name, sig));
}
@ -1936,7 +1936,7 @@ DT_RETURN_MARK_DECL(ToReflectedField, jobject
JNI_ENTRY(jobject, jni_ToReflectedField(JNIEnv *env, jclass cls, jfieldID fieldID, jboolean isStatic))
HOTSPOT_JNI_TOREFLECTEDFIELD_ENTRY(env, cls, (uintptr_t) fieldID, isStatic);
jobject ret = NULL;
jobject ret = nullptr;
DT_RETURN_MARK(ToReflectedField, jobject, (const jobject&)ret);
fieldDescriptor fd;
@ -1971,7 +1971,7 @@ DT_RETURN_MARK_DECL(GetStaticFieldID, jfieldID
JNI_ENTRY(jfieldID, jni_GetStaticFieldID(JNIEnv *env, jclass clazz,
const char *name, const char *sig))
HOTSPOT_JNI_GETSTATICFIELDID_ENTRY(env, clazz, (char *) name, (char *) sig);
jfieldID ret = NULL;
jfieldID ret = nullptr;
DT_RETURN_MARK(GetStaticFieldID, jfieldID, (const jfieldID&)ret);
// The class should have been loaded (we have an instance of the class
@ -1979,7 +1979,7 @@ JNI_ENTRY(jfieldID, jni_GetStaticFieldID(JNIEnv *env, jclass clazz,
// table. If they're not there, the field doesn't exist.
TempNewSymbol fieldname = SymbolTable::probe(name, (int)strlen(name));
TempNewSymbol signame = SymbolTable::probe(sig, (int)strlen(sig));
if (fieldname == NULL || signame == NULL) {
if (fieldname == nullptr || signame == nullptr) {
THROW_MSG_0(vmSymbols::java_lang_NoSuchFieldError(), (char*) name);
}
Klass* k = java_lang_Class::as_Klass(JNIHandles::resolve_non_null(clazz));
@ -2013,7 +2013,7 @@ JNI_ENTRY(jobject, jni_GetStaticObjectField(JNIEnv *env, jclass clazz, jfieldID
// Keep JVMTI addition small and only check enabled flag here.
// jni_GetField_probe() assumes that is okay to create handles.
if (JvmtiExport::should_post_field_access()) {
JvmtiExport::jni_GetField_probe(thread, NULL, NULL, id->holder(), fieldID, true);
JvmtiExport::jni_GetField_probe(thread, nullptr, nullptr, id->holder(), fieldID, true);
}
jobject ret = JNIHandles::make_local(THREAD, id->holder()->java_mirror()->obj_field(id->offset()));
HOTSPOT_JNI_GETSTATICOBJECTFIELD_RETURN(ret);
@ -2037,7 +2037,7 @@ JNI_ENTRY(Return, jni_GetStatic##Result##Field(JNIEnv *env, jclass clazz, jfield
/* Keep JVMTI addition small and only check enabled flag here. */ \
/* jni_GetField_probe() assumes that is okay to create handles. */ \
if (JvmtiExport::should_post_field_access()) { \
JvmtiExport::jni_GetField_probe(thread, NULL, NULL, id->holder(), fieldID, true); \
JvmtiExport::jni_GetField_probe(thread, nullptr, nullptr, id->holder(), fieldID, true); \
} \
ret = id->holder()->java_mirror()-> Fieldname##_field (id->offset()); \
return ret;\
@ -2070,7 +2070,7 @@ JNI_ENTRY(void, jni_SetStaticObjectField(JNIEnv *env, jclass clazz, jfieldID fie
if (JvmtiExport::should_post_field_modification()) {
jvalue field_value;
field_value.l = value;
JvmtiExport::jni_SetField_probe(thread, NULL, NULL, id->holder(), fieldID, true, JVM_SIGNATURE_CLASS, (jvalue *)&field_value);
JvmtiExport::jni_SetField_probe(thread, nullptr, nullptr, id->holder(), fieldID, true, JVM_SIGNATURE_CLASS, (jvalue *)&field_value);
}
id->holder()->java_mirror()->obj_field_put(id->offset(), JNIHandles::resolve(value));
HOTSPOT_JNI_SETSTATICOBJECTFIELD_RETURN();
@ -2091,7 +2091,7 @@ JNI_ENTRY(void, jni_SetStatic##Result##Field(JNIEnv *env, jclass clazz, jfieldID
if (JvmtiExport::should_post_field_modification()) { \
jvalue field_value; \
field_value.unionType = value; \
JvmtiExport::jni_SetField_probe(thread, NULL, NULL, id->holder(), fieldID, true, SigType, (jvalue *)&field_value); \
JvmtiExport::jni_SetField_probe(thread, nullptr, nullptr, id->holder(), fieldID, true, SigType, (jvalue *)&field_value); \
} \
if (SigType == JVM_SIGNATURE_BOOLEAN) { value = ((jboolean)value) & 1; } \
id->holder()->java_mirror()-> Fieldname##_field_put (id->offset(), value); \
@ -2135,7 +2135,7 @@ DT_RETURN_MARK_DECL(NewString, jstring
JNI_ENTRY(jstring, jni_NewString(JNIEnv *env, const jchar *unicodeChars, jsize len))
HOTSPOT_JNI_NEWSTRING_ENTRY(env, (uint16_t *) unicodeChars, len);
jstring ret = NULL;
jstring ret = nullptr;
DT_RETURN_MARK(NewString, jstring, (const jstring&)ret);
oop string=java_lang_String::create_oop_from_unicode((jchar*) unicodeChars, len, CHECK_NULL);
ret = (jstring) JNIHandles::make_local(THREAD, string);
@ -2156,15 +2156,15 @@ JNI_END
JNI_ENTRY_NO_PRESERVE(const jchar*, jni_GetStringChars(
JNIEnv *env, jstring string, jboolean *isCopy))
HOTSPOT_JNI_GETSTRINGCHARS_ENTRY(env, string, (uintptr_t *) isCopy);
jchar* buf = NULL;
jchar* buf = nullptr;
oop s = JNIHandles::resolve_non_null(string);
typeArrayOop s_value = java_lang_String::value(s);
if (s_value != NULL) {
if (s_value != nullptr) {
int s_len = java_lang_String::length(s, s_value);
bool is_latin1 = java_lang_String::is_latin1(s);
buf = NEW_C_HEAP_ARRAY_RETURN_NULL(jchar, s_len + 1, mtInternal); // add one for zero termination
/* JNI Specification states return NULL on OOM */
if (buf != NULL) {
/* JNI Specification states return null on OOM */
if (buf != nullptr) {
if (s_len > 0) {
if (!is_latin1) {
ArrayAccess<>::arraycopy_to_native(s_value, (size_t) typeArrayOopDesc::element_offset<jchar>(0),
@ -2177,7 +2177,7 @@ JNI_ENTRY_NO_PRESERVE(const jchar*, jni_GetStringChars(
}
buf[s_len] = 0;
//%note jni_5
if (isCopy != NULL) {
if (isCopy != nullptr) {
*isCopy = JNI_TRUE;
}
}
@ -2190,7 +2190,7 @@ JNI_END
JNI_ENTRY_NO_PRESERVE(void, jni_ReleaseStringChars(JNIEnv *env, jstring str, const jchar *chars))
HOTSPOT_JNI_RELEASESTRINGCHARS_ENTRY(env, str, (uint16_t *) chars);
//%note jni_6
if (chars != NULL) {
if (chars != nullptr) {
// Since String objects are supposed to be immutable, don't copy any
// new data back. A bad user will have to go after the char array.
FreeHeap((void*) chars);
@ -2226,16 +2226,16 @@ JNI_END
JNI_ENTRY(const char*, jni_GetStringUTFChars(JNIEnv *env, jstring string, jboolean *isCopy))
HOTSPOT_JNI_GETSTRINGUTFCHARS_ENTRY(env, string, (uintptr_t *) isCopy);
char* result = NULL;
char* result = nullptr;
oop java_string = JNIHandles::resolve_non_null(string);
typeArrayOop s_value = java_lang_String::value(java_string);
if (s_value != NULL) {
if (s_value != nullptr) {
size_t length = java_lang_String::utf8_length(java_string, s_value);
/* JNI Specification states return NULL on OOM */
/* JNI Specification states return null on OOM */
result = AllocateHeap(length + 1, mtInternal, AllocFailStrategy::RETURN_NULL);
if (result != NULL) {
if (result != nullptr) {
java_lang_String::as_utf8_string(java_string, s_value, result, (int) length + 1);
if (isCopy != NULL) {
if (isCopy != nullptr) {
*isCopy = JNI_TRUE;
}
}
@ -2247,7 +2247,7 @@ JNI_END
JNI_LEAF(void, jni_ReleaseStringUTFChars(JNIEnv *env, jstring str, const char *chars))
HOTSPOT_JNI_RELEASESTRINGUTFCHARS_ENTRY(env, str, (char *) chars);
if (chars != NULL) {
if (chars != nullptr) {
FreeHeap((char*) chars);
}
HOTSPOT_JNI_RELEASESTRINGUTFCHARS_RETURN();
@ -2273,14 +2273,14 @@ DT_RETURN_MARK_DECL(NewObjectArray, jobjectArray
JNI_ENTRY(jobjectArray, jni_NewObjectArray(JNIEnv *env, jsize length, jclass elementClass, jobject initialElement))
HOTSPOT_JNI_NEWOBJECTARRAY_ENTRY(env, length, elementClass, initialElement);
jobjectArray ret = NULL;
jobjectArray ret = nullptr;
DT_RETURN_MARK(NewObjectArray, jobjectArray, (const jobjectArray&)ret);
Klass* ek = java_lang_Class::as_Klass(JNIHandles::resolve_non_null(elementClass));
Klass* ak = ek->array_klass(CHECK_NULL);
ObjArrayKlass::cast(ak)->initialize(CHECK_NULL);
objArrayOop result = ObjArrayKlass::cast(ak)->allocate(length, CHECK_NULL);
oop initial_value = JNIHandles::resolve(initialElement);
if (initial_value != NULL) { // array already initialized with NULL
if (initial_value != nullptr) { // array already initialized with null
for (int index = 0; index < length; index++) {
result->obj_at_put(index, initial_value);
}
@ -2294,7 +2294,7 @@ DT_RETURN_MARK_DECL(GetObjectArrayElement, jobject
JNI_ENTRY(jobject, jni_GetObjectArrayElement(JNIEnv *env, jobjectArray array, jsize index))
HOTSPOT_JNI_GETOBJECTARRAYELEMENT_ENTRY(env, array, index);
jobject ret = NULL;
jobject ret = nullptr;
DT_RETURN_MARK(GetObjectArrayElement, jobject, (const jobject&)ret);
objArrayOop a = objArrayOop(JNIHandles::resolve_non_null(array));
if (a->is_within_bounds(index)) {
@ -2318,7 +2318,7 @@ JNI_ENTRY(void, jni_SetObjectArrayElement(JNIEnv *env, jobjectArray array, jsize
objArrayOop a = objArrayOop(JNIHandles::resolve_non_null(array));
oop v = JNIHandles::resolve(value);
if (a->is_within_bounds(index)) {
if (v == NULL || v->is_a(ObjArrayKlass::cast(a->klass())->element_klass())) {
if (v == nullptr || v->is_a(ObjArrayKlass::cast(a->klass())->element_klass())) {
a->obj_at_put(index, v);
} else {
ResourceMark rm(THREAD);
@ -2352,7 +2352,7 @@ JNI_END
JNI_ENTRY(Return, \
jni_New##Result##Array(JNIEnv *env, jsize len)) \
EntryProbe; \
Return ret = NULL;\
Return ret = nullptr;\
DT_RETURN_MARK(New##Result##Array, Return, (const Return&)ret);\
\
oop obj= oopFactory::Allocator(len, CHECK_NULL); \
@ -2388,11 +2388,11 @@ DEFINE_NEWSCALARARRAY(jdoubleArray, new_doubleArray, Double,
// Return an address which will fault if the caller writes to it.
static char* get_bad_address() {
static char* bad_address = NULL;
if (bad_address == NULL) {
static char* bad_address = nullptr;
if (bad_address == nullptr) {
size_t size = os::vm_allocation_granularity();
bad_address = os::reserve_memory(size);
if (bad_address != NULL) {
if (bad_address != nullptr) {
os::protect_memory(bad_address, size, os::MEM_PROT_READ,
/*is_committed*/false);
MemTracker::record_virtual_memory_type((void*)bad_address, mtInternal);
@ -2414,17 +2414,17 @@ JNI_ENTRY_NO_PRESERVE(ElementType*, \
ElementType* result; \
int len = a->length(); \
if (len == 0) { \
if (isCopy != NULL) { \
if (isCopy != nullptr) { \
*isCopy = JNI_FALSE; \
} \
/* Empty array: legal but useless, can't return NULL. \
/* Empty array: legal but useless, can't return null. \
* Return a pointer to something useless. \
* Avoid asserts in typeArrayOop. */ \
result = (ElementType*)get_bad_address(); \
} else { \
/* JNI Specification states return NULL on OOM */ \
/* JNI Specification states return null on OOM */ \
result = NEW_C_HEAP_ARRAY_RETURN_NULL(ElementType, len, mtInternal); \
if (result != NULL) { \
if (result != nullptr) { \
/* copy the array to the c chunk */ \
ArrayAccess<>::arraycopy_to_native(a, typeArrayOopDesc::element_offset<ElementType>(0), \
result, len); \
@ -2634,12 +2634,12 @@ JNI_ENTRY(jint, jni_RegisterNatives(JNIEnv *env, jclass clazz,
oop cl = k->class_loader();
InstanceKlass* ik = InstanceKlass::cast(k);
// Check for a platform class
if ((cl == NULL || SystemDictionary::is_platform_class_loader(cl)) &&
if ((cl == nullptr || SystemDictionary::is_platform_class_loader(cl)) &&
ik->module()->is_named()) {
Klass* caller = thread->security_get_caller_class(1);
// If no caller class, or caller class has a different loader, then
// issue a warning below.
do_warning = (caller == NULL) || caller->class_loader() != cl;
do_warning = (caller == nullptr) || caller->class_loader() != cl;
}
}
@ -2655,7 +2655,7 @@ JNI_ENTRY(jint, jni_RegisterNatives(JNIEnv *env, jclass clazz,
TempNewSymbol name = SymbolTable::probe(meth_name, meth_name_len);
TempNewSymbol signature = SymbolTable::probe(meth_sig, (int)strlen(meth_sig));
if (name == NULL || signature == NULL) {
if (name == nullptr || signature == nullptr) {
ResourceMark rm(THREAD);
stringStream st;
st.print("Method %s.%s%s not found", k->external_name(), meth_name, meth_sig);
@ -2689,7 +2689,7 @@ JNI_ENTRY(jint, jni_UnregisterNatives(JNIEnv *env, jclass clazz))
Method* m = InstanceKlass::cast(k)->methods()->at(index);
if (m->is_native()) {
m->clear_native_function();
m->set_signature_handler(NULL);
m->set_signature_handler(nullptr);
}
}
}
@ -2710,7 +2710,7 @@ JNI_ENTRY(jint, jni_MonitorEnter(JNIEnv *env, jobject jobj))
DT_RETURN_MARK(MonitorEnter, jint, (const jint&)ret);
// If the object is null, we can't do anything with it
if (jobj == NULL) {
if (jobj == nullptr) {
THROW_(vmSymbols::java_lang_NullPointerException(), JNI_ERR);
}
@ -2728,7 +2728,7 @@ JNI_ENTRY(jint, jni_MonitorExit(JNIEnv *env, jobject jobj))
DT_RETURN_MARK(MonitorExit, jint, (const jint&)ret);
// Don't do anything with a null object
if (jobj == NULL) {
if (jobj == nullptr) {
THROW_(vmSymbols::java_lang_NullPointerException(), JNI_ERR);
}
@ -2786,7 +2786,7 @@ JNI_ENTRY(void, jni_GetStringUTFRegion(JNIEnv *env, jstring string, jsize start,
// as_utf8_string null-terminates the result string
} else {
// JDK null-terminates the buffer even in len is zero
if (buf != NULL) {
if (buf != nullptr) {
buf[0] = 0;
}
}
@ -2803,7 +2803,7 @@ JNI_ENTRY(void*, jni_GetPrimitiveArrayCritical(JNIEnv *env, jarray array, jboole
BasicType type = TypeArrayKlass::cast(a->klass())->element_type();
void* ret = arrayOop(a())->base(type);
if (isCopy != NULL) {
if (isCopy != nullptr) {
*isCopy = JNI_FALSE;
}
HOTSPOT_JNI_GETPRIMITIVEARRAYCRITICAL_RETURN(ret);
@ -2830,20 +2830,20 @@ JNI_ENTRY(const jchar*, jni_GetStringCritical(JNIEnv *env, jstring string, jbool
Universe::heap()->pin_object(thread, s_value());
ret = (jchar*) s_value->base(T_CHAR);
if (isCopy != NULL) *isCopy = JNI_FALSE;
if (isCopy != nullptr) *isCopy = JNI_FALSE;
} else {
// Inflate latin1 encoded string to UTF16
typeArrayOop s_value = java_lang_String::value(s);
int s_len = java_lang_String::length(s, s_value);
ret = NEW_C_HEAP_ARRAY_RETURN_NULL(jchar, s_len + 1, mtInternal); // add one for zero termination
/* JNI Specification states return NULL on OOM */
if (ret != NULL) {
/* JNI Specification states return null on OOM */
if (ret != nullptr) {
for (int i = 0; i < s_len; i++) {
ret[i] = ((jchar) s_value->byte_at(i)) & 0xff;
}
ret[s_len] = 0;
}
if (isCopy != NULL) *isCopy = JNI_TRUE;
if (isCopy != nullptr) *isCopy = JNI_TRUE;
}
HOTSPOT_JNI_GETSTRINGCRITICAL_RETURN((uint16_t *) ret);
return ret;
@ -2875,8 +2875,8 @@ JNI_ENTRY(jweak, jni_NewWeakGlobalRef(JNIEnv *env, jobject ref))
HOTSPOT_JNI_NEWWEAKGLOBALREF_ENTRY(env, ref);
Handle ref_handle(thread, JNIHandles::resolve(ref));
jweak ret = JNIHandles::make_weak_global(ref_handle, AllocFailStrategy::RETURN_NULL);
if (ret == NULL) {
THROW_OOP_(Universe::out_of_memory_error_c_heap(), NULL);
if (ret == nullptr) {
THROW_OOP_(Universe::out_of_memory_error_c_heap(), nullptr);
}
HOTSPOT_JNI_NEWWEAKGLOBALREF_RETURN(ret);
return ret;
@ -2904,12 +2904,12 @@ JNI_END
static int directBufferSupportInitializeStarted = 0;
static volatile int directBufferSupportInitializeEnded = 0;
static volatile int directBufferSupportInitializeFailed = 0;
static jclass bufferClass = NULL;
static jclass directBufferClass = NULL;
static jclass directByteBufferClass = NULL;
static jmethodID directByteBufferConstructor = NULL;
static jfieldID directBufferAddressField = NULL;
static jfieldID bufferCapacityField = NULL;
static jclass bufferClass = nullptr;
static jclass directBufferClass = nullptr;
static jclass directByteBufferClass = nullptr;
static jmethodID directByteBufferConstructor = nullptr;
static jfieldID directBufferAddressField = nullptr;
static jfieldID bufferCapacityField = nullptr;
static jclass lookupOne(JNIEnv* env, const char* name, TRAPS) {
Handle loader; // null (bootstrap) loader
@ -2918,19 +2918,19 @@ static jclass lookupOne(JNIEnv* env, const char* name, TRAPS) {
TempNewSymbol sym = SymbolTable::new_symbol(name);
jclass result = find_class_from_class_loader(env, sym, true, loader, protection_domain, true, CHECK_NULL);
if (log_is_enabled(Debug, class, resolve) && result != NULL) {
if (log_is_enabled(Debug, class, resolve) && result != nullptr) {
trace_class_resolution(java_lang_Class::as_Klass(JNIHandles::resolve_non_null(result)));
}
return result;
}
// These lookups are done with the NULL (bootstrap) ClassLoader to
// These lookups are done with the null (bootstrap) ClassLoader to
// circumvent any security checks that would be done by jni_FindClass.
JNI_ENTRY(bool, lookupDirectBufferClasses(JNIEnv* env))
{
if ((bufferClass = lookupOne(env, "java/nio/Buffer", thread)) == NULL) { return false; }
if ((directBufferClass = lookupOne(env, "sun/nio/ch/DirectBuffer", thread)) == NULL) { return false; }
if ((directByteBufferClass = lookupOne(env, "java/nio/DirectByteBuffer", thread)) == NULL) { return false; }
if ((bufferClass = lookupOne(env, "java/nio/Buffer", thread)) == nullptr) { return false; }
if ((directBufferClass = lookupOne(env, "sun/nio/ch/DirectBuffer", thread)) == nullptr) { return false; }
if ((directByteBufferClass = lookupOne(env, "java/nio/DirectByteBuffer", thread)) == nullptr) { return false; }
return true;
}
JNI_END
@ -2952,8 +2952,8 @@ static bool initializeDirectBufferSupport(JNIEnv* env, JavaThread* thread) {
directBufferClass = (jclass) env->NewGlobalRef(directBufferClass);
directByteBufferClass = (jclass) env->NewGlobalRef(directByteBufferClass);
// Global refs will be NULL if out-of-memory (no exception is pending)
if (bufferClass == NULL || directBufferClass == NULL || directByteBufferClass == NULL) {
// Global refs will be null if out-of-memory (no exception is pending)
if (bufferClass == nullptr || directBufferClass == nullptr || directByteBufferClass == nullptr) {
directBufferSupportInitializeFailed = 1;
return false;
}
@ -2978,9 +2978,9 @@ static bool initializeDirectBufferSupport(JNIEnv* env, JavaThread* thread) {
return false;
}
if ((directByteBufferConstructor == NULL) ||
(directBufferAddressField == NULL) ||
(bufferCapacityField == NULL)) {
if ((directByteBufferConstructor == nullptr) ||
(directBufferAddressField == nullptr) ||
(bufferCapacityField == nullptr)) {
directBufferSupportInitializeFailed = 1;
return false;
}
@ -3004,8 +3004,8 @@ extern "C" jobject JNICALL jni_NewDirectByteBuffer(JNIEnv *env, void* address, j
if (!directBufferSupportInitializeEnded) {
if (!initializeDirectBufferSupport(env, thread)) {
HOTSPOT_JNI_NEWDIRECTBYTEBUFFER_RETURN(NULL);
return NULL;
HOTSPOT_JNI_NEWDIRECTBYTEBUFFER_RETURN(nullptr);
return nullptr;
}
}
@ -3025,7 +3025,7 @@ extern "C" void* JNICALL jni_GetDirectBufferAddress(JNIEnv *env, jobject buf)
JavaThread* thread = JavaThread::thread_from_jni_environment(env);
HOTSPOT_JNI_GETDIRECTBUFFERADDRESS_ENTRY(env, buf);
void* ret = NULL;
void* ret = nullptr;
DT_RETURN_MARK(GetDirectBufferAddress, void*, (const void*&)ret);
if (!directBufferSupportInitializeEnded) {
@ -3034,7 +3034,7 @@ extern "C" void* JNICALL jni_GetDirectBufferAddress(JNIEnv *env, jobject buf)
}
}
if ((buf != NULL) && (!env->IsInstanceOf(buf, directBufferClass))) {
if ((buf != nullptr) && (!env->IsInstanceOf(buf, directBufferClass))) {
return 0;
}
@ -3061,7 +3061,7 @@ extern "C" jlong JNICALL jni_GetDirectBufferCapacity(JNIEnv *env, jobject buf)
}
}
if (buf == NULL) {
if (buf == nullptr) {
return -1;
}
@ -3097,7 +3097,7 @@ JNI_END
JNI_ENTRY(jboolean, jni_IsVirtualThread(JNIEnv* env, jobject obj))
oop thread_obj = JNIHandles::resolve_external_guard(obj);
if (thread_obj != NULL && thread_obj->is_a(vmClasses::BasicVirtualThread_klass())) {
if (thread_obj != nullptr && thread_obj->is_a(vmClasses::BasicVirtualThread_klass())) {
return JNI_TRUE;
} else {
return JNI_FALSE;
@ -3107,11 +3107,11 @@ JNI_END
// Structure containing all jni functions
struct JNINativeInterface_ jni_NativeInterface = {
NULL,
NULL,
NULL,
nullptr,
nullptr,
nullptr,
NULL,
nullptr,
jni_GetVersion,
@ -3464,7 +3464,7 @@ struct JNINativeInterface_* jni_functions_nocheck() {
}
static void post_thread_start_event(const JavaThread* jt) {
assert(jt != NULL, "invariant");
assert(jt != nullptr, "invariant");
EventThreadStart event;
if (event.should_commit()) {
event.set_thread(JFR_JVM_THREAD_ID(jt));
@ -3684,10 +3684,10 @@ _JNI_IMPORT_OR_EXPORT_ jint JNICALL JNI_GetCreatedJavaVMs(JavaVM **vm_buf, jsize
HOTSPOT_JNI_GETCREATEDJAVAVMS_ENTRY((void **) vm_buf, bufLen, (uintptr_t *) numVMs);
if (vm_created == 1) {
if (numVMs != NULL) *numVMs = 1;
if (numVMs != nullptr) *numVMs = 1;
if (bufLen > 0) *vm_buf = (JavaVM *)(&main_vm);
} else {
if (numVMs != NULL) *numVMs = 0;
if (numVMs != nullptr) *numVMs = 0;
}
HOTSPOT_JNI_GETCREATEDJAVAVMS_RETURN(JNI_OK);
return JNI_OK;
@ -3712,7 +3712,7 @@ static jint JNICALL jni_DestroyJavaVM_inner(JavaVM *vm) {
JavaVMAttachArgs destroyargs;
destroyargs.version = CurrentVersion;
destroyargs.name = (char *)"DestroyJavaVM";
destroyargs.group = NULL;
destroyargs.group = nullptr;
res = vm->AttachCurrentThread((void **)&env, (void *)&destroyargs);
if (res != JNI_OK) {
return res;
@ -3764,7 +3764,7 @@ static jint attach_current_thread(JavaVM *vm, void **penv, void *_args, bool dae
*/
Thread* t = Thread::current_or_null();
if (t != NULL) {
if (t != nullptr) {
// If executing from an atexit hook we may be in the VMThread.
if (t->is_Java_thread()) {
// If the thread has been attached this operation is a no-op
@ -3808,13 +3808,13 @@ static jint attach_current_thread(JavaVM *vm, void **penv, void *_args, bool dae
Threads::add(thread, daemon);
}
// Create thread group and name info from attach arguments
oop group = NULL;
char* thread_name = NULL;
if (args != NULL && Threads::is_supported_jni_version(args->version)) {
oop group = nullptr;
char* thread_name = nullptr;
if (args != nullptr && Threads::is_supported_jni_version(args->version)) {
group = JNIHandles::resolve(args->group);
thread_name = args->name; // may be NULL
thread_name = args->name; // may be null
}
if (group == NULL) group = Universe::main_thread_group();
if (group == nullptr) group = Universe::main_thread_group();
// Create Java level thread object and attach it to this thread
bool attach_failed = false;
@ -3889,7 +3889,7 @@ jint JNICALL jni_DetachCurrentThread(JavaVM *vm) {
Thread* current = Thread::current_or_null();
// If the thread has already been detached the operation is a no-op
if (current == NULL) {
if (current == nullptr) {
HOTSPOT_JNI_DETACHCURRENTTHREAD_RETURN(JNI_OK);
return JNI_OK;
}
@ -3945,14 +3945,14 @@ jint JNICALL jni_GetEnv(JavaVM *vm, void **penv, jint version) {
DT_RETURN_MARK(GetEnv, jint, (const jint&)ret);
if (vm_created == 0) {
*penv = NULL;
*penv = nullptr;
ret = JNI_EDETACHED;
return ret;
}
// No JVM TI with --enable-preview and no continuations support.
if (!VMContinuations && Arguments::enable_preview() && JvmtiExport::is_jvmti_version(version)) {
*penv = NULL;
*penv = nullptr;
ret = JNI_EVERSION;
return ret;
}
@ -3969,7 +3969,7 @@ jint JNICALL jni_GetEnv(JavaVM *vm, void **penv, jint version) {
#endif // !JVMPI_VERSION_1
Thread* thread = Thread::current_or_null();
if (thread != NULL && thread->is_Java_thread()) {
if (thread != nullptr && thread->is_Java_thread()) {
if (Threads::is_supported_jni_version_including_1_1(version)) {
*(JNIEnv**)penv = JavaThread::cast(thread)->jni_environment();
ret = JNI_OK;
@ -3988,12 +3988,12 @@ jint JNICALL jni_GetEnv(JavaVM *vm, void **penv, jint version) {
ret = JNI_EVERSION;
return ret;
} else {
*penv = NULL;
*penv = nullptr;
ret = JNI_EVERSION;
return ret;
}
} else {
*penv = NULL;
*penv = nullptr;
ret = JNI_EDETACHED;
return ret;
}
@ -4016,9 +4016,9 @@ jint JNICALL jni_AttachCurrentThreadAsDaemon(JavaVM *vm, void **penv, void *_arg
} // End extern "C"
const struct JNIInvokeInterface_ jni_InvokeInterface = {
NULL,
NULL,
NULL,
nullptr,
nullptr,
nullptr,
jni_DestroyJavaVM,
jni_AttachCurrentThread,
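
Several of the jni.cpp hunks above sit on the attach/detach path (attach_current_thread, jni_GetEnv, jni_DetachCurrentThread), where a null thread group falls back to the main thread group and a null thread name is tolerated. A minimal embedder-side sketch of that API, assuming a JavaVM* obtained elsewhere (for example via JNI_GetCreatedJavaVMs); the ensure_attached() helper and the thread name are made up:

#include <jni.h>

// Hypothetical helper: make sure the calling native thread has a JNIEnv.
static JNIEnv* ensure_attached(JavaVM* vm) {
  JNIEnv* env = nullptr;
  jint rc = vm->GetEnv(reinterpret_cast<void**>(&env), JNI_VERSION_1_8);
  if (rc == JNI_EDETACHED) {
    JavaVMAttachArgs args;
    args.version = JNI_VERSION_1_8;
    args.name = const_cast<char*>("embedder-worker");  // may also be nullptr
    args.group = nullptr;                              // VM substitutes the main thread group
    rc = vm->AttachCurrentThread(reinterpret_cast<void**>(&env), &args);
  }
  return rc == JNI_OK ? env : nullptr;
}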


@ -1,5 +1,5 @@
/*
* Copyright (c) 2001, 2022, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2001, 2023, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -92,7 +92,7 @@ static struct JNINativeInterface_ * unchecked_jni_NativeInterface;
extern "C" { \
result_type JNICALL header { \
Thread* cur = Thread::current_or_null(); \
if (cur == NULL || !cur->is_Java_thread()) { \
if (cur == nullptr || !cur->is_Java_thread()) { \
tty->print_cr("%s", fatal_using_jnienv_in_nonjava); \
os::abort(true); \
} \
@ -101,7 +101,7 @@ extern "C" { \
if (env != xenv) { \
NativeReportJNIFatalError(thr, warn_wrong_jnienv); \
} \
MACOS_AARCH64_ONLY(ThreadWXEnable __wx(WXWrite, thr)); \
MACOS_AARCH64_ONLY(ThreadWXEnable __wx(WXWrite, thr)); \
VM_ENTRY_BASE(result_type, header, thr)
@ -273,7 +273,7 @@ checkInstanceFieldID(JavaThread* thr, jfieldID fid, jobject obj, int ftype)
/* validate the object being passed and then get its class */
ASSERT_OOPS_ALLOWED;
oop oopObj = jniCheck::validate_object(thr, obj);
if (oopObj == NULL) {
if (oopObj == nullptr) {
ReportJNIFatalError(thr, fatal_null_object);
}
Klass* k_oop = oopObj->klass();
@ -303,7 +303,7 @@ checkString(JavaThread* thr, jstring js)
{
ASSERT_OOPS_ALLOWED;
oop s = jniCheck::validate_object(thr, js);
if ((s == NULL) || !java_lang_String::is_instance(s))
if ((s == nullptr) || !java_lang_String::is_instance(s))
ReportJNIFatalError(thr, fatal_non_string);
}
@ -314,7 +314,7 @@ check_is_array(JavaThread* thr, jarray jArray)
arrayOop aOop;
aOop = (arrayOop)jniCheck::validate_object(thr, jArray);
if (aOop == NULL || !aOop->is_array()) {
if (aOop == nullptr || !aOop->is_array()) {
ReportJNIFatalError(thr, fatal_non_array);
}
return aOop;
@ -369,9 +369,9 @@ static void* check_jni_wrap_copy_array(JavaThread* thr, jarray array,
static void* check_wrapped_array(JavaThread* thr, const char* fn_name,
void* obj, void* carray, size_t* rsz) {
if (carray == NULL) {
tty->print_cr("%s: elements vector NULL" PTR_FORMAT, fn_name, p2i(obj));
NativeReportJNIFatalError(thr, "Elements vector NULL");
if (carray == nullptr) {
tty->print_cr("%s: elements vector null" PTR_FORMAT, fn_name, p2i(obj));
NativeReportJNIFatalError(thr, "Elements vector null");
}
GuardedMemory guarded(carray);
void* orig_result = guarded.get_tag();
@ -381,13 +381,13 @@ static void* check_wrapped_array(JavaThread* thr, const char* fn_name,
DEBUG_ONLY(guarded.print_on(tty);) // This may crash.
NativeReportJNIFatalError(thr, err_msg("%s: failed bounds check", fn_name));
}
if (orig_result == NULL) {
if (orig_result == nullptr) {
tty->print_cr("%s: unrecognized elements. array: " PTR_FORMAT " carray: " PTR_FORMAT,
fn_name, p2i(obj), p2i(carray));
DEBUG_ONLY(guarded.print_on(tty);) // This may crash.
NativeReportJNIFatalError(thr, err_msg("%s: unrecognized elements", fn_name));
}
if (rsz != NULL) {
if (rsz != nullptr) {
*rsz = guarded.get_user_size();
}
return orig_result;
@ -422,12 +422,12 @@ static void* check_wrapped_array_release(JavaThread* thr, const char* fn_name,
}
oop jniCheck::validate_handle(JavaThread* thr, jobject obj) {
if ((obj != NULL) && (JNIHandles::handle_type(thr, obj) != JNIInvalidRefType)) {
if ((obj != nullptr) && (JNIHandles::handle_type(thr, obj) != JNIInvalidRefType)) {
ASSERT_OOPS_ALLOWED;
return JNIHandles::resolve_external_guard(obj);
}
ReportJNIFatalError(thr, fatal_bad_ref_to_jni);
return NULL;
return nullptr;
}
@ -435,7 +435,7 @@ Method* jniCheck::validate_jmethod_id(JavaThread* thr, jmethodID method_id) {
ASSERT_OOPS_ALLOWED;
// do the fast jmethodID check first
Method* m = Method::checked_resolve_jmethod_id(method_id);
if (m == NULL) {
if (m == nullptr) {
ReportJNIFatalError(thr, fatal_wrong_class_or_method);
}
// jmethodIDs are handles in the class loader data,
@ -448,10 +448,10 @@ Method* jniCheck::validate_jmethod_id(JavaThread* thr, jmethodID method_id) {
oop jniCheck::validate_object(JavaThread* thr, jobject obj) {
if (obj == NULL) return NULL;
if (obj == nullptr) return nullptr;
ASSERT_OOPS_ALLOWED;
oop oopObj = jniCheck::validate_handle(thr, obj);
if (oopObj == NULL) {
if (oopObj == nullptr) {
ReportJNIFatalError(thr, fatal_bad_ref_to_jni);
}
return oopObj;
@ -461,7 +461,7 @@ oop jniCheck::validate_object(JavaThread* thr, jobject obj) {
// passed to JNI findClass should not be decorated unless they are
// array descriptors.
void jniCheck::validate_class_descriptor(JavaThread* thr, const char* name) {
if (name == NULL) return; // implementation accepts NULL so just return
if (name == nullptr) return; // implementation accepts null so just return
size_t len = strlen(name);
@ -485,7 +485,7 @@ void jniCheck::validate_class_descriptor(JavaThread* thr, const char* name) {
Klass* jniCheck::validate_class(JavaThread* thr, jclass clazz, bool allow_primitive) {
ASSERT_OOPS_ALLOWED;
oop mirror = jniCheck::validate_handle(thr, clazz);
if (mirror == NULL) {
if (mirror == nullptr) {
ReportJNIFatalError(thr, fatal_received_null_class);
}
@ -495,7 +495,7 @@ Klass* jniCheck::validate_class(JavaThread* thr, jclass clazz, bool allow_primit
Klass* k = java_lang_Class::as_Klass(mirror);
// Make allowances for primitive classes ...
if (!(k != NULL || (allow_primitive && java_lang_Class::is_primitive(mirror)))) {
if (!(k != nullptr || (allow_primitive && java_lang_Class::is_primitive(mirror)))) {
ReportJNIFatalError(thr, fatal_class_not_a_class);
}
return k;
@ -503,7 +503,7 @@ Klass* jniCheck::validate_class(JavaThread* thr, jclass clazz, bool allow_primit
void jniCheck::validate_throwable_klass(JavaThread* thr, Klass* klass) {
ASSERT_OOPS_ALLOWED;
assert(klass != NULL, "klass argument must have a value");
assert(klass != nullptr, "klass argument must have a value");
if (!klass->is_instance_klass() ||
!klass->is_subclass_of(vmClasses::Throwable_klass())) {
@ -516,7 +516,7 @@ void jniCheck::validate_call(JavaThread* thr, jclass clazz, jmethodID method_id,
Method* m = jniCheck::validate_jmethod_id(thr, method_id);
InstanceKlass* holder = m->method_holder();
if (clazz != NULL) {
if (clazz != nullptr) {
Klass* k = jniCheck::validate_class(thr, clazz, false);
// Check that method is in the class, must be InstanceKlass
if (!InstanceKlass::cast(k)->is_subtype_of(holder)) {
@ -524,9 +524,9 @@ void jniCheck::validate_call(JavaThread* thr, jclass clazz, jmethodID method_id,
}
}
if (obj != NULL) {
if (obj != nullptr) {
oop recv = jniCheck::validate_object(thr, obj);
assert(recv != NULL, "validate_object checks that");
assert(recv != nullptr, "validate_object checks that");
Klass* rk = recv->klass();
// Check that the object is a subtype of method holder too.
@ -654,9 +654,9 @@ JNI_ENTRY_CHECKED(jint,
functionEnter(thr);
IN_VM(
oop oopObj = jniCheck::validate_object(thr, obj);
if (oopObj == NULL) {
// Unchecked Throw tolerates a NULL obj, so just warn
ReportJNIWarning(thr, "JNI Throw called with NULL throwable");
if (oopObj == nullptr) {
// Unchecked Throw tolerates a null obj, so just warn
ReportJNIWarning(thr, "JNI Throw called with null throwable");
} else {
jniCheck::validate_throwable_klass(thr, oopObj->klass());
}
@ -673,7 +673,7 @@ JNI_ENTRY_CHECKED(jint,
functionEnter(thr);
IN_VM(
Klass* k = jniCheck::validate_class(thr, clazz, false);
assert(k != NULL, "validate_class shouldn't return NULL Klass*");
assert(k != nullptr, "validate_class shouldn't return null Klass*");
jniCheck::validate_throwable_klass(thr, k);
)
jint result = UNCHECKED()->ThrowNew(env, clazz, msg);
@ -739,7 +739,7 @@ JNI_ENTRY_CHECKED(jobject,
jobject lobj))
functionEnter(thr);
IN_VM(
if (lobj != NULL) {
if (lobj != nullptr) {
jniCheck::validate_handle(thr, lobj);
}
)
@ -785,13 +785,13 @@ JNI_ENTRY_CHECKED(jboolean,
functionEnterExceptionAllowed(thr);
IN_VM(
/* This JNI function can be used to compare weak global references
* to NULL objects. If the handles are valid, but contain NULL,
* to nullptr objects. If the handles are valid, but contain nullptr,
* then don't attempt to validate the object.
*/
if (obj1 != NULL && jniCheck::validate_handle(thr, obj1) != NULL) {
if (obj1 != nullptr && jniCheck::validate_handle(thr, obj1) != nullptr) {
jniCheck::validate_object(thr, obj1);
}
if (obj2 != NULL && jniCheck::validate_handle(thr, obj2) != NULL) {
if (obj2 != nullptr && jniCheck::validate_handle(thr, obj2) != nullptr) {
jniCheck::validate_object(thr, obj2);
}
)
@ -805,7 +805,7 @@ JNI_ENTRY_CHECKED(jobject,
jobject ref))
functionEnter(thr);
IN_VM(
if (ref != NULL) {
if (ref != nullptr) {
jniCheck::validate_handle(thr, ref);
}
)
@ -932,7 +932,7 @@ JNI_ENTRY_CHECKED(ResultType, \
functionEnter(thr); \
va_list args; \
IN_VM( \
jniCheck::validate_call(thr, NULL, methodID, obj); \
jniCheck::validate_call(thr, nullptr, methodID, obj); \
) \
va_start(args,methodID); \
ResultType result =UNCHECKED()->Call##Result##MethodV(env, obj, methodID, \
@ -950,7 +950,7 @@ JNI_ENTRY_CHECKED(ResultType, \
va_list args)) \
functionEnter(thr); \
IN_VM(\
jniCheck::validate_call(thr, NULL, methodID, obj); \
jniCheck::validate_call(thr, nullptr, methodID, obj); \
) \
ResultType result = UNCHECKED()->Call##Result##MethodV(env, obj, methodID,\
args); \
@ -966,7 +966,7 @@ JNI_ENTRY_CHECKED(ResultType, \
const jvalue * args)) \
functionEnter(thr); \
IN_VM( \
jniCheck::validate_call(thr, NULL, methodID, obj); \
jniCheck::validate_call(thr, nullptr, methodID, obj); \
) \
ResultType result = UNCHECKED()->Call##Result##MethodA(env, obj, methodID,\
args); \
@ -993,7 +993,7 @@ JNI_ENTRY_CHECKED(void,
functionEnter(thr);
va_list args;
IN_VM(
jniCheck::validate_call(thr, NULL, methodID, obj);
jniCheck::validate_call(thr, nullptr, methodID, obj);
)
va_start(args,methodID);
UNCHECKED()->CallVoidMethodV(env,obj,methodID,args);
@ -1009,7 +1009,7 @@ JNI_ENTRY_CHECKED(void,
va_list args))
functionEnter(thr);
IN_VM(
jniCheck::validate_call(thr, NULL, methodID, obj);
jniCheck::validate_call(thr, nullptr, methodID, obj);
)
UNCHECKED()->CallVoidMethodV(env,obj,methodID,args);
thr->set_pending_jni_exception_check("CallVoidMethodV");
@ -1023,7 +1023,7 @@ JNI_ENTRY_CHECKED(void,
const jvalue * args))
functionEnter(thr);
IN_VM(
jniCheck::validate_call(thr, NULL, methodID, obj);
jniCheck::validate_call(thr, nullptr, methodID, obj);
)
UNCHECKED()->CallVoidMethodA(env,obj,methodID,args);
thr->set_pending_jni_exception_check("CallVoidMethodA");
@ -1442,14 +1442,14 @@ JNI_ENTRY_CHECKED(const jchar *,
IN_VM(
checkString(thr, str);
)
jchar* new_result = NULL;
jchar* new_result = nullptr;
const jchar *result = UNCHECKED()->GetStringChars(env,str,isCopy);
assert (isCopy == NULL || *isCopy == JNI_TRUE, "GetStringChars didn't return a copy as expected");
if (result != NULL) {
size_t len = UNCHECKED()->GetStringLength(env,str) + 1; // + 1 for NULL termination
assert (isCopy == nullptr || *isCopy == JNI_TRUE, "GetStringChars didn't return a copy as expected");
if (result != nullptr) {
size_t len = UNCHECKED()->GetStringLength(env,str) + 1; // + 1 for null termination
len *= sizeof(jchar);
new_result = (jchar*) GuardedMemory::wrap_copy(result, len, STRING_TAG);
if (new_result == NULL) {
if (new_result == nullptr) {
vm_exit_out_of_memory(len, OOM_MALLOC_ERROR, "checked_jni_GetStringChars");
}
// Avoiding call to UNCHECKED()->ReleaseStringChars() since that will fire unexpected dtrace probes
@ -1468,7 +1468,7 @@ JNI_ENTRY_CHECKED(void,
IN_VM(
checkString(thr, str);
)
if (chars == NULL) {
if (chars == nullptr) {
// still do the unchecked call to allow dtrace probes
UNCHECKED()->ReleaseStringChars(env,str,chars);
}
@ -1526,13 +1526,13 @@ JNI_ENTRY_CHECKED(const char *,
IN_VM(
checkString(thr, str);
)
char* new_result = NULL;
char* new_result = nullptr;
const char *result = UNCHECKED()->GetStringUTFChars(env,str,isCopy);
assert (isCopy == NULL || *isCopy == JNI_TRUE, "GetStringUTFChars didn't return a copy as expected");
if (result != NULL) {
size_t len = strlen(result) + 1; // + 1 for NULL termination
assert (isCopy == nullptr || *isCopy == JNI_TRUE, "GetStringUTFChars didn't return a copy as expected");
if (result != nullptr) {
size_t len = strlen(result) + 1; // + 1 for null termination
new_result = (char*) GuardedMemory::wrap_copy(result, len, STRING_UTF_TAG);
if (new_result == NULL) {
if (new_result == nullptr) {
vm_exit_out_of_memory(len, OOM_MALLOC_ERROR, "checked_jni_GetStringUTFChars");
}
// Avoiding call to UNCHECKED()->ReleaseStringUTFChars() since that will fire unexpected dtrace probes
@ -1551,7 +1551,7 @@ JNI_ENTRY_CHECKED(void,
IN_VM(
checkString(thr, str);
)
if (chars == NULL) {
if (chars == nullptr) {
// still do the unchecked call to allow dtrace probes
UNCHECKED()->ReleaseStringUTFChars(env,str,chars);
}
@ -1657,7 +1657,7 @@ JNI_ENTRY_CHECKED(ElementType *, \
ElementType *result = UNCHECKED()->Get##Result##ArrayElements(env, \
array, \
isCopy); \
if (result != NULL) { \
if (result != nullptr) { \
result = (ElementType *) check_jni_wrap_copy_array(thr, array, result); \
} \
functionExit(thr); \
@ -1838,7 +1838,7 @@ JNI_ENTRY_CHECKED(void *,
check_is_primitive_array(thr, array);
)
void *result = UNCHECKED()->GetPrimitiveArrayCritical(env, array, isCopy);
if (result != NULL) {
if (result != nullptr) {
result = check_jni_wrap_copy_array(thr, array, result);
}
functionExit(thr);
@ -1894,7 +1894,7 @@ JNI_ENTRY_CHECKED(jweak,
jobject obj))
functionEnter(thr);
IN_VM(
if (obj != NULL) {
if (obj != nullptr) {
jniCheck::validate_handle(thr, obj);
}
)
@ -1998,11 +1998,11 @@ JNI_END
* Structure containing all checked jni functions
*/
struct JNINativeInterface_ checked_jni_NativeInterface = {
NULL,
NULL,
NULL,
nullptr,
nullptr,
nullptr,
NULL,
nullptr,
checked_jni_GetVersion,


@ -1,5 +1,5 @@
/*
* Copyright (c) 2003, 2022, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2003, 2023, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -51,7 +51,7 @@ class jniCheck : public AllStatic {
static Klass* validate_class(JavaThread* thr, jclass clazz, bool allow_primitive = false);
static void validate_class_descriptor(JavaThread* thr, const char* name);
static void validate_throwable_klass(JavaThread* thr, Klass* klass);
static void validate_call(JavaThread* thr, jclass clazz, jmethodID method_id, jobject obj = NULL);
static void validate_call(JavaThread* thr, jclass clazz, jmethodID method_id, jobject obj = nullptr);
static Method* validate_jmethod_id(JavaThread* thr, jmethodID method_id);
};

File diff suppressed because it is too large


@ -1,5 +1,5 @@
/*
* Copyright (c) 2005, 2022, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2005, 2023, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -71,8 +71,8 @@ void JvmtiClassFileReconstituter::write_field_infos() {
guarantee(name_index != 0 && signature_index != 0, "bad constant pool index for field");
// int offset = ik()->field_offset( index );
int generic_signature_index = fs.generic_signature_index();
AnnotationArray* anno = fields_anno == NULL ? NULL : fields_anno->at(fs.index());
AnnotationArray* type_anno = fields_type_anno == NULL ? NULL : fields_type_anno->at(fs.index());
AnnotationArray* anno = fields_anno == nullptr ? nullptr : fields_anno->at(fs.index());
AnnotationArray* type_anno = fields_type_anno == nullptr ? nullptr : fields_type_anno->at(fs.index());
// JVMSpec| field_info {
// JVMSpec| u2 access_flags;
@ -95,10 +95,10 @@ void JvmtiClassFileReconstituter::write_field_infos() {
if (generic_signature_index != 0) {
++attr_count;
}
if (anno != NULL) {
if (anno != nullptr) {
++attr_count; // has RuntimeVisibleAnnotations attribute
}
if (type_anno != NULL) {
if (type_anno != nullptr) {
++attr_count; // has RuntimeVisibleTypeAnnotations attribute
}
@ -115,10 +115,10 @@ void JvmtiClassFileReconstituter::write_field_infos() {
if (generic_signature_index != 0) {
write_signature_attribute(generic_signature_index);
}
if (anno != NULL) {
if (anno != nullptr) {
write_annotations_attribute("RuntimeVisibleAnnotations", anno);
}
if (type_anno != NULL) {
if (type_anno != nullptr) {
write_annotations_attribute("RuntimeVisibleTypeAnnotations", type_anno);
}
}
@ -319,7 +319,7 @@ void JvmtiClassFileReconstituter::write_method_parameter_attribute(const ConstMe
// JVMSpec| u2 sourcefile_index;
// JVMSpec| }
void JvmtiClassFileReconstituter::write_source_file_attribute() {
assert(ik()->source_file_name() != NULL, "caller must check");
assert(ik()->source_file_name() != nullptr, "caller must check");
write_attribute_name_index("SourceFile");
write_u4(2); // always length 2
@ -333,7 +333,7 @@ void JvmtiClassFileReconstituter::write_source_file_attribute() {
// JSR45| u1 debug_extension[attribute_length];
// JSR45| }
void JvmtiClassFileReconstituter::write_source_debug_extension_attribute() {
assert(ik()->source_debug_extension() != NULL, "caller must check");
assert(ik()->source_debug_extension() != nullptr, "caller must check");
write_attribute_name_index("SourceDebugExtension");
int len = (int)strlen(ik()->source_debug_extension());
@ -493,10 +493,10 @@ void JvmtiClassFileReconstituter::write_record_attribute() {
length += 8; // Signature attribute size
assert(component->attributes_count() > 0, "Bad component attributes count");
}
if (component->annotations() != NULL) {
if (component->annotations() != nullptr) {
length += 6 + component->annotations()->length();
}
if (component->type_annotations() != NULL) {
if (component->type_annotations() != nullptr) {
length += 6 + component->type_annotations()->length();
}
}
@ -512,10 +512,10 @@ void JvmtiClassFileReconstituter::write_record_attribute() {
if (component->generic_signature_index() != 0) {
write_signature_attribute(component->generic_signature_index());
}
if (component->annotations() != NULL) {
if (component->annotations() != nullptr) {
write_annotations_attribute("RuntimeVisibleAnnotations", component->annotations());
}
if (component->type_annotations() != NULL) {
if (component->type_annotations() != nullptr) {
write_annotations_attribute("RuntimeVisibleTypeAnnotations", component->type_annotations());
}
}
@ -711,7 +711,7 @@ void JvmtiClassFileReconstituter::write_method_info(const methodHandle& method)
if (const_method->has_checked_exceptions()) {
++attr_count; // has Exceptions attribute
}
if (default_anno != NULL) {
if (default_anno != nullptr) {
++attr_count; // has AnnotationDefault attribute
}
if (const_method->has_method_parameters()) {
@ -724,13 +724,13 @@ void JvmtiClassFileReconstituter::write_method_info(const methodHandle& method)
if (generic_signature_index != 0) {
++attr_count;
}
if (anno != NULL) {
if (anno != nullptr) {
++attr_count; // has RuntimeVisibleAnnotations attribute
}
if (param_anno != NULL) {
if (param_anno != nullptr) {
++attr_count; // has RuntimeVisibleParameterAnnotations attribute
}
if (type_anno != NULL) {
if (type_anno != nullptr) {
++attr_count; // has RuntimeVisibleTypeAnnotations attribute
}
@ -741,7 +741,7 @@ void JvmtiClassFileReconstituter::write_method_info(const methodHandle& method)
if (const_method->has_checked_exceptions()) {
write_exceptions_attribute(const_method);
}
if (default_anno != NULL) {
if (default_anno != nullptr) {
write_annotations_attribute("AnnotationDefault", default_anno);
}
if (const_method->has_method_parameters()) {
@ -754,13 +754,13 @@ void JvmtiClassFileReconstituter::write_method_info(const methodHandle& method)
if (generic_signature_index != 0) {
write_signature_attribute(generic_signature_index);
}
if (anno != NULL) {
if (anno != nullptr) {
write_annotations_attribute("RuntimeVisibleAnnotations", anno);
}
if (param_anno != NULL) {
if (param_anno != nullptr) {
write_annotations_attribute("RuntimeVisibleParameterAnnotations", param_anno);
}
if (type_anno != NULL) {
if (type_anno != nullptr) {
write_annotations_attribute("RuntimeVisibleTypeAnnotations", type_anno);
}
}
@ -775,25 +775,25 @@ void JvmtiClassFileReconstituter::write_class_attributes() {
AnnotationArray* type_anno = ik()->class_type_annotations();
int attr_count = 0;
if (generic_signature != NULL) {
if (generic_signature != nullptr) {
++attr_count;
}
if (ik()->source_file_name() != NULL) {
if (ik()->source_file_name() != nullptr) {
++attr_count;
}
if (ik()->source_debug_extension() != NULL) {
if (ik()->source_debug_extension() != nullptr) {
++attr_count;
}
if (inner_classes_length > 0) {
++attr_count;
}
if (anno != NULL) {
if (anno != nullptr) {
++attr_count; // has RuntimeVisibleAnnotations attribute
}
if (type_anno != NULL) {
if (type_anno != nullptr) {
++attr_count; // has RuntimeVisibleTypeAnnotations attribute
}
if (cpool()->operands() != NULL) {
if (cpool()->operands() != nullptr) {
++attr_count;
}
if (ik()->nest_host_index() != 0) {
@ -805,25 +805,25 @@ void JvmtiClassFileReconstituter::write_class_attributes() {
if (ik()->permitted_subclasses() != Universe::the_empty_short_array()) {
++attr_count;
}
if (ik()->record_components() != NULL) {
if (ik()->record_components() != nullptr) {
++attr_count;
}
write_u2(attr_count);
if (generic_signature != NULL) {
if (generic_signature != nullptr) {
write_signature_attribute(symbol_to_cpool_index(generic_signature));
}
if (ik()->source_file_name() != NULL) {
if (ik()->source_file_name() != nullptr) {
write_source_file_attribute();
}
if (ik()->source_debug_extension() != NULL) {
if (ik()->source_debug_extension() != nullptr) {
write_source_debug_extension_attribute();
}
if (anno != NULL) {
if (anno != nullptr) {
write_annotations_attribute("RuntimeVisibleAnnotations", anno);
}
if (type_anno != NULL) {
if (type_anno != nullptr) {
write_annotations_attribute("RuntimeVisibleTypeAnnotations", type_anno);
}
if (ik()->nest_host_index() != 0) {
@ -835,10 +835,10 @@ void JvmtiClassFileReconstituter::write_class_attributes() {
if (ik()->permitted_subclasses() != Universe::the_empty_short_array()) {
write_permitted_subclasses_attribute();
}
if (ik()->record_components() != NULL) {
if (ik()->record_components() != nullptr) {
write_record_attribute();
}
if (cpool()->operands() != NULL) {
if (cpool()->operands() != nullptr) {
write_bootstrapmethod_attribute();
}
if (inner_classes_length > 0) {
@ -919,7 +919,7 @@ void JvmtiClassFileReconstituter::write_class_file_format() {
// JVMSpec| u2 super_class;
write_u2(class_symbol_to_cpool_index(ik()->name()));
Klass* super_class = ik()->super();
write_u2(super_class == NULL? 0 : // zero for java.lang.Object
write_u2(super_class == nullptr? 0 : // zero for java.lang.Object
class_symbol_to_cpool_index(super_class->name()));
// JVMSpec| u2 interfaces_count;
@ -967,7 +967,7 @@ address JvmtiClassFileReconstituter::writeable_address(size_t size) {
void JvmtiClassFileReconstituter::write_attribute_name_index(const char* name) {
TempNewSymbol sym = SymbolTable::probe(name, (int)strlen(name));
assert(sym != NULL, "attribute name symbol not found");
assert(sym != nullptr, "attribute name symbol not found");
u2 attr_name_index = symbol_to_cpool_index(sym);
assert(attr_name_index != 0, "attribute name symbol not in constant pool");
write_u2(attr_name_index);


@ -1,5 +1,5 @@
/*
* Copyright (c) 2005, 2022, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2005, 2023, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -56,13 +56,13 @@ class JvmtiConstantPoolReconstituter : public StackObj {
JvmtiConstantPoolReconstituter(InstanceKlass* ik);
~JvmtiConstantPoolReconstituter() {
if (_symmap != NULL) {
if (_symmap != nullptr) {
delete _symmap;
_symmap = NULL;
_symmap = nullptr;
}
if (_classmap != NULL) {
if (_classmap != nullptr) {
delete _classmap;
_classmap = NULL;
_classmap = nullptr;
}
}
@ -73,8 +73,8 @@ class JvmtiConstantPoolReconstituter : public StackObj {
int cpool_size() { return _cpool_size; }
void copy_cpool_bytes(unsigned char *cpool_bytes) {
if (cpool_bytes == NULL) {
assert(cpool_bytes != NULL, "cpool_bytes pointer must not be NULL");
if (cpool_bytes == nullptr) {
assert(cpool_bytes != nullptr, "cpool_bytes pointer must not be null");
return;
}
cpool()->copy_cpool_bytes(cpool_size(), _symmap, cpool_bytes);


@ -1,5 +1,5 @@
/*
* Copyright (c) 2003, 2022, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2003, 2023, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -53,7 +53,7 @@
//
// collector.collect();
// JvmtiCodeBlobDesc* blob = collector.first();
// while (blob != NULL) {
// while (blob != nullptr) {
// :
// blob = collector.next();
// }
@ -70,11 +70,11 @@ class CodeBlobCollector : StackObj {
static void do_vtable_stub(VtableStub* vs);
public:
CodeBlobCollector() {
_code_blobs = NULL;
_code_blobs = nullptr;
_pos = -1;
}
~CodeBlobCollector() {
if (_code_blobs != NULL) {
if (_code_blobs != nullptr) {
for (int i=0; i<_code_blobs->length(); i++) {
FreeHeap(_code_blobs->at(i));
}
@ -87,9 +87,9 @@ class CodeBlobCollector : StackObj {
// iteration support - return first code blob
JvmtiCodeBlobDesc* first() {
assert(_code_blobs != NULL, "not collected");
assert(_code_blobs != nullptr, "not collected");
if (_code_blobs->length() == 0) {
return NULL;
return nullptr;
}
_pos = 0;
return _code_blobs->at(0);
@ -99,7 +99,7 @@ class CodeBlobCollector : StackObj {
JvmtiCodeBlobDesc* next() {
assert(_pos >= 0, "iteration not started");
if (_pos+1 >= _code_blobs->length()) {
return NULL;
return nullptr;
}
return _code_blobs->at(++_pos);
}
@ -170,13 +170,13 @@ void CodeBlobCollector::do_vtable_stub(VtableStub* vs) {
void CodeBlobCollector::collect() {
assert_locked_or_safepoint(CodeCache_lock);
assert(_global_code_blobs == NULL, "checking");
assert(_global_code_blobs == nullptr, "checking");
// create the global list
_global_code_blobs = new (mtServiceability) GrowableArray<JvmtiCodeBlobDesc*>(50, mtServiceability);
// iterate over the stub code descriptors and put them in the list first.
for (StubCodeDesc* desc = StubCodeDesc::first(); desc != NULL; desc = StubCodeDesc::next(desc)) {
for (StubCodeDesc* desc = StubCodeDesc::first(); desc != nullptr; desc = StubCodeDesc::next(desc)) {
_global_code_blobs->append(new JvmtiCodeBlobDesc(desc->name(), desc->begin(), desc->end()));
}
@ -192,7 +192,7 @@ void CodeBlobCollector::collect() {
// make the global list the instance list so that it can be used
// for other iterations.
_code_blobs = _global_code_blobs;
_global_code_blobs = NULL;
_global_code_blobs = nullptr;
}
@ -212,7 +212,7 @@ jvmtiError JvmtiCodeBlobEvents::generate_dynamic_code_events(JvmtiEnv* env) {
// iterate over the collected list and post an event for each blob
JvmtiCodeBlobDesc* blob = collector.first();
while (blob != NULL) {
while (blob != nullptr) {
JvmtiExport::post_dynamic_code_generated(env, blob->name(), blob->code_begin(), blob->code_end());
blob = collector.next();
}
@ -262,7 +262,7 @@ void JvmtiCodeBlobEvents::build_jvmti_addr_location_map(nmethod *nm,
jint *map_length_ptr)
{
ResourceMark rm;
jvmtiAddrLocationMap* map = NULL;
jvmtiAddrLocationMap* map = nullptr;
jint map_length = 0;


@ -1,5 +1,5 @@
/*
* Copyright (c) 1997, 2020, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 1997, 2023, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2020 SAP SE. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
@ -27,7 +27,7 @@
#include "prims/jvmtiDeferredUpdates.hpp"
void JvmtiDeferredUpdates::create_for(JavaThread* thread) {
assert(thread->deferred_updates() == NULL, "already allocated");
assert(thread->deferred_updates() == nullptr, "already allocated");
thread->set_deferred_updates(new JvmtiDeferredUpdates());
}
@ -40,7 +40,7 @@ JvmtiDeferredUpdates::~JvmtiDeferredUpdates() {
}
void JvmtiDeferredUpdates::inc_relock_count_after_wait(JavaThread* thread) {
if (thread->deferred_updates() == NULL) {
if (thread->deferred_updates() == nullptr) {
create_for(thread);
}
thread->deferred_updates()->inc_relock_count_after_wait();
@ -49,11 +49,11 @@ void JvmtiDeferredUpdates::inc_relock_count_after_wait(JavaThread* thread) {
int JvmtiDeferredUpdates::get_and_reset_relock_count_after_wait(JavaThread* jt) {
JvmtiDeferredUpdates* updates = jt->deferred_updates();
int result = 0;
if (updates != NULL) {
if (updates != nullptr) {
result = updates->get_and_reset_relock_count_after_wait();
if (updates->count() == 0) {
delete updates;
jt->set_deferred_updates(NULL);
jt->set_deferred_updates(nullptr);
}
}
return result;
@ -61,7 +61,7 @@ int JvmtiDeferredUpdates::get_and_reset_relock_count_after_wait(JavaThread* jt)
void JvmtiDeferredUpdates::delete_updates_for_frame(JavaThread* jt, intptr_t* frame_id) {
JvmtiDeferredUpdates* updates = jt->deferred_updates();
if (updates != NULL) {
if (updates != nullptr) {
GrowableArray<jvmtiDeferredLocalVariableSet*>* list = updates->deferred_locals();
assert(list->length() > 0, "Updates holder not deleted");
int i = 0;
@ -78,7 +78,7 @@ void JvmtiDeferredUpdates::delete_updates_for_frame(JavaThread* jt, intptr_t* fr
}
} while ( i < list->length() );
if (updates->count() == 0) {
jt->set_deferred_updates(NULL);
jt->set_deferred_updates(nullptr);
// Free deferred updates.
// Note, the 'list' of local variable updates is embedded in 'updates'.
delete updates;


@ -1,5 +1,5 @@
/*
* Copyright (c) 1997, 2020, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 1997, 2023, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2020 SAP SE. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
@ -141,7 +141,7 @@ public:
static void create_for(JavaThread* thread);
static GrowableArray<jvmtiDeferredLocalVariableSet*>* deferred_locals(JavaThread* jt) {
return jt->deferred_updates() == NULL ? NULL : jt->deferred_updates()->deferred_locals();
return jt->deferred_updates() == nullptr ? nullptr : jt->deferred_updates()->deferred_locals();
}
// Relocking has to be deferred if the lock owning thread is currently waiting on the monitor.

File diff suppressed because it is too large


@ -1,5 +1,5 @@
/*
* Copyright (c) 2003, 2022, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2003, 2023, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -69,7 +69,7 @@
// JvmtiEnvBase
//
JvmtiEnvBase* JvmtiEnvBase::_head_environment = NULL;
JvmtiEnvBase* JvmtiEnvBase::_head_environment = nullptr;
bool JvmtiEnvBase::_globally_initialized = false;
volatile bool JvmtiEnvBase::_needs_clean_up = false;
@ -114,11 +114,11 @@ JvmtiEnvBase::initialize() {
// addition. Note: NoSafepointVerifier cannot, currently, be used before
// threads exist.
JvmtiEnvIterator it;
JvmtiEnvBase *previous_env = NULL;
for (JvmtiEnvBase* env = it.first(); env != NULL; env = it.next(env)) {
JvmtiEnvBase *previous_env = nullptr;
for (JvmtiEnvBase* env = it.first(); env != nullptr; env = it.next(env)) {
previous_env = env;
}
if (previous_env == NULL) {
if (previous_env == nullptr) {
_head_environment = this;
} else {
previous_env->set_next_environment(this);
@ -199,11 +199,11 @@ JvmtiEnvBase::use_version_1_2_semantics() {
JvmtiEnvBase::JvmtiEnvBase(jint version) : _env_event_enable() {
_version = version;
_env_local_storage = NULL;
_tag_map = NULL;
_env_local_storage = nullptr;
_tag_map = nullptr;
_native_method_prefix_count = 0;
_native_method_prefixes = NULL;
_next = NULL;
_native_method_prefixes = nullptr;
_next = nullptr;
_class_file_load_hook_ever_enabled = false;
// Moot since ClassFileLoadHook not yet enabled.
@ -211,7 +211,7 @@ JvmtiEnvBase::JvmtiEnvBase(jint version) : _env_event_enable() {
// for environment creation during ClassFileLoadHook.
_is_retransformable = true;
// all callbacks initially NULL
// all callbacks initially null
memset(&_event_callbacks,0,sizeof(jvmtiEventCallbacks));
// all capabilities initially off
@ -225,7 +225,7 @@ JvmtiEnvBase::JvmtiEnvBase(jint version) : _env_event_enable() {
JvmtiEventController::env_initialize((JvmtiEnv*)this);
#ifdef JVMTI_TRACE
_jvmti_external.functions = TraceJVMTI != NULL ? &jvmtiTrace_Interface : &jvmti_Interface;
_jvmti_external.functions = TraceJVMTI != nullptr ? &jvmtiTrace_Interface : &jvmti_Interface;
#else
_jvmti_external.functions = &jvmti_Interface;
#endif
@ -261,12 +261,12 @@ JvmtiEnvBase::env_dispose() {
JvmtiManageCapabilities::relinquish_capabilities(caps, caps, caps);
// Same situation as with events (see above)
set_native_method_prefixes(0, NULL);
set_native_method_prefixes(0, nullptr);
JvmtiTagMap* tag_map_to_clear = tag_map_acquire();
// A tag map can be big, clear it now to save memory until
// the destructor runs.
if (tag_map_to_clear != NULL) {
if (tag_map_to_clear != nullptr) {
tag_map_to_clear->clear();
}
@ -281,9 +281,9 @@ JvmtiEnvBase::~JvmtiEnvBase() {
// disposed environment could have been reallocated.
// Make sure it is gone.
JvmtiTagMap* tag_map_to_deallocate = _tag_map;
set_tag_map(NULL);
set_tag_map(nullptr);
// A tag map can be big, deallocate it now
if (tag_map_to_deallocate != NULL) {
if (tag_map_to_deallocate != nullptr) {
delete tag_map_to_deallocate;
}
@ -302,9 +302,9 @@ JvmtiEnvBase::periodic_clean_up() {
// Unlink all invalid environments from the list of environments
// and deallocate them
JvmtiEnvIterator it;
JvmtiEnvBase* previous_env = NULL;
JvmtiEnvBase* previous_env = nullptr;
JvmtiEnvBase* env = it.first();
while (env != NULL) {
while (env != nullptr) {
if (env->is_valid()) {
previous_env = env;
env = it.next(env);
@ -312,7 +312,7 @@ JvmtiEnvBase::periodic_clean_up() {
// This one isn't valid, remove it from the list and deallocate it
JvmtiEnvBase* defunct_env = env;
env = it.next(env);
if (previous_env == NULL) {
if (previous_env == nullptr) {
_head_environment = env;
} else {
previous_env->set_next_environment(env);
@ -401,16 +401,16 @@ JvmtiEnvBase::set_native_method_prefixes(jint prefix_count, char** prefixes) {
// allocate and install the new prefixex
if (prefix_count == 0 || !is_valid()) {
_native_method_prefix_count = 0;
_native_method_prefixes = NULL;
_native_method_prefixes = nullptr;
} else {
// there are prefixes, allocate an array to hold them, and fill it
char** new_prefixes = (char**)os::malloc((prefix_count) * sizeof(char*), mtInternal);
if (new_prefixes == NULL) {
if (new_prefixes == nullptr) {
return JVMTI_ERROR_OUT_OF_MEMORY;
}
for (int i = 0; i < prefix_count; i++) {
char* prefix = prefixes[i];
if (prefix == NULL) {
if (prefix == nullptr) {
for (int j = 0; j < (i-1); j++) {
os::free(new_prefixes[j]);
}
@ -418,7 +418,7 @@ JvmtiEnvBase::set_native_method_prefixes(jint prefix_count, char** prefixes) {
return JVMTI_ERROR_NULL_POINTER;
}
prefix = os::strdup(prefixes[i]);
if (prefix == NULL) {
if (prefix == nullptr) {
for (int j = 0; j < (i-1); j++) {
os::free(new_prefixes[j]);
}
@ -459,7 +459,7 @@ JvmtiEnvBase::get_all_native_method_prefixes(int* count_ptr) {
GrowableArray<char*>* prefix_array =new GrowableArray<char*>(5);
JvmtiEnvIterator it;
for (JvmtiEnvBase* env = it.first(); env != NULL; env = it.next(env)) {
for (JvmtiEnvBase* env = it.first(); env != nullptr; env = it.next(env)) {
int prefix_count = env->get_native_method_prefix_count();
char** prefixes = env->get_native_method_prefixes();
for (int j = 0; j < prefix_count; j++) {
@ -494,7 +494,7 @@ JvmtiEnvBase::set_event_callbacks(const jvmtiEventCallbacks* callbacks,
// Now that JvmtiThreadState_lock is held, prevent a possible race condition where events
// are re-enabled by a call to set event callbacks where the DisposeEnvironment
// occurs after the boiler-plate environment check and before the lock is acquired.
if (callbacks != NULL && is_valid()) {
if (callbacks != nullptr && is_valid()) {
if (size_of_callbacks < (jint)byte_cnt) {
byte_cnt = size_of_callbacks;
}
@ -508,7 +508,7 @@ JvmtiEnvBase::set_event_callbacks(const jvmtiEventCallbacks* callbacks,
// mean much better out of memory handling
unsigned char *
JvmtiEnvBase::jvmtiMalloc(jlong size) {
unsigned char* mem = NULL;
unsigned char* mem = nullptr;
jvmtiError result = allocate(size, &mem);
assert(result == JVMTI_ERROR_NONE, "Allocate failed");
return mem;
@ -540,11 +540,11 @@ void JvmtiEnvBase::destroy_jni_reference(JavaThread *thread, jobject jobj) {
jthread *
JvmtiEnvBase::new_jthreadArray(int length, Handle *handles) {
if (length == 0) {
return NULL;
return nullptr;
}
jthread* objArray = (jthread *) jvmtiMalloc(sizeof(jthread) * length);
NULL_CHECK(objArray, NULL);
NULL_CHECK(objArray, nullptr);
for (int i = 0; i < length; i++) {
objArray[i] = (jthread)jni_reference(handles[i]);
@ -555,11 +555,11 @@ JvmtiEnvBase::new_jthreadArray(int length, Handle *handles) {
jthreadGroup *
JvmtiEnvBase::new_jthreadGroupArray(int length, objArrayHandle groups) {
if (length == 0) {
return NULL;
return nullptr;
}
jthreadGroup* objArray = (jthreadGroup *) jvmtiMalloc(sizeof(jthreadGroup) * length);
NULL_CHECK(objArray, NULL);
NULL_CHECK(objArray, nullptr);
for (int i = 0; i < length; i++) {
objArray[i] = (jthreadGroup)JNIHandles::make_local(groups->obj_at(i));
@ -567,12 +567,12 @@ JvmtiEnvBase::new_jthreadGroupArray(int length, objArrayHandle groups) {
return objArray;
}
// Return the vframe on the specified thread and depth, NULL if no such frame.
// Return the vframe on the specified thread and depth, null if no such frame.
// The thread and the oops in the returned vframe might not have been processed.
javaVFrame*
JvmtiEnvBase::jvf_for_thread_and_depth(JavaThread* java_thread, jint depth) {
if (!java_thread->has_last_Java_frame()) {
return NULL;
return nullptr;
}
RegisterMap reg_map(java_thread,
RegisterMap::UpdateMap::include,
@ -582,7 +582,7 @@ JvmtiEnvBase::jvf_for_thread_and_depth(JavaThread* java_thread, jint depth) {
jvf = JvmtiEnvBase::check_and_skip_hidden_frames(java_thread, jvf);
for (int d = 0; jvf != NULL && d < depth; d++) {
for (int d = 0; jvf != nullptr && d < depth; d++) {
jvf = jvf->java_sender();
}
return jvf;
@ -595,7 +595,7 @@ JvmtiEnvBase::jvf_for_thread_and_depth(JavaThread* java_thread, jint depth) {
jclass
JvmtiEnvBase::get_jni_class_non_null(Klass* k) {
assert(k != NULL, "k != NULL");
assert(k != nullptr, "k != null");
Thread *thread = Thread::current();
return (jclass)jni_reference(Handle(thread, k->java_mirror()));
}
@ -627,11 +627,11 @@ JvmtiEnvBase::is_vthread_alive(oop vt) {
java_lang_VirtualThread::state(vt) != java_lang_VirtualThread::TERMINATED;
}
// Return JavaThread if virtual thread is mounted, NULL otherwise.
// Return JavaThread if virtual thread is mounted, null otherwise.
JavaThread* JvmtiEnvBase::get_JavaThread_or_null(oop vthread) {
oop carrier_thread = java_lang_VirtualThread::carrier_thread(vthread);
if (carrier_thread == NULL) {
return NULL;
if (carrier_thread == nullptr) {
return nullptr;
}
JavaThread* java_thread = java_lang_Thread::thread(carrier_thread);
@ -642,19 +642,19 @@ JavaThread* JvmtiEnvBase::get_JavaThread_or_null(oop vthread) {
StackWatermarkSet::start_processing(java_thread, StackWatermarkKind::gc);
oop cont = java_lang_VirtualThread::continuation(vthread);
assert(cont != NULL, "must be");
assert(cont != nullptr, "must be");
assert(Continuation::continuation_scope(cont) == java_lang_VirtualThread::vthread_scope(), "must be");
return Continuation::is_continuation_mounted(java_thread, cont) ? java_thread : NULL;
return Continuation::is_continuation_mounted(java_thread, cont) ? java_thread : nullptr;
}
javaVFrame*
JvmtiEnvBase::check_and_skip_hidden_frames(bool is_in_VTMS_transition, javaVFrame* jvf) {
// The second condition is needed to hide notification methods.
if (!is_in_VTMS_transition && (jvf == NULL || !jvf->method()->jvmti_mount_transition())) {
if (!is_in_VTMS_transition && (jvf == nullptr || !jvf->method()->jvmti_mount_transition())) {
return jvf; // No frames to skip.
}
// Find jvf with a method annotated with @JvmtiMountTransition.
for ( ; jvf != NULL; jvf = jvf->java_sender()) {
for ( ; jvf != nullptr; jvf = jvf->java_sender()) {
if (jvf->method()->jvmti_mount_transition()) { // Cannot actually appear in an unmounted continuation; they're never frozen.
jvf = jvf->java_sender(); // Skip annotated method.
break;
@ -676,7 +676,7 @@ JvmtiEnvBase::check_and_skip_hidden_frames(JavaThread* jt, javaVFrame* jvf) {
javaVFrame*
JvmtiEnvBase::check_and_skip_hidden_frames(oop vthread, javaVFrame* jvf) {
JvmtiThreadState* state = java_lang_Thread::jvmti_thread_state(vthread);
if (state == NULL) {
if (state == nullptr) {
// nothing to skip
return jvf;
}
@ -691,22 +691,22 @@ JvmtiEnvBase::get_vthread_jvf(oop vthread) {
Thread* cur_thread = Thread::current();
oop cont = java_lang_VirtualThread::continuation(vthread);
javaVFrame* jvf = NULL;
javaVFrame* jvf = nullptr;
JavaThread* java_thread = get_JavaThread_or_null(vthread);
if (java_thread != NULL) {
if (java_thread != nullptr) {
if (!java_thread->has_last_Java_frame()) {
// TBD: This is a temporary work around to avoid a guarantee caused by
// the native enterSpecial frame on the top. No frames will be found
// by the JVMTI functions such as GetStackTrace.
return NULL;
return nullptr;
}
vframeStream vfs(java_thread);
jvf = vfs.at_end() ? NULL : vfs.asJavaVFrame();
jvf = vfs.at_end() ? nullptr : vfs.asJavaVFrame();
jvf = check_and_skip_hidden_frames(java_thread, jvf);
} else {
vframeStream vfs(cont);
jvf = vfs.at_end() ? NULL : vfs.asJavaVFrame();
jvf = vfs.at_end() ? nullptr : vfs.asJavaVFrame();
jvf = check_and_skip_hidden_frames(vthread, jvf);
}
return jvf;
@ -732,16 +732,16 @@ jint
JvmtiEnvBase::get_thread_state(oop thread_oop, JavaThread* jt) {
jint state = 0;
if (thread_oop != NULL) {
if (thread_oop != nullptr) {
// Get most state bits.
state = (jint)java_lang_Thread::get_thread_status(thread_oop);
}
if (jt != NULL) {
if (jt != nullptr) {
// We have a JavaThread* so add more state bits.
JavaThreadState jts = jt->thread_state();
if (jt->is_carrier_thread_suspended() ||
((jt->jvmti_vthread() == NULL || jt->jvmti_vthread() == thread_oop) && jt->is_suspended())) {
((jt->jvmti_vthread() == nullptr || jt->jvmti_vthread() == thread_oop) && jt->is_suspended())) {
// Suspended non-virtual thread.
state |= JVMTI_THREAD_STATE_SUSPENDED;
}
@ -761,7 +761,7 @@ JvmtiEnvBase::get_vthread_state(oop thread_oop, JavaThread* java_thread) {
bool ext_suspended = JvmtiVTSuspender::is_vthread_suspended(thread_oop);
jint interrupted = java_lang_Thread::interrupted(thread_oop);
if (java_thread != NULL) {
if (java_thread != nullptr) {
// If virtual thread is blocked on a monitor enter the BLOCKED_ON_MONITOR_ENTER bit
// is set for carrier thread instead of virtual.
// Other state bits except filtered ones are expected to be the same.
@ -787,7 +787,7 @@ JvmtiEnvBase::get_vthread_state(oop thread_oop, JavaThread* java_thread) {
jvmtiError
JvmtiEnvBase::get_live_threads(JavaThread* current_thread, Handle group_hdl, jint *count_ptr, Handle **thread_objs_p) {
jint count = 0;
Handle *thread_objs = NULL;
Handle *thread_objs = nullptr;
ThreadsListEnumerator tle(current_thread, /* include_jvmti_agent_threads */ true);
int nthreads = tle.num_threads();
if (nthreads > 0) {
@ -861,7 +861,7 @@ JvmtiEnvBase::count_locked_objects(JavaThread *java_thread, Handle hobj) {
RegisterMap::ProcessFrames::include,
RegisterMap::WalkContinuation::skip);
for (javaVFrame *jvf = java_thread->last_java_vframe(&reg_map); jvf != NULL;
for (javaVFrame *jvf = java_thread->last_java_vframe(&reg_map); jvf != nullptr;
jvf = jvf->java_sender()) {
GrowableArray<MonitorInfo*>* mons = jvf->monitors();
if (!mons->is_empty()) {
@ -870,7 +870,7 @@ JvmtiEnvBase::count_locked_objects(JavaThread *java_thread, Handle hobj) {
if (mi->owner_is_scalar_replaced()) continue;
// see if owner of the monitor is our object
if (mi->owner() != NULL && mi->owner() == hobj()) {
if (mi->owner() != nullptr && mi->owner() == hobj()) {
ret++;
}
}
@ -891,28 +891,28 @@ JvmtiEnvBase::get_current_contended_monitor(JavaThread *calling_thread, JavaThre
*monitor_ptr = nullptr;
return JVMTI_ERROR_NONE;
}
oop obj = NULL;
oop obj = nullptr;
// The ObjectMonitor* can't be async deflated since we are either
// at a safepoint or the calling thread is operating on itself so
// it cannot leave the underlying wait()/enter() call.
ObjectMonitor *mon = java_thread->current_waiting_monitor();
if (mon == NULL) {
if (mon == nullptr) {
// thread is not doing an Object.wait() call
mon = java_thread->current_pending_monitor();
if (mon != NULL) {
if (mon != nullptr) {
// The thread is trying to enter() an ObjectMonitor.
obj = mon->object();
assert(obj != NULL, "ObjectMonitor should have a valid object!");
assert(obj != nullptr, "ObjectMonitor should have a valid object!");
}
// implied else: no contended ObjectMonitor
} else {
// thread is doing an Object.wait() call
obj = mon->object();
assert(obj != NULL, "Object.wait() should have an object");
assert(obj != nullptr, "Object.wait() should have an object");
}
if (obj == NULL) {
*monitor_ptr = NULL;
if (obj == nullptr) {
*monitor_ptr = nullptr;
} else {
HandleMark hm(current_thread);
Handle hobj(current_thread, obj);
@ -949,7 +949,7 @@ JvmtiEnvBase::get_owned_monitors(JavaThread *calling_thread, JavaThread* java_th
int depth = 0;
for (javaVFrame *jvf = get_cthread_last_java_vframe(java_thread, &reg_map);
jvf != NULL; jvf = jvf->java_sender()) {
jvf != nullptr; jvf = jvf->java_sender()) {
if (MaxJavaStackTraceDepth == 0 || depth++ < MaxJavaStackTraceDepth) { // check for stack too deep
// add locked objects for this frame into list
err = get_locked_objects_in_frame(calling_thread, java_thread, jvf, owned_monitors_list, depth-1);
@ -977,7 +977,7 @@ JvmtiEnvBase::get_owned_monitors(JavaThread* calling_thread, JavaThread* java_th
"call by myself or at handshake");
int depth = 0;
for ( ; jvf != NULL; jvf = jvf->java_sender()) {
for ( ; jvf != nullptr; jvf = jvf->java_sender()) {
if (MaxJavaStackTraceDepth == 0 || depth++ < MaxJavaStackTraceDepth) { // check for stack too deep
// Add locked objects for this frame into list.
err = get_locked_objects_in_frame(calling_thread, java_thread, jvf, owned_monitors_list, depth - 1);
@ -1009,25 +1009,25 @@ JvmtiEnvBase::get_locked_objects_in_frame(JavaThread* calling_thread, JavaThread
return err; // this javaVFrame holds no monitors
}
oop wait_obj = NULL;
oop wait_obj = nullptr;
{
// The ObjectMonitor* can't be async deflated since we are either
// at a safepoint or the calling thread is operating on itself so
// it cannot leave the underlying wait() call.
// Save object of current wait() call (if any) for later comparison.
ObjectMonitor *mon = java_thread->current_waiting_monitor();
if (mon != NULL) {
if (mon != nullptr) {
wait_obj = mon->object();
}
}
oop pending_obj = NULL;
oop pending_obj = nullptr;
{
// The ObjectMonitor* can't be async deflated since we are either
// at a safepoint or the calling thread is operating on itself so
// it cannot leave the underlying enter() call.
// Save object of current enter() call (if any) for later comparison.
ObjectMonitor *mon = java_thread->current_pending_monitor();
if (mon != NULL) {
if (mon != nullptr) {
pending_obj = mon->object();
}
}
@ -1038,7 +1038,7 @@ JvmtiEnvBase::get_locked_objects_in_frame(JavaThread* calling_thread, JavaThread
if (mi->owner_is_scalar_replaced()) continue;
oop obj = mi->owner();
if (obj == NULL) {
if (obj == nullptr) {
// this monitor doesn't have an owning object so skip it
continue;
}
@ -1098,10 +1098,10 @@ JvmtiEnvBase::get_stack_trace(javaVFrame *jvf,
if (start_depth != 0) {
if (start_depth > 0) {
for (int j = 0; j < start_depth && jvf != NULL; j++) {
for (int j = 0; j < start_depth && jvf != nullptr; j++) {
jvf = jvf->java_sender();
}
if (jvf == NULL) {
if (jvf == nullptr) {
// start_depth is deeper than the stack depth.
return JVMTI_ERROR_ILLEGAL_ARGUMENT;
}
@ -1110,13 +1110,13 @@ JvmtiEnvBase::get_stack_trace(javaVFrame *jvf,
// part of the stack.
// Optimize to limit the number of times that java_sender() is called.
javaVFrame *jvf_cursor = jvf;
javaVFrame *jvf_prev = NULL;
javaVFrame *jvf_prev_prev = NULL;
javaVFrame *jvf_prev = nullptr;
javaVFrame *jvf_prev_prev = nullptr;
int j = 0;
while (jvf_cursor != NULL) {
while (jvf_cursor != nullptr) {
jvf_prev_prev = jvf_prev;
jvf_prev = jvf_cursor;
for (j = 0; j > start_depth && jvf_cursor != NULL; j--) {
for (j = 0; j > start_depth && jvf_cursor != nullptr; j--) {
jvf_cursor = jvf_cursor->java_sender();
}
}
@ -1125,7 +1125,7 @@ JvmtiEnvBase::get_stack_trace(javaVFrame *jvf,
jvf = jvf_prev;
} else {
// We need to back up further to get to the right place.
if (jvf_prev_prev == NULL) {
if (jvf_prev_prev == nullptr) {
// The -start_depth is greater than the stack depth.
return JVMTI_ERROR_ILLEGAL_ARGUMENT;
}
@ -1140,7 +1140,7 @@ JvmtiEnvBase::get_stack_trace(javaVFrame *jvf,
}
}
}
for (; count < max_count && jvf != NULL; count++) {
for (; count < max_count && jvf != nullptr; count++) {
frame_buffer[count].method = jvf->method()->jmethod_id();
frame_buffer[count].location = (jvf->method()->is_native() ? -1 : jvf->bci());
jvf = jvf->java_sender();
@ -1183,7 +1183,7 @@ jint
JvmtiEnvBase::get_frame_count(javaVFrame *jvf) {
int count = 0;
while (jvf != NULL) {
while (jvf != nullptr) {
jvf = jvf->java_sender();
count++;
}
@ -1231,12 +1231,12 @@ JvmtiEnvBase::get_frame_location(javaVFrame* jvf, jint depth,
jmethodID* method_ptr, jlocation* location_ptr) {
int cur_depth = 0;
while (jvf != NULL && cur_depth < depth) {
while (jvf != nullptr && cur_depth < depth) {
jvf = jvf->java_sender();
cur_depth++;
}
assert(depth >= cur_depth, "ran out of frames too soon");
if (jvf == NULL) {
if (jvf == nullptr) {
return JVMTI_ERROR_NO_MORE_FRAMES;
}
Method* method = jvf->method();
@ -1285,16 +1285,16 @@ JvmtiEnvBase::get_frame_location(oop vthread_oop, jint depth,
jvmtiError
JvmtiEnvBase::set_frame_pop(JvmtiThreadState* state, javaVFrame* jvf, jint depth) {
for (int d = 0; jvf != NULL && d < depth; d++) {
for (int d = 0; jvf != nullptr && d < depth; d++) {
jvf = jvf->java_sender();
}
if (jvf == NULL) {
if (jvf == nullptr) {
return JVMTI_ERROR_NO_MORE_FRAMES;
}
if (jvf->method()->is_native()) {
return JVMTI_ERROR_OPAQUE_FRAME;
}
assert(jvf->frame_pointer() != NULL, "frame pointer mustn't be NULL");
assert(jvf->frame_pointer() != nullptr, "frame pointer mustn't be null");
int frame_number = (int)get_frame_count(jvf);
state->env_thread_state((JvmtiEnvBase*)this)->set_frame_pop(frame_number);
return JVMTI_ERROR_NONE;
@ -1303,27 +1303,27 @@ JvmtiEnvBase::set_frame_pop(JvmtiThreadState* state, javaVFrame* jvf, jint depth
bool
JvmtiEnvBase::is_cthread_with_mounted_vthread(JavaThread* jt) {
oop thread_oop = jt->threadObj();
assert(thread_oop != NULL, "sanity check");
assert(thread_oop != nullptr, "sanity check");
oop mounted_vt = jt->jvmti_vthread();
return mounted_vt != NULL && mounted_vt != thread_oop;
return mounted_vt != nullptr && mounted_vt != thread_oop;
}
bool
JvmtiEnvBase::is_cthread_with_continuation(JavaThread* jt) {
const ContinuationEntry* cont_entry = NULL;
const ContinuationEntry* cont_entry = nullptr;
if (jt->has_last_Java_frame()) {
cont_entry = jt->vthread_continuation();
}
return cont_entry != NULL && is_cthread_with_mounted_vthread(jt);
return cont_entry != nullptr && is_cthread_with_mounted_vthread(jt);
}
// If (thread == NULL) then return current thread object.
// If (thread == null) then return current thread object.
// Otherwise return JNIHandles::resolve_external_guard(thread).
oop
JvmtiEnvBase::current_thread_obj_or_resolve_external_guard(jthread thread) {
oop thread_obj = JNIHandles::resolve_external_guard(thread);
if (thread == NULL) {
if (thread == nullptr) {
thread_obj = get_vthread_or_thread_oop(JavaThread::current());
}
return thread_obj;
@ -1333,13 +1333,13 @@ jvmtiError
JvmtiEnvBase::get_threadOop_and_JavaThread(ThreadsList* t_list, jthread thread,
JavaThread** jt_pp, oop* thread_oop_p) {
JavaThread* cur_thread = JavaThread::current();
JavaThread* java_thread = NULL;
oop thread_oop = NULL;
JavaThread* java_thread = nullptr;
oop thread_oop = nullptr;
if (thread == NULL) {
if (thread == nullptr) {
java_thread = cur_thread;
thread_oop = get_vthread_or_thread_oop(java_thread);
if (thread_oop == NULL || !thread_oop->is_a(vmClasses::Thread_klass())) {
if (thread_oop == nullptr || !thread_oop->is_a(vmClasses::Thread_klass())) {
return JVMTI_ERROR_INVALID_THREAD;
}
} else {
@ -1349,12 +1349,12 @@ JvmtiEnvBase::get_threadOop_and_JavaThread(ThreadsList* t_list, jthread thread,
// an error from here if we didn't get a valid thread_oop. In a vthread case
// the cv_external_thread_to_JavaThread is expected to correctly set the
// thread_oop and return JVMTI_ERROR_INVALID_THREAD which we ignore here.
if (thread_oop == NULL || err != JVMTI_ERROR_INVALID_THREAD) {
if (thread_oop == nullptr || err != JVMTI_ERROR_INVALID_THREAD) {
*thread_oop_p = thread_oop;
return err;
}
}
if (java_thread == NULL && java_lang_VirtualThread::is_instance(thread_oop)) {
if (java_thread == nullptr && java_lang_VirtualThread::is_instance(thread_oop)) {
java_thread = get_JavaThread_or_null(thread_oop);
}
}
@ -1386,16 +1386,16 @@ JvmtiEnvBase::get_object_monitor_usage(JavaThread* calling_thread, jobject objec
}
ThreadsListHandle tlh(current_thread);
JavaThread *owning_thread = NULL;
ObjectMonitor *mon = NULL;
JavaThread *owning_thread = nullptr;
ObjectMonitor *mon = nullptr;
jvmtiMonitorUsage ret = {
NULL, 0, 0, NULL, 0, NULL
nullptr, 0, 0, nullptr, 0, nullptr
};
uint32_t debug_bits = 0;
// first derive the object's owner and entry_count (if any)
owning_thread = ObjectSynchronizer::get_lock_owner(tlh.list(), hobj);
if (owning_thread != NULL) {
if (owning_thread != nullptr) {
Handle th(current_thread, get_vthread_or_thread_oop(owning_thread));
ret.owner = (jthread)jni_reference(calling_thread, th);
@ -1411,7 +1411,7 @@ JvmtiEnvBase::get_object_monitor_usage(JavaThread* calling_thread, jobject objec
markWord mark = hobj->mark();
if (mark.has_monitor()) {
mon = mark.monitor();
assert(mon != NULL, "must have monitor");
assert(mon != nullptr, "must have monitor");
// this object has a heavyweight monitor
nWant = mon->contentions(); // # of threads contending for monitor
nWait = mon->waiters(); // # of threads in Object.wait()
@ -1437,11 +1437,11 @@ JvmtiEnvBase::get_object_monitor_usage(JavaThread* calling_thread, jobject objec
}
// now derive the rest of the fields
if (mon != NULL) {
if (mon != nullptr) {
// this object has a heavyweight monitor
// Number of waiters may actually be less than the waiter count.
// So NULL out memory so that unused memory will be NULL.
// So null out memory so that unused memory will be null.
memset(ret.waiters, 0, ret.waiter_count * sizeof(jthread *));
memset(ret.notify_waiters, 0, ret.notify_waiter_count * sizeof(jthread *));
@ -1468,13 +1468,13 @@ JvmtiEnvBase::get_object_monitor_usage(JavaThread* calling_thread, jobject objec
int offset = nWant; // add after any contending threads
ObjectWaiter *waiter = mon->first_waiter();
for (int i = 0, j = 0; i < nWait; i++) {
if (waiter == NULL) {
if (waiter == nullptr) {
// robustness: the waiting list has gotten smaller
nWait = j;
break;
}
JavaThread *w = mon->thread_of_waiter(waiter);
if (w != NULL) {
if (w != nullptr) {
// If the thread was found on the ObjectWaiter list, then
// it has not been notified. This thread can't change the
// state of the monitor so it doesn't need to be suspended.
@ -1503,13 +1503,13 @@ JvmtiEnvBase::get_object_monitor_usage(JavaThread* calling_thread, jobject objec
jvmtiError
JvmtiEnvBase::check_thread_list(jint count, const jthread* list) {
if (list == NULL && count != 0) {
if (list == nullptr && count != 0) {
return JVMTI_ERROR_NULL_POINTER;
}
for (int i = 0; i < count; i++) {
jthread thread = list[i];
oop thread_oop = JNIHandles::resolve_external_guard(thread);
if (thread_oop == NULL || !thread_oop->is_a(vmClasses::VirtualThread_klass())) {
if (thread_oop == nullptr || !thread_oop->is_a(vmClasses::VirtualThread_klass())) {
return JVMTI_ERROR_INVALID_THREAD;
}
}
@ -1544,14 +1544,14 @@ JvmtiEnvBase::suspend_thread(oop thread_oop, JavaThread* java_thread, bool singl
}
JvmtiVTSuspender::register_vthread_suspend(thread_h());
// Check if virtual thread is mounted and there is a java_thread.
// A non-NULL java_thread is always passed in the !single_suspend case.
// A non-null java_thread is always passed in the !single_suspend case.
oop carrier_thread = java_lang_VirtualThread::carrier_thread(thread_h());
java_thread = carrier_thread == NULL ? NULL : java_lang_Thread::thread(carrier_thread);
java_thread = carrier_thread == nullptr ? nullptr : java_lang_Thread::thread(carrier_thread);
}
// The java_thread can be still blocked in VTMS transition after a previous JVMTI resume call.
// There is no need to suspend the java_thread in this case. After vthread unblocking,
// it will check for ext_suspend request and suspend itself if necessary.
if (java_thread == NULL || java_thread->is_suspended()) {
if (java_thread == nullptr || java_thread->is_suspended()) {
// We are done if the virtual thread is unmounted or
// the java_thread is externally suspended.
return JVMTI_ERROR_NONE;
@ -1612,14 +1612,14 @@ JvmtiEnvBase::resume_thread(oop thread_oop, JavaThread* java_thread, bool single
}
JvmtiVTSuspender::register_vthread_resume(thread_h());
// Check if virtual thread is mounted and there is a java_thread.
// A non-NULL java_thread is always passed in the !single_resume case.
// A non-null java_thread is always passed in the !single_resume case.
oop carrier_thread = java_lang_VirtualThread::carrier_thread(thread_h());
java_thread = carrier_thread == NULL ? NULL : java_lang_Thread::thread(carrier_thread);
java_thread = carrier_thread == nullptr ? nullptr : java_lang_Thread::thread(carrier_thread);
}
// The java_thread can be still blocked in VTMS transition after a previous JVMTI suspend call.
// There is no need to resume the java_thread in this case. After vthread unblocking,
// it will check for is_vthread_suspended request and remain resumed if necessary.
if (java_thread == NULL || !java_thread->is_suspended()) {
if (java_thread == nullptr || !java_thread->is_suspended()) {
// We are done if the virtual thread is unmounted or
// the java_thread is not externally suspended.
return JVMTI_ERROR_NONE;
@ -1674,7 +1674,7 @@ jvmtiError ResourceTracker::allocate(jlong size, unsigned char** mem_ptr) {
_allocations->append(ptr);
*mem_ptr = ptr;
} else {
*mem_ptr = NULL;
*mem_ptr = nullptr;
_failed = true;
}
return err;
@ -1688,7 +1688,7 @@ unsigned char* ResourceTracker::allocate(jlong size) {
char* ResourceTracker::strdup(const char* str) {
char *dup_str = (char*)allocate(strlen(str)+1);
if (dup_str != NULL) {
if (dup_str != nullptr) {
strcpy(dup_str, str);
}
return dup_str;
@ -1709,7 +1709,7 @@ MultipleStackTracesCollector::fill_frames(jthread jt, JavaThread *thr, oop threa
#ifdef ASSERT
Thread *current_thread = Thread::current();
assert(SafepointSynchronize::is_at_safepoint() ||
thr == NULL ||
thr == nullptr ||
thr->is_handshake_safe_for(current_thread),
"unmounted virtual thread / call by myself / at safepoint / at handshake");
#endif
@ -1721,7 +1721,7 @@ MultipleStackTracesCollector::fill_frames(jthread jt, JavaThread *thr, oop threa
node->next = head();
set_head(node);
infop->frame_count = 0;
infop->frame_buffer = NULL;
infop->frame_buffer = nullptr;
infop->thread = jt;
if (java_lang_VirtualThread::is_instance(thread_oop)) {
@ -1735,7 +1735,7 @@ MultipleStackTracesCollector::fill_frames(jthread jt, JavaThread *thr, oop threa
}
} else {
state = JvmtiEnvBase::get_thread_state(thread_oop, thr);
if (thr != NULL && (state & JVMTI_THREAD_STATE_ALIVE) != 0) {
if (thr != nullptr && (state & JVMTI_THREAD_STATE_ALIVE) != 0) {
infop->frame_buffer = NEW_RESOURCE_ARRAY(jvmtiFrameInfo, max_frame_count());
_result = env()->get_stack_trace(thr, 0, max_frame_count(),
infop->frame_buffer, &(infop->frame_count));
@ -1762,13 +1762,13 @@ MultipleStackTracesCollector::allocate_and_fill_stacks(jint thread_count) {
// insert stack info backwards since linked list is backwards
// insert frame info forwards
// walk the StackInfoNodes
for (struct StackInfoNode *sin = head(); sin != NULL; sin = sin->next) {
for (struct StackInfoNode *sin = head(); sin != nullptr; sin = sin->next) {
jint frame_count = sin->info.frame_count;
size_t frames_size = frame_count * sizeof(jvmtiFrameInfo);
--si;
memcpy(si, &(sin->info), sizeof(jvmtiStackInfo));
if (frames_size == 0) {
si->frame_buffer = NULL;
si->frame_buffer = nullptr;
} else {
memcpy(fi, sin->info.frame_buffer, frames_size);
si->frame_buffer = fi; // point to the new allocated copy of the frames
@ -1789,8 +1789,8 @@ VM_GetThreadListStackTraces::doit() {
ThreadsListHandle tlh;
for (int i = 0; i < _thread_count; ++i) {
jthread jt = _thread_list[i];
JavaThread* java_thread = NULL;
oop thread_oop = NULL;
JavaThread* java_thread = nullptr;
oop thread_oop = nullptr;
jvmtiError err = JvmtiExport::cv_external_thread_to_JavaThread(tlh.list(), jt, &java_thread, &thread_oop);
if (err != JVMTI_ERROR_NONE) {
// We got an error code so we don't have a JavaThread *, but
@ -1798,7 +1798,7 @@ VM_GetThreadListStackTraces::doit() {
// thread_oop.
// In the virtual thread case the cv_external_thread_to_JavaThread is expected to correctly set
// the thread_oop and return JVMTI_ERROR_INVALID_THREAD which we ignore here.
if (thread_oop == NULL) {
if (thread_oop == nullptr) {
_collector.set_result(err);
return;
}
@ -1814,7 +1814,7 @@ GetSingleStackTraceClosure::do_thread(Thread *target) {
JavaThread *jt = JavaThread::cast(target);
oop thread_oop = jt->threadObj();
if (!jt->is_exiting() && thread_oop != NULL) {
if (!jt->is_exiting() && thread_oop != nullptr) {
ResourceMark rm;
_collector.fill_frames(_jthread, jt, thread_oop);
_collector.allocate_and_fill_stacks(1);
@ -1829,7 +1829,7 @@ VM_GetAllStackTraces::doit() {
_final_thread_count = 0;
for (JavaThreadIteratorWithHandle jtiwh; JavaThread *jt = jtiwh.next(); ) {
oop thread_oop = jt->threadObj();
if (thread_oop != NULL &&
if (thread_oop != nullptr &&
!jt->is_exiting() &&
java_lang_Thread::is_alive(thread_oop) &&
!jt->is_hidden_from_external_view()) {
@ -1880,7 +1880,7 @@ JvmtiEnvBase::check_top_frame(Thread* current_thread, JavaThread* java_thread,
// Check that the jobject class matches the return type signature.
jobject jobj = value.l;
if (tos == atos && jobj != NULL) { // NULL reference is allowed
if (tos == atos && jobj != nullptr) { // null reference is allowed
Handle ob_h(current_thread, JNIHandles::resolve_external_guard(jobj));
NULL_CHECK(ob_h, JVMTI_ERROR_INVALID_OBJECT);
Klass* ob_k = ob_h()->klass();
@ -1915,11 +1915,11 @@ JvmtiEnvBase::force_early_return(jthread thread, jvalue value, TosState tos) {
JvmtiVTMSTransitionDisabler disabler;
ThreadsListHandle tlh(current_thread);
JavaThread* java_thread = NULL;
oop thread_obj = NULL;
JavaThread* java_thread = nullptr;
oop thread_obj = nullptr;
jvmtiError err = get_threadOop_and_JavaThread(tlh.list(), thread, &java_thread, &thread_obj);
if (thread_obj != NULL && java_lang_VirtualThread::is_instance(thread_obj)) {
if (thread_obj != nullptr && java_lang_VirtualThread::is_instance(thread_obj)) {
// No support for virtual threads (yet).
return JVMTI_ERROR_OPAQUE_FRAME;
}
@ -1929,7 +1929,7 @@ JvmtiEnvBase::force_early_return(jthread thread, jvalue value, TosState tos) {
// retrieve or create the state
JvmtiThreadState* state = JvmtiThreadState::state_for(java_thread);
if (state == NULL) {
if (state == nullptr) {
return JVMTI_ERROR_THREAD_NOT_ALIVE;
}
@ -1990,8 +1990,8 @@ SetForceEarlyReturn::doit(Thread *target, bool self) {
if (_result != JVMTI_ERROR_NONE) {
return;
}
assert(_tos != atos || _value.l == NULL || ret_ob_h() != NULL,
"return object oop must not be NULL if jobject is not NULL");
assert(_tos != atos || _value.l == nullptr || ret_ob_h() != nullptr,
"return object oop must not be null if jobject is not null");
// Update the thread state to reflect that the top frame must be
// forced to return.
@ -2044,12 +2044,12 @@ JvmtiMonitorClosure::do_monitor(ObjectMonitor* mon) {
}
}
GrowableArray<OopHandle>* JvmtiModuleClosure::_tbl = NULL;
GrowableArray<OopHandle>* JvmtiModuleClosure::_tbl = nullptr;
void JvmtiModuleClosure::do_module(ModuleEntry* entry) {
assert_locked_or_safepoint(Module_lock);
OopHandle module = entry->module_handle();
guarantee(module.resolve() != NULL, "module object is NULL");
guarantee(module.resolve() != nullptr, "module object is null");
_tbl->push(module);
}
@ -2060,7 +2060,7 @@ JvmtiModuleClosure::get_all_modules(JvmtiEnv* env, jint* module_count_ptr, jobje
MutexLocker ml(Module_lock);
_tbl = new GrowableArray<OopHandle>(77);
if (_tbl == NULL) {
if (_tbl == nullptr) {
return JVMTI_ERROR_OUT_OF_MEMORY;
}
@ -2071,13 +2071,13 @@ JvmtiModuleClosure::get_all_modules(JvmtiEnv* env, jint* module_count_ptr, jobje
guarantee(len > 0, "at least one module must be present");
jobject* array = (jobject*)env->jvmtiMalloc((jlong)(len * sizeof(jobject)));
if (array == NULL) {
if (array == nullptr) {
return JVMTI_ERROR_OUT_OF_MEMORY;
}
for (jint idx = 0; idx < len; idx++) {
array[idx] = JNIHandles::make_local(_tbl->at(idx).resolve());
}
_tbl = NULL;
_tbl = nullptr;
*modules_ptr = array;
*module_count_ptr = len;
return JVMTI_ERROR_NONE;
@ -2139,7 +2139,7 @@ UpdateForPopTopFrameClosure::doit(Thread *target, bool self) {
// There can be two situations here:
// 1. There are no more java frames
// 2. Two top java frames are separated by non-java native frames
if (JvmtiEnvBase::jvf_for_thread_and_depth(java_thread, 1) == NULL) {
if (JvmtiEnvBase::jvf_for_thread_and_depth(java_thread, 1) == nullptr) {
_result = JVMTI_ERROR_NO_MORE_FRAMES;
return;
} else {
@ -2205,7 +2205,7 @@ SetFramePopClosure::doit(Thread *target, bool self) {
void
GetOwnedMonitorInfoClosure::do_thread(Thread *target) {
JavaThread *jt = JavaThread::cast(target);
if (!jt->is_exiting() && (jt->threadObj() != NULL)) {
if (!jt->is_exiting() && (jt->threadObj() != nullptr)) {
_result = ((JvmtiEnvBase *)_env)->get_owned_monitors(_calling_thread,
jt,
_owned_monitors_list);
@ -2215,7 +2215,7 @@ GetOwnedMonitorInfoClosure::do_thread(Thread *target) {
void
GetCurrentContendedMonitorClosure::do_thread(Thread *target) {
JavaThread *jt = JavaThread::cast(target);
if (!jt->is_exiting() && (jt->threadObj() != NULL)) {
if (!jt->is_exiting() && (jt->threadObj() != nullptr)) {
_result = ((JvmtiEnvBase *)_env)->get_current_contended_monitor(_calling_thread,
jt,
_owned_monitor_ptr,
@ -2240,7 +2240,7 @@ VM_VirtualThreadGetStackTrace::doit() {
void
GetStackTraceClosure::do_thread(Thread *target) {
JavaThread *jt = JavaThread::cast(target);
if (!jt->is_exiting() && jt->threadObj() != NULL) {
if (!jt->is_exiting() && jt->threadObj() != nullptr) {
_result = ((JvmtiEnvBase *)_env)->get_stack_trace(jt,
_start_depth, _max_count,
_frame_buffer, _count_ptr);
@ -2256,7 +2256,7 @@ PrintStackTraceClosure::do_thread_impl(Thread *target) {
ResourceMark rm (current_thread);
const char* tname = JvmtiTrace::safe_get_thread_name(java_thread);
oop t_oop = java_thread->jvmti_vthread();
t_oop = t_oop == NULL ? java_thread->threadObj() : t_oop;
t_oop = t_oop == nullptr ? java_thread->threadObj() : t_oop;
bool is_vt_suspended = java_lang_VirtualThread::is_instance(t_oop) && JvmtiVTSuspender::is_vthread_suspended(t_oop);
log_error(jvmti)("%s(%s) exiting: %d is_susp: %d is_thread_susp: %d is_vthread_susp: %d "
@ -2273,7 +2273,7 @@ PrintStackTraceClosure::do_thread_impl(Thread *target) {
ResourceMark rm(current_thread);
HandleMark hm(current_thread);
javaVFrame *jvf = java_thread->last_java_vframe(&reg_map);
while (jvf != NULL) {
while (jvf != nullptr) {
log_error(jvmti)(" %s:%d",
jvf->method()->external_name(),
jvf->method()->line_number_from_bci(jvf->bci()));
@ -2305,7 +2305,7 @@ void
GetFrameCountClosure::do_thread(Thread *target) {
JavaThread* jt = JavaThread::cast(target);
assert(target == jt, "just checking");
if (!jt->is_exiting() && jt->threadObj() != NULL) {
if (!jt->is_exiting() && jt->threadObj() != nullptr) {
_result = ((JvmtiEnvBase*)_env)->get_frame_count(jt, _count_ptr);
}
}
@ -2313,7 +2313,7 @@ GetFrameCountClosure::do_thread(Thread *target) {
void
GetFrameLocationClosure::do_thread(Thread *target) {
JavaThread *jt = JavaThread::cast(target);
if (!jt->is_exiting() && jt->threadObj() != NULL) {
if (!jt->is_exiting() && jt->threadObj() != nullptr) {
_result = ((JvmtiEnvBase*)_env)->get_frame_location(jt, _depth,
_method_ptr, _location_ptr);
}
@ -2332,7 +2332,7 @@ VirtualThreadGetOwnedMonitorInfoClosure::do_thread(Thread *target) {
javaVFrame *jvf = JvmtiEnvBase::get_vthread_jvf(_vthread_h());
if (!java_thread->is_exiting() && java_thread->threadObj() != NULL) {
if (!java_thread->is_exiting() && java_thread->threadObj() != nullptr) {
_result = ((JvmtiEnvBase *)_env)->get_owned_monitors(java_thread,
java_thread,
jvf,
@ -2385,7 +2385,7 @@ VirtualThreadGetThreadStateClosure::do_thread(Thread *target) {
oop carrier_thread_oop = java_lang_VirtualThread::carrier_thread(_vthread_h());
jint state;
if (vthread_state == java_lang_VirtualThread::RUNNING && carrier_thread_oop != NULL) {
if (vthread_state == java_lang_VirtualThread::RUNNING && carrier_thread_oop != nullptr) {
state = (jint) java_lang_Thread::get_thread_status(carrier_thread_oop);
JavaThread* java_thread = java_lang_Thread::thread(carrier_thread_oop);
if (java_thread->is_suspended()) {


@ -1,5 +1,5 @@
/*
* Copyright (c) 2003, 2022, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2003, 2023, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -91,7 +91,7 @@ class JvmtiEnvBase : public CHeapObj<mtInternal> {
// check if thread_oop represents a passive carrier thread
static bool is_passive_carrier_thread(JavaThread* java_thread, oop thread_oop) {
return java_thread != NULL && java_thread->jvmti_vthread() != NULL
return java_thread != nullptr && java_thread->jvmti_vthread() != nullptr
&& java_thread->jvmti_vthread() != thread_oop
&& java_thread->threadObj() == thread_oop;
}
@ -149,7 +149,7 @@ class JvmtiEnvBase : public CHeapObj<mtInternal> {
void set_next_environment(JvmtiEnvBase* env) { _next = env; }
static JvmtiEnv* head_environment() {
JVMTI_ONLY(return (JvmtiEnv*)_head_environment);
NOT_JVMTI(return NULL);
NOT_JVMTI(return nullptr);
}
public:
@ -167,7 +167,7 @@ class JvmtiEnvBase : public CHeapObj<mtInternal> {
return byte_offset_of(JvmtiEnvBase, _jvmti_external);
};
// If (thread == NULL) then return current thread object.
// If (thread == nullptr) then return current thread object.
// Otherwise return JNIHandles::resolve_external_guard(thread).
static oop current_thread_obj_or_resolve_external_guard(jthread thread);
@ -175,23 +175,23 @@ class JvmtiEnvBase : public CHeapObj<mtInternal> {
// A passive carrier thread is not treated as current.
static bool is_JavaThread_current(JavaThread* jt, oop thr_obj) {
JavaThread* current = JavaThread::current();
// jt can be NULL in case of a virtual thread
if (jt == NULL || jt != current) {
// jt can be null in case of a virtual thread
if (jt == nullptr || jt != current) {
return false;
}
oop cur_obj = current->jvmti_vthread();
// cur_obj == NULL is true for normal platform threads only
// cur_obj == nullptr is true for normal platform threads only
// otherwise it can be virtual or carrier thread.
return cur_obj == NULL || cur_obj == thr_obj;
return cur_obj == nullptr || cur_obj == thr_obj;
}
static jvmtiError get_JavaThread(ThreadsList* tlist, jthread thread, JavaThread** jt_pp) {
jvmtiError err = JVMTI_ERROR_NONE;
if (thread == NULL) {
if (thread == nullptr) {
*jt_pp = JavaThread::current();
} else {
err = JvmtiExport::cv_external_thread_to_JavaThread(tlist, thread, jt_pp, NULL);
err = JvmtiExport::cv_external_thread_to_JavaThread(tlist, thread, jt_pp, nullptr);
}
return err;
}
@ -200,7 +200,7 @@ class JvmtiEnvBase : public CHeapObj<mtInternal> {
// return virtual thread oop. Otherwise, return thread oop.
static oop get_vthread_or_thread_oop(JavaThread* jt) {
oop result = jt->threadObj();
if (jt->jvmti_vthread() != NULL) {
if (jt->jvmti_vthread() != nullptr) {
result = jt->jvmti_vthread();
}
return result;
@ -236,7 +236,7 @@ class JvmtiEnvBase : public CHeapObj<mtInternal> {
// not yet been deallocated. As a result, this test should only be used as an
// optimization for the no environment case.
static bool environments_might_exist() {
return head_environment() != NULL;
return head_environment() != nullptr;
}
static void check_for_periodic_clean_up();
@ -250,10 +250,10 @@ class JvmtiEnvBase : public CHeapObj<mtInternal> {
return JVMTI_ERROR_ILLEGAL_ARGUMENT;
}
if (size == 0) {
*mem_ptr = NULL;
*mem_ptr = nullptr;
} else {
*mem_ptr = (unsigned char *)os::malloc((size_t)size, mtInternal);
if (*mem_ptr == NULL) {
if (*mem_ptr == nullptr) {
return JVMTI_ERROR_OUT_OF_MEMORY;
}
}
@ -261,7 +261,7 @@ class JvmtiEnvBase : public CHeapObj<mtInternal> {
}
jvmtiError deallocate(unsigned char* mem) {
if (mem != NULL) {
if (mem != nullptr) {
os::free(mem);
}
return JVMTI_ERROR_NONE;
@ -294,7 +294,7 @@ class JvmtiEnvBase : public CHeapObj<mtInternal> {
bool has_callback(jvmtiEvent event_type) {
assert(event_type >= JVMTI_MIN_EVENT_TYPE_VAL &&
event_type <= JVMTI_MAX_EVENT_TYPE_VAL, "checking");
return ((void**)&_event_callbacks)[event_type-JVMTI_MIN_EVENT_TYPE_VAL] != NULL;
return ((void**)&_event_callbacks)[event_type-JVMTI_MIN_EVENT_TYPE_VAL] != nullptr;
}
jvmtiEventCallbacks* callbacks() {
@ -358,7 +358,7 @@ class JvmtiEnvBase : public CHeapObj<mtInternal> {
// check if virtual thread is not terminated (alive)
static bool is_vthread_alive(oop vt);
// return JavaThread if virtual thread is mounted, NULL otherwise
// return JavaThread if virtual thread is mounted, null otherwise
static JavaThread* get_JavaThread_or_null(oop vthread);
// get virtual thread last java vframe
@ -611,10 +611,10 @@ public:
MultipleStackTracesCollector(JvmtiEnv *env, jint max_frame_count)
: _env(env),
_max_frame_count(max_frame_count),
_stack_info(NULL),
_stack_info(nullptr),
_result(JVMTI_ERROR_NONE),
_frame_count_total(0),
_head(NULL) {
_head(nullptr) {
}
void set_result(jvmtiError result) { _result = result; }
void fill_frames(jthread jt, JavaThread *thr, oop thread_oop);


@ -1,5 +1,5 @@
/*
* Copyright (c) 2003, 2022, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2003, 2023, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -130,25 +130,25 @@ JvmtiEnvThreadState::JvmtiEnvThreadState(JvmtiThreadState* state, JvmtiEnvBase *
_event_enable() {
_state = state;
_env = (JvmtiEnv*)env;
_next = NULL;
_frame_pops = NULL;
_next = nullptr;
_frame_pops = nullptr;
_current_bci = 0;
_current_method_id = NULL;
_current_method_id = nullptr;
_breakpoint_posted = false;
_single_stepping_posted = false;
_agent_thread_local_storage_data = NULL;
_agent_thread_local_storage_data = nullptr;
}
JvmtiEnvThreadState::~JvmtiEnvThreadState() {
delete _frame_pops;
_frame_pops = NULL;
_frame_pops = nullptr;
}
bool JvmtiEnvThreadState::is_virtual() {
return _state->is_virtual();
}
// Use _thread_saved if cthread is detached from JavaThread (_thread == NULL).
// Use _thread_saved if cthread is detached from JavaThread (_thread == nullptr).
JavaThread* JvmtiEnvThreadState::get_thread_or_saved() {
return _state->get_thread_or_saved();
}
@ -215,25 +215,25 @@ JvmtiFramePops* JvmtiEnvThreadState::get_frame_pops() {
#ifdef ASSERT
Thread *current = Thread::current();
#endif
assert(get_thread() == NULL || get_thread()->is_handshake_safe_for(current),
assert(get_thread() == nullptr || get_thread()->is_handshake_safe_for(current),
"frame pop data only accessible from same or detached thread or direct handshake");
if (_frame_pops == NULL) {
if (_frame_pops == nullptr) {
_frame_pops = new JvmtiFramePops();
assert(_frame_pops != NULL, "_frame_pops != NULL");
assert(_frame_pops != nullptr, "_frame_pops != null");
}
return _frame_pops;
}
bool JvmtiEnvThreadState::has_frame_pops() {
return _frame_pops == NULL? false : (_frame_pops->length() > 0);
return _frame_pops == nullptr? false : (_frame_pops->length() > 0);
}
void JvmtiEnvThreadState::set_frame_pop(int frame_number) {
#ifdef ASSERT
Thread *current = Thread::current();
#endif
assert(get_thread() == NULL || get_thread()->is_handshake_safe_for(current),
assert(get_thread() == nullptr || get_thread()->is_handshake_safe_for(current),
"frame pop data only accessible from same or detached thread or direct handshake");
JvmtiFramePop fpop(frame_number);
JvmtiEventController::set_frame_pop(this, fpop);
@ -244,7 +244,7 @@ void JvmtiEnvThreadState::clear_frame_pop(int frame_number) {
#ifdef ASSERT
Thread *current = Thread::current();
#endif
assert(get_thread() == NULL || get_thread()->is_handshake_safe_for(current),
assert(get_thread() == nullptr || get_thread()->is_handshake_safe_for(current),
"frame pop data only accessible from same or detached thread or direct handshake");
JvmtiFramePop fpop(frame_number);
JvmtiEventController::clear_frame_pop(this, fpop);
@ -255,9 +255,9 @@ bool JvmtiEnvThreadState::is_frame_pop(int cur_frame_number) {
#ifdef ASSERT
Thread *current = Thread::current();
#endif
assert(get_thread() == NULL || get_thread()->is_handshake_safe_for(current),
assert(get_thread() == nullptr || get_thread()->is_handshake_safe_for(current),
"frame pop data only accessible from same or detached thread or direct handshake");
if (!jvmti_thread_state()->is_interp_only_mode() || _frame_pops == NULL) {
if (!jvmti_thread_state()->is_interp_only_mode() || _frame_pops == nullptr) {
return false;
}
JvmtiFramePop fp(cur_frame_number);
@ -274,7 +274,7 @@ class VM_VirtualThreadGetCurrentLocation : public VM_Operation {
public:
VM_VirtualThreadGetCurrentLocation(Handle vthread_h)
: _vthread_h(vthread_h),
_method_id(NULL),
_method_id(nullptr),
_bci(0),
_completed(false)
{}
@ -287,8 +287,8 @@ class VM_VirtualThreadGetCurrentLocation : public VM_Operation {
ResourceMark rm;
javaVFrame* jvf = JvmtiEnvBase::get_vthread_jvf(_vthread_h());
if (jvf != NULL) {
// jvf can be NULL, when the native enterSpecial frame is on the top.
if (jvf != nullptr) {
// jvf can be null, when the native enterSpecial frame is on the top.
Method* method = jvf->method();
_method_id = method->jmethod_id();
_bci = jvf->bci();
@ -312,7 +312,7 @@ class GetCurrentLocationClosure : public HandshakeClosure {
public:
GetCurrentLocationClosure()
: HandshakeClosure("GetCurrentLocation"),
_method_id(NULL),
_method_id(nullptr),
_bci(0),
_completed(false) {}
void do_thread(Thread *target) {
@ -327,7 +327,7 @@ class GetCurrentLocationClosure : public HandshakeClosure {
// We must recheck that the last Java frame still exists.
if (!jt->is_exiting() && jt->has_last_Java_frame()) {
javaVFrame* vf = jt->last_java_vframe(&rm);
if (vf != NULL) {
if (vf != nullptr) {
Method* method = vf->method();
_method_id = method->jmethod_id();
_bci = vf->bci();
@ -378,7 +378,7 @@ void JvmtiEnvThreadState::reset_current_location(jvmtiEvent event_type, bool ena
oop thread_oop = jvmti_thread_state()->get_thread_oop();
assert(!jvmti_thread_state()->is_in_VTMS_transition(), "sanity check");
if (thread == NULL && event_type == JVMTI_EVENT_SINGLE_STEP && is_virtual()) {
if (thread == nullptr && event_type == JVMTI_EVENT_SINGLE_STEP && is_virtual()) {
// Handle the unmounted virtual thread case.
jmethodID method_id;
int bci;


@ -1,5 +1,5 @@
/*
* Copyright (c) 2003, 2022, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2003, 2023, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -153,7 +153,7 @@ public:
// here.
void compare_and_set_current_location(Method* method, address location, jvmtiEvent event);
void clear_current_location() { set_current_location((jmethodID)NULL, 0); }
void clear_current_location() { set_current_location((jmethodID)nullptr, 0); }
void reset_current_location(jvmtiEvent event, bool enabled);


@ -1,5 +1,5 @@
/*
* Copyright (c) 2003, 2022, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2003, 2023, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -211,7 +211,7 @@ class EnterInterpOnlyModeClosure : public HandshakeClosure {
JavaThread* jt = JavaThread::cast(th);
JvmtiThreadState* state = jt->jvmti_thread_state();
assert(state != NULL, "sanity check");
assert(state != nullptr, "sanity check");
assert(state->get_thread() == jt, "handshake unsafe conditions");
if (!state->is_pending_interp_only_mode()) {
_completed = true;
@ -358,20 +358,20 @@ void JvmtiEventControllerPrivate::enter_interp_only_mode(JvmtiThreadState *state
JavaThread *target = state->get_thread();
Thread *current = Thread::current();
assert(state != NULL, "sanity check");
assert(state != nullptr, "sanity check");
if (state->is_pending_interp_only_mode()) {
return; // An EnterInterpOnlyModeClosure handshake is already pending for execution.
}
// This flag will be cleared in EnterInterpOnlyModeClosure handshake.
state->set_pending_interp_only_mode(true);
if (target == NULL) { // an unmounted virtual thread
if (target == nullptr) { // an unmounted virtual thread
return; // EnterInterpOnlyModeClosure will be executed right after mount.
}
EnterInterpOnlyModeClosure hs;
if (target->is_handshake_safe_for(current)) {
hs.do_thread(target);
} else {
assert(state->get_thread() != NULL, "sanity check");
assert(state->get_thread() != nullptr, "sanity check");
Handshake::execute(&hs, target);
guarantee(hs.completed(), "Handshake failed: Target thread is not alive?");
}
@ -437,7 +437,7 @@ JvmtiEventControllerPrivate::flush_object_free_events(JvmtiEnvBase* env) {
// need to ensure the env is cleaned up and any events that should
// be posted are posted.
JvmtiTagMap* tag_map = env->tag_map_acquire();
if (tag_map != NULL) {
if (tag_map != nullptr) {
tag_map->flush_object_free_events();
}
}
@ -447,7 +447,7 @@ JvmtiEventControllerPrivate::set_enabled_events_with_lock(JvmtiEnvBase* env, jlo
// The state for ObjectFree events must be enabled or disabled
// under the TagMap lock, to allow pending object posting events to complete.
JvmtiTagMap* tag_map = env->tag_map_acquire();
if (tag_map != NULL) {
if (tag_map != nullptr) {
MutexLocker ml(tag_map->lock(), Mutex::_no_safepoint_check_flag);
env->env_event_enable()->_event_enabled.set_bits(now_enabled);
} else {
@ -555,7 +555,7 @@ JvmtiEventControllerPrivate::recompute_env_thread_enabled(JvmtiEnvThreadState* e
// set external state accordingly. Only thread-filtered events are included.
jlong
JvmtiEventControllerPrivate::recompute_thread_enabled(JvmtiThreadState *state) {
if (state == NULL) {
if (state == nullptr) {
// associated JavaThread is exiting
return (jlong)0;
}
@ -571,7 +571,7 @@ JvmtiEventControllerPrivate::recompute_thread_enabled(JvmtiThreadState *state) {
// have been disposed. These JvmtiEnvThreadStates must not be filtered
// as recompute must be called on them to disable their events,
JvmtiEnvThreadStateIterator it(state);
for (JvmtiEnvThreadState* ets = it.first(); ets != NULL; ets = it.next(ets)) {
for (JvmtiEnvThreadState* ets = it.first(); ets != nullptr; ets = it.next(ets)) {
any_env_enabled |= recompute_env_thread_enabled(ets, state);
has_frame_pops |= ets->has_frame_pops();
}
@ -581,7 +581,7 @@ JvmtiEventControllerPrivate::recompute_thread_enabled(JvmtiThreadState *state) {
// mark if event is truly enabled on this thread in any environment
state->thread_event_enable()->_event_enabled.set_bits(any_env_enabled);
if (state->get_thread() != NULL) {
if (state->get_thread() != nullptr) {
// The JavaThread for carrier or mounted virtual thread case.
// Update the cached value for thread-specific should_post_on_exceptions value.
bool should_post_on_exceptions = (any_env_enabled & SHOULD_POST_ON_EXCEPTIONS_BITS) != 0;
@ -632,7 +632,7 @@ JvmtiEventControllerPrivate::recompute_enabled() {
// This must be done separately from thread-filtered events, since some
// events can occur before any threads exist.
JvmtiEnvIterator it;
for (JvmtiEnvBase* env = it.first(); env != NULL; env = it.next(env)) {
for (JvmtiEnvBase* env = it.first(); env != nullptr; env = it.next(env)) {
any_env_thread_enabled |= recompute_env_enabled(env);
}
@ -648,7 +648,7 @@ JvmtiEventControllerPrivate::recompute_enabled() {
// create the thread state for mounted virtual thread if missing
oop vt_oop = tp->jvmti_vthread();
if (vt_oop != NULL && java_lang_VirtualThread::is_instance(vt_oop)) {
if (vt_oop != nullptr && java_lang_VirtualThread::is_instance(vt_oop)) {
state = JvmtiThreadState::state_for_while_locked(tp, vt_oop);
}
}
@ -742,7 +742,7 @@ JvmtiEventControllerPrivate::thread_started(JavaThread *thread) {
MutexLocker mu(JvmtiThreadState_lock);
// create the thread state if missing
JvmtiThreadState *state = JvmtiThreadState::state_for_while_locked(thread);
if (state != NULL) { // skip threads with no JVMTI thread state
if (state != nullptr) { // skip threads with no JVMTI thread state
recompute_thread_enabled(state);
}
}
@ -758,7 +758,7 @@ JvmtiEventControllerPrivate::thread_ended(JavaThread *thread) {
EC_TRACE(("[%s] # thread ended", JvmtiTrace::safe_get_thread_name(thread)));
JvmtiThreadState *state = thread->jvmti_thread_state();
assert(state != NULL, "else why are we here?");
assert(state != nullptr, "else why are we here?");
delete state;
}
@ -809,7 +809,7 @@ JvmtiEventControllerPrivate::set_extension_event_callback(JvmtiEnvBase *env,
// set event callbacks, where the DisposeEnvironment occurs after the boiler-plate
// environment check and before the lock is acquired.
// We can safely do the is_valid check now, as JvmtiThreadState_lock is held.
bool enabling = (callback != NULL) && (env->is_valid());
bool enabling = (callback != nullptr) && (env->is_valid());
// update the callback
jvmtiExtEventCallbacks* ext_callbacks = env->ext_callbacks();
@ -855,7 +855,7 @@ JvmtiEventControllerPrivate::env_initialize(JvmtiEnvBase *env) {
env->initialize();
// add the JvmtiEnvThreadState to each JvmtiThreadState
for (JvmtiThreadState *state = JvmtiThreadState::first(); state != NULL; state = state->next()) {
for (JvmtiThreadState *state = JvmtiThreadState::first(); state != nullptr; state = state->next()) {
state->add_env(env);
assert((JvmtiEnv*)(state->env_thread_state(env)->get_env()) == env, "sanity check");
}
@ -871,11 +871,11 @@ JvmtiEventControllerPrivate::env_dispose(JvmtiEnvBase *env) {
// Before the environment is marked disposed, disable all events on this
// environment (by zapping the callbacks). As a result, the disposed
// environment will not call event handlers.
set_event_callbacks(env, NULL, 0);
set_event_callbacks(env, nullptr, 0);
for (jint extension_event_index = EXT_MIN_EVENT_TYPE_VAL;
extension_event_index <= EXT_MAX_EVENT_TYPE_VAL;
++extension_event_index) {
set_extension_event_callback(env, extension_event_index, NULL);
set_extension_event_callback(env, extension_event_index, nullptr);
}
// Let the environment finish disposing itself.
@ -889,18 +889,18 @@ JvmtiEventControllerPrivate::set_user_enabled(JvmtiEnvBase *env, JavaThread *thr
assert(Threads::number_of_threads() == 0 || JvmtiThreadState_lock->is_locked(), "sanity check");
EC_TRACE(("[%s] # user %s event %s",
thread==NULL? "ALL": JvmtiTrace::safe_get_thread_name(thread),
thread==nullptr? "ALL": JvmtiTrace::safe_get_thread_name(thread),
enabled? "enabled" : "disabled", JvmtiTrace::event_name(event_type)));
if (thread == NULL && thread_oop_h() == NULL) {
// NULL thread and NULL thread_oop now indicate setting globally instead
// of setting thread specific since NULL thread by itself means an
if (thread == nullptr && thread_oop_h() == nullptr) {
// null thread and null thread_oop now indicate setting globally instead
// of setting thread specific since null thread by itself means an
// unmounted virtual thread.
env->env_event_enable()->set_user_enabled(event_type, enabled);
} else {
// create the thread state (if it didn't exist before)
JvmtiThreadState *state = JvmtiThreadState::state_for_while_locked(thread, thread_oop_h());
if (state != NULL) {
if (state != nullptr) {
state->env_thread_state(env)->event_enable()->set_user_enabled(event_type, enabled);
}
}


@ -1,5 +1,5 @@
/*
* Copyright (c) 2003, 2022, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2003, 2023, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -210,14 +210,14 @@ public:
&& ((int)event_type <= TOTAL_MAX_EVENT_TYPE_VAL);
}
// Use (thread == NULL) to enable/disable an event globally.
// Use (thread != NULL) to enable/disable an event for a particular thread.
// Use (thread == nullptr) to enable/disable an event globally.
// Use (thread != nullptr) to enable/disable an event for a particular thread.
// thread is ignored for events that can only be specified globally
static void set_user_enabled(JvmtiEnvBase *env, JavaThread *thread, oop thread_oop,
jvmtiEvent event_type, bool enabled);
// Setting callbacks changes computed enablement and must be done
// at a safepoint otherwise a NULL callback could be attempted
// at a safepoint otherwise a null callback could be attempted
static void set_event_callbacks(JvmtiEnvBase *env,
const jvmtiEventCallbacks* callbacks,
jint size_of_callbacks);

File diff suppressed because it is too large


@ -1,5 +1,5 @@
/*
* Copyright (c) 1998, 2022, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 1998, 2023, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -319,14 +319,14 @@ class JvmtiExport : public AllStatic {
static oop jni_GetField_probe (JavaThread *thread, jobject jobj,
oop obj, Klass* klass, jfieldID fieldID, bool is_static)
NOT_JVMTI_RETURN_(NULL);
NOT_JVMTI_RETURN_(nullptr);
static void post_field_access_by_jni (JavaThread *thread, oop obj,
Klass* klass, jfieldID fieldID, bool is_static) NOT_JVMTI_RETURN;
static void post_field_access (JavaThread *thread, Method* method,
address location, Klass* field_klass, Handle object, jfieldID field) NOT_JVMTI_RETURN;
static oop jni_SetField_probe (JavaThread *thread, jobject jobj,
oop obj, Klass* klass, jfieldID fieldID, bool is_static, char sig_type,
jvalue *value) NOT_JVMTI_RETURN_(NULL);
jvalue *value) NOT_JVMTI_RETURN_(nullptr);
static void post_field_modification_by_jni(JavaThread *thread, oop obj,
Klass* klass, jfieldID fieldID, bool is_static, char sig_type,
jvalue *value);
@ -427,7 +427,7 @@ class JvmtiExport : public AllStatic {
#endif
// SetNativeMethodPrefix support
static char** get_all_native_method_prefixes(int* count_ptr) NOT_JVMTI_RETURN_(NULL);
static char** get_all_native_method_prefixes(int* count_ptr) NOT_JVMTI_RETURN_(nullptr);
// JavaThread lifecycle support:
static jvmtiError cv_external_thread_to_JavaThread(ThreadsList * t_list,
@ -448,7 +448,7 @@ class JvmtiCodeBlobDesc : public CHeapObj<mtInternal> {
public:
JvmtiCodeBlobDesc(const char *name, address code_begin, address code_end) {
assert(name != NULL, "all code blobs must be named");
assert(name != nullptr, "all code blobs must be named");
strncpy(_name, name, sizeof(_name) - 1);
_name[sizeof(_name)-1] = '\0';
_code_begin = code_begin;
@ -467,7 +467,7 @@ class JvmtiEventCollector : public StackObj {
bool _unset_jvmti_thread_state;
public:
JvmtiEventCollector() : _prev(NULL), _unset_jvmti_thread_state(false) {}
JvmtiEventCollector() : _prev(nullptr), _unset_jvmti_thread_state(false) {}
void setup_jvmti_thread_state(); // Set this collector in current thread, returns if success.
void unset_jvmti_thread_state(); // Reset previous collector in current thread.
@ -582,10 +582,10 @@ class JvmtiSampledObjectAllocEventCollector : public JvmtiObjectAllocEventCollec
class NoJvmtiVMObjectAllocMark : public StackObj {
private:
// enclosing collector if enabled, NULL otherwise
// enclosing collector if enabled, null otherwise
JvmtiVMObjectAllocEventCollector *_collector;
bool was_enabled() { return _collector != NULL; }
bool was_enabled() { return _collector != nullptr; }
public:
NoJvmtiVMObjectAllocMark() NOT_JVMTI_RETURN;
@ -609,7 +609,7 @@ class JvmtiHideSingleStepping : public StackObj {
public:
JvmtiHideSingleStepping(JavaThread * thread) {
assert(thread != NULL, "sanity check");
assert(thread != nullptr, "sanity check");
_single_step_hidden = false;
_thread = thread;


@ -1,5 +1,5 @@
/*
* Copyright (c) 2003, 2022, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2003, 2023, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -42,14 +42,14 @@ GrowableArray<jvmtiExtensionEventInfo*>* JvmtiExtensions::_ext_events;
// Extension Functions
//
static jvmtiError JNICALL IsClassUnloadingEnabled(const jvmtiEnv* env, ...) {
jboolean* enabled = NULL;
jboolean* enabled = nullptr;
va_list ap;
va_start(ap, env);
enabled = va_arg(ap, jboolean *);
va_end(ap);
if (enabled == NULL) {
if (enabled == nullptr) {
return JVMTI_ERROR_NULL_POINTER;
}
*enabled = (jboolean)ClassUnloading;
@ -65,11 +65,11 @@ static jvmtiError JNICALL GetVirtualThread(const jvmtiEnv* env, ...) {
JavaThread* current_thread = JavaThread::current();
ResourceMark rm(current_thread);
jthread thread = NULL;
jthread* vthread_ptr = NULL;
JavaThread* java_thread = NULL;
oop cthread_oop = NULL;
oop thread_oop = NULL;
jthread thread = nullptr;
jthread* vthread_ptr = nullptr;
JavaThread* java_thread = nullptr;
oop cthread_oop = nullptr;
oop thread_oop = nullptr;
va_list ap;
va_start(ap, env);
@ -83,7 +83,7 @@ static jvmtiError JNICALL GetVirtualThread(const jvmtiEnv* env, ...) {
jvmtiError err;
if (thread == NULL) {
if (thread == nullptr) {
java_thread = current_thread;
cthread_oop = java_thread->threadObj();
} else {
@ -92,21 +92,21 @@ static jvmtiError JNICALL GetVirtualThread(const jvmtiEnv* env, ...) {
return err;
}
}
if (vthread_ptr == NULL) {
if (vthread_ptr == nullptr) {
return JVMTI_ERROR_NULL_POINTER;
}
if (cthread_oop == NULL || java_lang_VirtualThread::is_instance(cthread_oop)) {
if (cthread_oop == nullptr || java_lang_VirtualThread::is_instance(cthread_oop)) {
return JVMTI_ERROR_INVALID_THREAD;
}
*vthread_ptr = NULL;
*vthread_ptr = nullptr;
JvmtiThreadState *state = JvmtiThreadState::state_for(java_thread);
if (state == NULL) {
if (state == nullptr) {
return JVMTI_ERROR_THREAD_NOT_ALIVE;
}
oop vthread_oop = java_thread->jvmti_vthread();
if (!java_lang_VirtualThread::is_instance(vthread_oop)) { // not a virtual thread
vthread_oop = NULL;
vthread_oop = nullptr;
}
*vthread_ptr = (jthread)JNIHandles::make_local(current_thread, vthread_oop);
return JVMTI_ERROR_NONE;
@ -121,8 +121,8 @@ static jvmtiError JNICALL GetCarrierThread(const jvmtiEnv* env, ...) {
JavaThread* current_thread = JavaThread::current();
HandleMark hm(current_thread);
jthread vthread = NULL;
jthread* thread_ptr = NULL;
jthread vthread = nullptr;
jthread* thread_ptr = nullptr;
va_list ap;
va_start(ap, env);
@ -135,9 +135,9 @@ static jvmtiError JNICALL GetCarrierThread(const jvmtiEnv* env, ...) {
ThreadsListHandle tlh(current_thread);
JavaThread* java_thread;
oop vthread_oop = NULL;
oop vthread_oop = nullptr;
if (vthread == NULL) {
if (vthread == nullptr) {
vthread = (jthread)JNIHandles::make_local(current_thread, JvmtiEnvBase::get_vthread_or_thread_oop(current_thread));
}
jvmtiError err = JvmtiExport::cv_external_thread_to_JavaThread(tlh.list(), vthread, &java_thread, &vthread_oop);
@ -147,7 +147,7 @@ static jvmtiError JNICALL GetCarrierThread(const jvmtiEnv* env, ...) {
// thread_oop.
// In a vthread case the cv_external_thread_to_JavaThread is expected to correctly set
// the thread_oop and return JVMTI_ERROR_INVALID_THREAD which we ignore here.
if (vthread_oop == NULL) {
if (vthread_oop == nullptr) {
return err;
}
}
@ -155,7 +155,7 @@ static jvmtiError JNICALL GetCarrierThread(const jvmtiEnv* env, ...) {
if (!java_lang_VirtualThread::is_instance(vthread_oop)) {
return JVMTI_ERROR_INVALID_THREAD;
}
if (thread_ptr == NULL) {
if (thread_ptr == nullptr) {
return JVMTI_ERROR_NULL_POINTER;
}
VirtualThreadGetThreadClosure op(Handle(current_thread, vthread_oop), thread_ptr);
@ -198,7 +198,7 @@ void JvmtiExtensions::register_extensions() {
sizeof(func_params0)/sizeof(func_params0[0]),
func_params0,
0, // no non-universal errors
NULL
nullptr
};
static jvmtiExtensionFunctionInfo ext_func1 = {
@ -270,7 +270,7 @@ jvmtiError JvmtiExtensions::get_functions(JvmtiEnv* env,
jint* extension_count_ptr,
jvmtiExtensionFunctionInfo** extensions)
{
guarantee(_ext_functions != NULL, "registration not done");
guarantee(_ext_functions != nullptr, "registration not done");
ResourceTracker rt(env);
@ -306,7 +306,7 @@ jvmtiError JvmtiExtensions::get_functions(JvmtiEnv* env,
ext_funcs[i].param_count = param_count;
if (param_count == 0) {
ext_funcs[i].params = NULL;
ext_funcs[i].params = nullptr;
} else {
err = rt.allocate(param_count*sizeof(jvmtiParamInfo),
(unsigned char**)&(ext_funcs[i].params));
@ -335,7 +335,7 @@ jvmtiError JvmtiExtensions::get_functions(JvmtiEnv* env,
jint error_count = _ext_functions->at(i)->error_count;
ext_funcs[i].error_count = error_count;
if (error_count == 0) {
ext_funcs[i].errors = NULL;
ext_funcs[i].errors = nullptr;
} else {
err = rt.allocate(error_count*sizeof(jvmtiError),
(unsigned char**)&(ext_funcs[i].errors));
@ -359,7 +359,7 @@ jvmtiError JvmtiExtensions::get_events(JvmtiEnv* env,
jint* extension_count_ptr,
jvmtiExtensionEventInfo** extensions)
{
guarantee(_ext_events != NULL, "registration not done");
guarantee(_ext_events != nullptr, "registration not done");
ResourceTracker rt(env);
@ -394,7 +394,7 @@ jvmtiError JvmtiExtensions::get_events(JvmtiEnv* env,
ext_events[i].param_count = param_count;
if (param_count == 0) {
ext_events[i].params = NULL;
ext_events[i].params = nullptr;
} else {
err = rt.allocate(param_count*sizeof(jvmtiParamInfo),
(unsigned char**)&(ext_events[i].params));
@ -430,13 +430,13 @@ jvmtiError JvmtiExtensions::set_event_callback(JvmtiEnv* env,
jint extension_event_index,
jvmtiExtensionEvent callback)
{
guarantee(_ext_events != NULL, "registration not done");
guarantee(_ext_events != nullptr, "registration not done");
jvmtiExtensionEventInfo* event = NULL;
jvmtiExtensionEventInfo* event = nullptr;
// if there are extension events registered then validate that the
// extension_event_index matches one of the registered events.
if (_ext_events != NULL) {
if (_ext_events != nullptr) {
for (int i=0; i<_ext_events->length(); i++ ) {
if (_ext_events->at(i)->extension_event_index == extension_event_index) {
event = _ext_events->at(i);
@ -446,7 +446,7 @@ jvmtiError JvmtiExtensions::set_event_callback(JvmtiEnv* env,
}
// invalid event index
if (event == NULL) {
if (event == nullptr) {
return JVMTI_ERROR_ILLEGAL_ARGUMENT;
}


@ -1,5 +1,5 @@
/*
* Copyright (c) 2003, 2021, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2003, 2023, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -75,7 +75,7 @@ public:
if (_dictionary_walk) {
// Collect array classes this way when walking the dictionary (because array classes are
// not in the dictionary).
for (Klass* l = k->array_klass_or_null(); l != NULL; l = l->array_klass_or_null()) {
for (Klass* l = k->array_klass_or_null(); l != nullptr; l = l->array_klass_or_null()) {
_classStack.push((jclass) _env->jni_reference(Handle(_cur_thread, l->java_mirror())));
}
}
@ -128,11 +128,11 @@ JvmtiGetLoadedClasses::getClassLoaderClasses(JvmtiEnv *env, jobject initiatingLo
oop loader = JNIHandles::resolve(initiatingLoader);
// All classes loaded from this loader as initiating loader are
// requested, so only need to walk this loader's ClassLoaderData
// dictionary, or the NULL ClassLoaderData dictionary for bootstrap loader.
if (loader != NULL) {
// dictionary, or the null ClassLoaderData dictionary for bootstrap loader.
if (loader != nullptr) {
ClassLoaderData* data = java_lang_ClassLoader::loader_data_acquire(loader);
// ClassLoader may not be used yet for loading.
if (data != NULL && data->dictionary() != NULL) {
if (data != nullptr && data->dictionary() != nullptr) {
data->dictionary()->all_entries_do(&closure);
}
} else {


@ -1,5 +1,5 @@
/*
* Copyright (c) 2003, 2022, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2003, 2023, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -108,21 +108,21 @@ void GrowableCache::recache() {
// mode. The call to remove() will rebuild the cache again
// without the bad entry.
//
if (_cache[i] == NULL) {
assert(false, "cannot recache NULL elements");
if (_cache[i] == nullptr) {
assert(false, "cannot recache null elements");
remove(i);
return;
}
}
_cache[len] = NULL;
_cache[len] = nullptr;
_listener_fun(_this_obj,_cache);
}
bool GrowableCache::equals(void* v, GrowableElement *e2) {
GrowableElement *e1 = (GrowableElement *) v;
assert(e1 != NULL, "e1 != NULL");
assert(e2 != NULL, "e2 != NULL");
assert(e1 != nullptr, "e1 != nullptr");
assert(e2 != nullptr, "e2 != nullptr");
return e1->equals(e2);
}
@ -132,10 +132,10 @@ bool GrowableCache::equals(void* v, GrowableElement *e2) {
//
GrowableCache::GrowableCache() {
_this_obj = NULL;
_listener_fun = NULL;
_elements = NULL;
_cache = NULL;
_this_obj = nullptr;
_listener_fun = nullptr;
_elements = nullptr;
_cache = nullptr;
}
GrowableCache::~GrowableCache() {
@ -159,7 +159,7 @@ int GrowableCache::length() {
// get the value of the index element in the collection
GrowableElement* GrowableCache::at(int index) {
GrowableElement *e = (GrowableElement *) _elements->at(index);
assert(e != NULL, "e != NULL");
assert(e != nullptr, "e != nullptr");
return e;
}
@ -177,7 +177,7 @@ void GrowableCache::append(GrowableElement* e) {
// remove the element at index
void GrowableCache::remove (int index) {
GrowableElement *e = _elements->at(index);
assert(e != NULL, "e != NULL");
assert(e != nullptr, "e != nullptr");
_elements->remove(e);
delete e;
recache();
@ -200,7 +200,7 @@ void GrowableCache::clear() {
JvmtiBreakpoint::JvmtiBreakpoint(Method* m_method, jlocation location)
: _method(m_method), _bci((int)location) {
assert(_method != NULL, "No method for breakpoint.");
assert(_method != nullptr, "No method for breakpoint.");
assert(_bci >= 0, "Negative bci for breakpoint.");
oop class_holder_oop = _method->method_holder()->klass_holder();
_class_holder = OopHandle(JvmtiExport::jvmti_oop_storage(), class_holder_oop);
@ -236,7 +236,7 @@ void JvmtiBreakpoint::each_method_version_do(method_action meth_act) {
// search previous versions if they exist
for (InstanceKlass* pv_node = ik->previous_versions();
pv_node != NULL;
pv_node != nullptr;
pv_node = pv_node->previous_versions()) {
Array<Method*>* methods = pv_node->methods();
@ -277,8 +277,8 @@ void JvmtiBreakpoint::clear() {
void JvmtiBreakpoint::print_on(outputStream* out) const {
#ifndef PRODUCT
ResourceMark rm;
const char *class_name = (_method == NULL) ? "NULL" : _method->klass_name()->as_C_string();
const char *method_name = (_method == NULL) ? "NULL" : _method->name()->as_C_string();
const char *class_name = (_method == nullptr) ? "null" : _method->klass_name()->as_C_string();
const char *method_name = (_method == nullptr) ? "null" : _method->name()->as_C_string();
out->print("Breakpoint(%s,%s,%d,%p)", class_name, method_name, _bci, getBcp());
#endif
}
@ -402,23 +402,23 @@ void JvmtiBreakpoints::clearall_in_class_at_safepoint(Klass* klass) {
// class JvmtiCurrentBreakpoints
//
JvmtiBreakpoints *JvmtiCurrentBreakpoints::_jvmti_breakpoints = NULL;
address * JvmtiCurrentBreakpoints::_breakpoint_list = NULL;
JvmtiBreakpoints *JvmtiCurrentBreakpoints::_jvmti_breakpoints = nullptr;
address * JvmtiCurrentBreakpoints::_breakpoint_list = nullptr;
JvmtiBreakpoints& JvmtiCurrentBreakpoints::get_jvmti_breakpoints() {
if (_jvmti_breakpoints != NULL) return (*_jvmti_breakpoints);
if (_jvmti_breakpoints != nullptr) return (*_jvmti_breakpoints);
_jvmti_breakpoints = new JvmtiBreakpoints(listener_fun);
assert(_jvmti_breakpoints != NULL, "_jvmti_breakpoints != NULL");
assert(_jvmti_breakpoints != nullptr, "_jvmti_breakpoints != nullptr");
return (*_jvmti_breakpoints);
}
void JvmtiCurrentBreakpoints::listener_fun(void *this_obj, address *cache) {
JvmtiBreakpoints *this_jvmti = (JvmtiBreakpoints *) this_obj;
assert(this_jvmti != NULL, "this_jvmti != NULL");
assert(this_jvmti != nullptr, "this_jvmti != nullptr");
debug_only(int n = this_jvmti->length(););
assert(cache[n] == NULL, "cache must be NULL terminated");
assert(cache[n] == nullptr, "cache must be null terminated");
set_breakpoint_list(cache);
}
@ -438,7 +438,7 @@ VM_BaseGetOrSetLocal::VM_BaseGetOrSetLocal(JavaThread* calling_thread, jint dept
, _index(index)
, _type(type)
, _value(value)
, _jvf(NULL)
, _jvf(nullptr)
, _set(set)
, _self(self)
, _result(JVMTI_ERROR_NONE)
@ -451,9 +451,9 @@ VM_BaseGetOrSetLocal::VM_BaseGetOrSetLocal(JavaThread* calling_thread, jint dept
// This may cause unexpected consequences like CFLH or class-init JVMTI events.
// It is better to avoid such a behavior.
bool VM_BaseGetOrSetLocal::is_assignable(const char* ty_sign, Klass* klass, Thread* thread) {
assert(ty_sign != NULL, "type signature must not be NULL");
assert(thread != NULL, "thread must not be NULL");
assert(klass != NULL, "klass must not be NULL");
assert(ty_sign != nullptr, "type signature must not be null");
assert(thread != nullptr, "thread must not be null");
assert(klass != nullptr, "klass must not be null");
int len = (int) strlen(ty_sign);
if (ty_sign[0] == JVM_SIGNATURE_CLASS &&
@ -545,7 +545,7 @@ bool VM_BaseGetOrSetLocal::check_slot_type_lvt(javaVFrame* jvf) {
}
jobject jobj = _value.l;
if (_set && slot_type == T_OBJECT && jobj != NULL) { // NULL reference is allowed
if (_set && slot_type == T_OBJECT && jobj != nullptr) { // null reference is allowed
// Check that the jobject class matches the return type signature.
oop obj = JNIHandles::resolve_external_guard(jobj);
NULL_CHECK(obj, (_result = JVMTI_ERROR_INVALID_OBJECT, false));
@ -606,7 +606,7 @@ bool VM_GetOrSetLocal::doit_prologue() {
void VM_BaseGetOrSetLocal::doit() {
_jvf = get_java_vframe();
if (_jvf == NULL) {
if (_jvf == nullptr) {
return;
};
@ -745,15 +745,15 @@ bool VM_BaseGetOrSetLocal::allow_nested_vm_operations() const {
// Constructor for non-object getter
VM_GetOrSetLocal::VM_GetOrSetLocal(JavaThread* thread, jint depth, jint index, BasicType type, bool self)
: VM_BaseGetOrSetLocal(NULL, depth, index, type, _DEFAULT_VALUE, false, self),
: VM_BaseGetOrSetLocal(nullptr, depth, index, type, _DEFAULT_VALUE, false, self),
_thread(thread),
_eb(false, NULL, NULL)
_eb(false, nullptr, nullptr)
{
}
// Constructor for object or non-object setter
VM_GetOrSetLocal::VM_GetOrSetLocal(JavaThread* thread, jint depth, jint index, BasicType type, jvalue value, bool self)
: VM_BaseGetOrSetLocal(NULL, depth, index, type, value, true, self),
: VM_BaseGetOrSetLocal(nullptr, depth, index, type, value, true, self),
_thread(thread),
_eb(type == T_OBJECT, JavaThread::current(), thread)
{
@ -769,7 +769,7 @@ VM_GetOrSetLocal::VM_GetOrSetLocal(JavaThread* thread, JavaThread* calling_threa
vframe *VM_GetOrSetLocal::get_vframe() {
if (!_thread->has_last_Java_frame()) {
return NULL;
return nullptr;
}
RegisterMap reg_map(_thread,
RegisterMap::UpdateMap::include,
@ -777,7 +777,7 @@ vframe *VM_GetOrSetLocal::get_vframe() {
RegisterMap::WalkContinuation::include);
vframe *vf = JvmtiEnvBase::get_cthread_last_java_vframe(_thread, &reg_map);
int d = 0;
while ((vf != NULL) && (d < _depth)) {
while ((vf != nullptr) && (d < _depth)) {
vf = vf->java_sender();
d++;
}
@ -788,17 +788,17 @@ javaVFrame *VM_GetOrSetLocal::get_java_vframe() {
vframe* vf = get_vframe();
if (!(_self || _thread->is_carrier_thread_suspended())) {
_result = JVMTI_ERROR_THREAD_NOT_SUSPENDED;
return NULL;
return nullptr;
}
if (vf == NULL) {
if (vf == nullptr) {
_result = JVMTI_ERROR_NO_MORE_FRAMES;
return NULL;
return nullptr;
}
javaVFrame *jvf = (javaVFrame*)vf;
if (!vf->is_java_frame()) {
_result = JVMTI_ERROR_OPAQUE_FRAME;
return NULL;
return nullptr;
}
return jvf;
}
@ -816,7 +816,7 @@ VM_GetReceiver::VM_GetReceiver(
// Constructor for non-object getter
VM_VirtualThreadGetOrSetLocal::VM_VirtualThreadGetOrSetLocal(JvmtiEnv* env, Handle vthread_h, jint depth,
jint index, BasicType type, bool self)
: VM_BaseGetOrSetLocal(NULL, depth, index, type, _DEFAULT_VALUE, false, self)
: VM_BaseGetOrSetLocal(nullptr, depth, index, type, _DEFAULT_VALUE, false, self)
{
_env = env;
_vthread_h = vthread_h;
@ -825,7 +825,7 @@ VM_VirtualThreadGetOrSetLocal::VM_VirtualThreadGetOrSetLocal(JvmtiEnv* env, Hand
// Constructor for object or non-object setter
VM_VirtualThreadGetOrSetLocal::VM_VirtualThreadGetOrSetLocal(JvmtiEnv* env, Handle vthread_h, jint depth,
jint index, BasicType type, jvalue value, bool self)
: VM_BaseGetOrSetLocal(NULL, depth, index, type, value, true, self)
: VM_BaseGetOrSetLocal(nullptr, depth, index, type, value, true, self)
{
_env = env;
_vthread_h = vthread_h;
@ -843,15 +843,15 @@ VM_VirtualThreadGetOrSetLocal::VM_VirtualThreadGetOrSetLocal(JvmtiEnv* env, Hand
javaVFrame *VM_VirtualThreadGetOrSetLocal::get_java_vframe() {
Thread* cur_thread = Thread::current();
oop cont = java_lang_VirtualThread::continuation(_vthread_h());
assert(cont != NULL, "vthread continuation must not be NULL");
assert(cont != nullptr, "vthread continuation must not be null");
javaVFrame* jvf = NULL;
javaVFrame* jvf = nullptr;
JavaThread* java_thread = JvmtiEnvBase::get_JavaThread_or_null(_vthread_h());
bool is_cont_mounted = (java_thread != NULL);
bool is_cont_mounted = (java_thread != nullptr);
if (!(_self || JvmtiVTSuspender::is_vthread_suspended(_vthread_h()))) {
_result = JVMTI_ERROR_THREAD_NOT_SUSPENDED;
return NULL;
return nullptr;
}
if (is_cont_mounted) {
@ -870,19 +870,19 @@ javaVFrame *VM_VirtualThreadGetOrSetLocal::get_java_vframe() {
}
}
int d = 0;
while ((jvf != NULL) && (d < _depth)) {
while ((jvf != nullptr) && (d < _depth)) {
jvf = jvf->java_sender();
d++;
}
if (d < _depth || jvf == NULL) {
if (d < _depth || jvf == nullptr) {
_result = JVMTI_ERROR_NO_MORE_FRAMES;
return NULL;
return nullptr;
}
if ((_set && !is_cont_mounted) || !jvf->is_java_frame()) {
_result = JVMTI_ERROR_OPAQUE_FRAME;
return NULL;
return nullptr;
}
return jvf;
}
@ -982,11 +982,11 @@ void JvmtiDeferredEvent::post() {
case TYPE_DYNAMIC_CODE_GENERATED: {
JvmtiExport::post_dynamic_code_generated_internal(
// if strdup failed give the event a default name
(_event_data.dynamic_code_generated.name == NULL)
(_event_data.dynamic_code_generated.name == nullptr)
? "unknown_code" : _event_data.dynamic_code_generated.name,
_event_data.dynamic_code_generated.code_begin,
_event_data.dynamic_code_generated.code_end);
if (_event_data.dynamic_code_generated.name != NULL) {
if (_event_data.dynamic_code_generated.name != nullptr) {
// release our copy
os::free((void *)_event_data.dynamic_code_generated.name);
}
@ -995,9 +995,9 @@ void JvmtiDeferredEvent::post() {
case TYPE_CLASS_UNLOAD: {
JvmtiExport::post_class_unload_internal(
// if strdup failed give the event a default name
(_event_data.class_unload.name == NULL)
(_event_data.class_unload.name == nullptr)
? "unknown_class" : _event_data.class_unload.name);
if (_event_data.class_unload.name != NULL) {
if (_event_data.class_unload.name != nullptr) {
// release our copy
os::free((void *)_event_data.class_unload.name);
}
@ -1023,7 +1023,7 @@ void JvmtiDeferredEvent::run_nmethod_entry_barriers() {
// Keep the nmethod for compiled_method_load from being unloaded.
void JvmtiDeferredEvent::oops_do(OopClosure* f, CodeBlobClosure* cf) {
if (cf != NULL && _type == TYPE_COMPILED_METHOD_LOAD) {
if (cf != nullptr && _type == TYPE_COMPILED_METHOD_LOAD) {
cf->do_code_blob(_event_data.compiled_method_load);
}
}
@ -1031,7 +1031,7 @@ void JvmtiDeferredEvent::oops_do(OopClosure* f, CodeBlobClosure* cf) {
// The GC calls this and marks the nmethods here on the stack so that
// they cannot be unloaded while in the queue.
void JvmtiDeferredEvent::nmethods_do(CodeBlobClosure* cf) {
if (cf != NULL && _type == TYPE_COMPILED_METHOD_LOAD) {
if (cf != nullptr && _type == TYPE_COMPILED_METHOD_LOAD) {
cf->do_code_blob(_event_data.compiled_method_load);
}
}
@ -1045,39 +1045,39 @@ bool JvmtiDeferredEventQueue::has_events() {
// The events on the queue should all be posted after the live phase so this is an
// ok check. Before the live phase, DynamicCodeGenerated events are posted directly.
// If we add other types of events to the deferred queue, this could get ugly.
return JvmtiEnvBase::get_phase() == JVMTI_PHASE_LIVE && _queue_head != NULL;
return JvmtiEnvBase::get_phase() == JVMTI_PHASE_LIVE && _queue_head != nullptr;
}
void JvmtiDeferredEventQueue::enqueue(JvmtiDeferredEvent event) {
// Events get added to the end of the queue (and are pulled off the front).
QueueNode* node = new QueueNode(event);
if (_queue_tail == NULL) {
if (_queue_tail == nullptr) {
_queue_tail = _queue_head = node;
} else {
assert(_queue_tail->next() == NULL, "Must be the last element in the list");
assert(_queue_tail->next() == nullptr, "Must be the last element in the list");
_queue_tail->set_next(node);
_queue_tail = node;
}
assert((_queue_head == NULL) == (_queue_tail == NULL),
assert((_queue_head == nullptr) == (_queue_tail == nullptr),
"Inconsistent queue markers");
}
JvmtiDeferredEvent JvmtiDeferredEventQueue::dequeue() {
assert(_queue_head != NULL, "Nothing to dequeue");
assert(_queue_head != nullptr, "Nothing to dequeue");
if (_queue_head == NULL) {
if (_queue_head == nullptr) {
// Just in case this happens in product; it shouldn't but let's not crash
return JvmtiDeferredEvent();
}
QueueNode* node = _queue_head;
_queue_head = _queue_head->next();
if (_queue_head == NULL) {
_queue_tail = NULL;
if (_queue_head == nullptr) {
_queue_tail = nullptr;
}
assert((_queue_head == NULL) == (_queue_tail == NULL),
assert((_queue_head == nullptr) == (_queue_tail == nullptr),
"Inconsistent queue markers");
JvmtiDeferredEvent event = node->event();
@ -1087,27 +1087,27 @@ JvmtiDeferredEvent JvmtiDeferredEventQueue::dequeue() {
void JvmtiDeferredEventQueue::post(JvmtiEnv* env) {
// Post events while nmethods are still in the queue and can't be unloaded.
while (_queue_head != NULL) {
while (_queue_head != nullptr) {
_queue_head->event().post_compiled_method_load_event(env);
dequeue();
}
}
void JvmtiDeferredEventQueue::run_nmethod_entry_barriers() {
for(QueueNode* node = _queue_head; node != NULL; node = node->next()) {
for(QueueNode* node = _queue_head; node != nullptr; node = node->next()) {
node->event().run_nmethod_entry_barriers();
}
}
void JvmtiDeferredEventQueue::oops_do(OopClosure* f, CodeBlobClosure* cf) {
for(QueueNode* node = _queue_head; node != NULL; node = node->next()) {
for(QueueNode* node = _queue_head; node != nullptr; node = node->next()) {
node->event().oops_do(f, cf);
}
}
void JvmtiDeferredEventQueue::nmethods_do(CodeBlobClosure* cf) {
for(QueueNode* node = _queue_head; node != NULL; node = node->next()) {
for(QueueNode* node = _queue_head; node != nullptr; node = node->next()) {
node->event().nmethods_do(cf);
}
}


@ -1,5 +1,5 @@
/*
* Copyright (c) 1999, 2022, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 1999, 2023, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -52,7 +52,7 @@ class JvmtiBreakpoints;
//
// GrowableCache is a permanent CHeap growable array of <GrowableElement *>
//
// In addition, the GrowableCache maintains a NULL terminated cache array of type address
// In addition, the GrowableCache maintains a null terminated cache array of type address
// that's created from the element array using the function:
// address GrowableElement::getCacheValue().
//
@ -162,7 +162,7 @@ private:
OopHandle _class_holder; // keeps _method memory from being deallocated
public:
JvmtiBreakpoint() : _method(NULL), _bci(0) {}
JvmtiBreakpoint() : _method(nullptr), _bci(0) {}
JvmtiBreakpoint(Method* m_method, jlocation location);
virtual ~JvmtiBreakpoint();
bool equals(JvmtiBreakpoint& bp);
@ -248,7 +248,7 @@ private:
// Current breakpoints, lazily initialized by get_jvmti_breakpoints();
static JvmtiBreakpoints *_jvmti_breakpoints;
// NULL terminated cache of byte-code pointers corresponding to current breakpoints.
// null terminated cache of byte-code pointers corresponding to current breakpoints.
// Updated only at safepoints (with listener_fun) when the cache is moved.
// It exists only to make is_breakpoint fast.
static address *_breakpoint_list;
@ -289,7 +289,7 @@ public:
_breakpoints = &current_bps;
_bp = bp;
_operation = operation;
assert(bp != NULL, "bp != NULL");
assert(bp != nullptr, "bp != null");
}
VMOp_Type type() const { return VMOp_ChangeBreakpoints; }
@ -521,7 +521,7 @@ class JvmtiDeferredEventQueue : public CHeapObj<mtInternal> {
public:
QueueNode(const JvmtiDeferredEvent& event)
: _event(event), _next(NULL) {}
: _event(event), _next(nullptr) {}
JvmtiDeferredEvent& event() { return _event; }
QueueNode* next() const { return _next; }
@ -533,7 +533,7 @@ class JvmtiDeferredEventQueue : public CHeapObj<mtInternal> {
QueueNode* _queue_tail;
public:
JvmtiDeferredEventQueue() : _queue_head(NULL), _queue_tail(NULL) {}
JvmtiDeferredEventQueue() : _queue_head(nullptr), _queue_tail(nullptr) {}
bool has_events() NOT_JVMTI_RETURN_(false);
JvmtiDeferredEvent dequeue() NOT_JVMTI_RETURN_(JvmtiDeferredEvent());
@ -549,7 +549,7 @@ class JvmtiDeferredEventQueue : public CHeapObj<mtInternal> {
void oops_do(OopClosure* f, CodeBlobClosure* cf) NOT_JVMTI_RETURN;
};
// Utility macro that checks for NULL pointers:
#define NULL_CHECK(X, Y) if ((X) == NULL) { return (Y); }
// Utility macro that checks for null pointers:
#define NULL_CHECK(X, Y) if ((X) == nullptr) { return (Y); }
#endif // SHARE_PRIMS_JVMTIIMPL_HPP


@ -1,5 +1,5 @@
/*
* Copyright (c) 2003, 2022, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2003, 2023, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -31,7 +31,7 @@
#include "runtime/orderAccess.hpp"
#include "runtime/threads.hpp"
JvmtiRawMonitor::QNode::QNode(Thread* thread) : _next(NULL), _prev(NULL),
JvmtiRawMonitor::QNode::QNode(Thread* thread) : _next(nullptr), _prev(nullptr),
_event(thread->_ParkEvent),
_notified(0), _t_state(TS_RUN) {
}
@ -59,12 +59,12 @@ void JvmtiPendingMonitors::transition_raw_monitors() {
// class JvmtiRawMonitor
//
JvmtiRawMonitor::JvmtiRawMonitor(const char* name) : _owner(NULL),
JvmtiRawMonitor::JvmtiRawMonitor(const char* name) : _owner(nullptr),
_recursions(0),
_entry_list(NULL),
_wait_set(NULL),
_entry_list(nullptr),
_wait_set(nullptr),
_magic(JVMTI_RM_MAGIC),
_name(NULL) {
_name(nullptr) {
#ifdef ASSERT
_name = strcpy(NEW_C_HEAP_ARRAY(char, strlen(name) + 1, mtInternal), name);
#endif
@ -138,7 +138,7 @@ void JvmtiRawMonitor::simple_enter(Thread* self) {
node._next = _entry_list;
_entry_list = &node;
OrderAccess::fence();
if (_owner == NULL && Atomic::replace_if_null(&_owner, self)) {
if (_owner == nullptr && Atomic::replace_if_null(&_owner, self)) {
_entry_list = node._next;
RawMonitor_lock->unlock();
if (self->is_Java_thread()) {
@ -155,22 +155,22 @@ void JvmtiRawMonitor::simple_enter(Thread* self) {
void JvmtiRawMonitor::simple_exit(Thread* self) {
guarantee(_owner == self, "invariant");
Atomic::release_store(&_owner, (Thread*)NULL);
Atomic::release_store(&_owner, (Thread*)nullptr);
OrderAccess::fence();
if (self->is_Java_thread()) {
Continuation::unpin(JavaThread::cast(self));
}
if (_entry_list == NULL) {
if (_entry_list == nullptr) {
return;
}
RawMonitor_lock->lock_without_safepoint_check();
QNode* w = _entry_list;
if (w != NULL) {
if (w != nullptr) {
_entry_list = w->_next;
}
RawMonitor_lock->unlock();
if (w != NULL) {
if (w != nullptr) {
guarantee(w ->_t_state == QNode::TS_ENTER, "invariant");
// Once we set _t_state to TS_RUN the waiting thread can complete
// simple_enter and 'w' is pointing into random stack space. So we have
@ -205,12 +205,12 @@ inline void JvmtiRawMonitor::dequeue_waiter(QNode& node) {
if (node._t_state == QNode::TS_WAIT) {
// Simple O(n) unlink, but performance isn't critical here.
QNode* p;
QNode* q = NULL;
QNode* q = nullptr;
for (p = _wait_set; p != &node; p = p->_next) {
q = p;
}
guarantee(p == &node, "invariant");
if (q == NULL) {
if (q == nullptr) {
guarantee (p == _wait_set, "invariant");
_wait_set = p->_next;
} else {
@ -281,7 +281,7 @@ int JvmtiRawMonitor::simple_wait(Thread* self, jlong millis) {
void JvmtiRawMonitor::simple_notify(Thread* self, bool all) {
guarantee(_owner == self, "invariant");
if (_wait_set == NULL) {
if (_wait_set == nullptr) {
return;
}
@ -292,15 +292,15 @@ void JvmtiRawMonitor::simple_notify(Thread* self, bool all) {
// We use (B), which is crude and results in lots of futile
// context switching. In particular (B) induces lots of contention.
ParkEvent* ev = NULL; // consider using a small auto array ...
ParkEvent* ev = nullptr; // consider using a small auto array ...
RawMonitor_lock->lock_without_safepoint_check();
for (;;) {
QNode* w = _wait_set;
if (w == NULL) break;
if (w == nullptr) break;
_wait_set = w->_next;
if (ev != NULL) {
if (ev != nullptr) {
ev->unpark();
ev = NULL;
ev = nullptr;
}
ev = w->_event;
OrderAccess::loadstore();
@ -311,7 +311,7 @@ void JvmtiRawMonitor::simple_notify(Thread* self, bool all) {
}
}
RawMonitor_lock->unlock();
if (ev != NULL) {
if (ev != nullptr) {
ev->unpark();
}
return;
@ -351,7 +351,7 @@ void JvmtiRawMonitor::raw_enter(Thread* self) {
}
}
self->set_current_pending_raw_monitor(NULL);
self->set_current_pending_raw_monitor(nullptr);
guarantee(_owner == self, "invariant");
guarantee(_recursions == 0, "invariant");
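The simple_enter and simple_exit hunks above hinge on treating a null _owner as "unlocked": entry races on a compare-and-swap against nullptr, and exit publishes nullptr with release semantics. A standalone sketch of that shape with standard C++ atomics standing in for HotSpot's Atomic:: wrappers (the names below are placeholders):

    #include <atomic>

    struct Thread;                                // opaque stand-in

    static std::atomic<Thread*> owner{nullptr};   // nullptr means "unlocked"

    // Fast path of enter: claim ownership only if nobody holds the monitor,
    // mirroring "_owner == nullptr && Atomic::replace_if_null(&_owner, self)".
    bool try_fast_enter(Thread* self) {
      Thread* expected = nullptr;
      return owner.load(std::memory_order_relaxed) == nullptr &&
             owner.compare_exchange_strong(expected, self);
    }

    // Exit: publish nullptr with release semantics, mirroring
    // "Atomic::release_store(&_owner, (Thread*)nullptr)".
    void fast_exit() {
      owner.store(nullptr, std::memory_order_release);
    }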

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2003, 2022, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2003, 2023, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -64,12 +64,12 @@
#include "utilities/bitMap.inline.hpp"
#include "utilities/events.hpp"
Array<Method*>* VM_RedefineClasses::_old_methods = NULL;
Array<Method*>* VM_RedefineClasses::_new_methods = NULL;
Method** VM_RedefineClasses::_matching_old_methods = NULL;
Method** VM_RedefineClasses::_matching_new_methods = NULL;
Method** VM_RedefineClasses::_deleted_methods = NULL;
Method** VM_RedefineClasses::_added_methods = NULL;
Array<Method*>* VM_RedefineClasses::_old_methods = nullptr;
Array<Method*>* VM_RedefineClasses::_new_methods = nullptr;
Method** VM_RedefineClasses::_matching_old_methods = nullptr;
Method** VM_RedefineClasses::_matching_new_methods = nullptr;
Method** VM_RedefineClasses::_deleted_methods = nullptr;
Method** VM_RedefineClasses::_added_methods = nullptr;
int VM_RedefineClasses::_matching_methods_length = 0;
int VM_RedefineClasses::_deleted_methods_length = 0;
int VM_RedefineClasses::_added_methods_length = 0;
@ -86,7 +86,7 @@ VM_RedefineClasses::VM_RedefineClasses(jint class_count,
_class_load_kind = class_load_kind;
_any_class_has_resolved_methods = false;
_res = JVMTI_ERROR_NONE;
_the_class = NULL;
_the_class = nullptr;
_id = next_id();
}
@ -103,7 +103,7 @@ void VM_RedefineClasses::lock_classes() {
MonitorLocker ml(RedefineClasses_lock);
if (redef_classes == NULL) {
if (redef_classes == nullptr) {
redef_classes = new (mtClass) GrowableArray<Klass*>(1, mtClass);
state->set_classes_being_redefined(redef_classes);
}
@ -141,7 +141,7 @@ void VM_RedefineClasses::lock_classes() {
void VM_RedefineClasses::unlock_classes() {
JvmtiThreadState *state = JvmtiThreadState::state_for(JavaThread::current());
GrowableArray<Klass*>* redef_classes = state->get_classes_being_redefined();
assert(redef_classes != NULL, "_classes_being_redefined is not allocated");
assert(redef_classes != nullptr, "_classes_being_redefined is not allocated");
MonitorLocker ml(RedefineClasses_lock);
@ -170,13 +170,13 @@ bool VM_RedefineClasses::doit_prologue() {
_res = JVMTI_ERROR_NONE;
return false;
}
if (_class_defs == NULL) {
if (_class_defs == nullptr) {
_res = JVMTI_ERROR_NULL_POINTER;
return false;
}
for (int i = 0; i < _class_count; i++) {
if (_class_defs[i].klass == NULL) {
if (_class_defs[i].klass == nullptr) {
_res = JVMTI_ERROR_INVALID_CLASS;
return false;
}
@ -184,7 +184,7 @@ bool VM_RedefineClasses::doit_prologue() {
_res = JVMTI_ERROR_INVALID_CLASS_FORMAT;
return false;
}
if (_class_defs[i].class_bytes == NULL) {
if (_class_defs[i].class_bytes == nullptr) {
_res = JVMTI_ERROR_NULL_POINTER;
return false;
}
@ -211,14 +211,14 @@ bool VM_RedefineClasses::doit_prologue() {
if (_res != JVMTI_ERROR_NONE) {
// free any successfully created classes, since none are redefined
for (int i = 0; i < _class_count; i++) {
if (_scratch_classes[i] != NULL) {
if (_scratch_classes[i] != nullptr) {
ClassLoaderData* cld = _scratch_classes[i]->class_loader_data();
// Free the memory for this class at class unloading time. Not before
// because CMS might think this is still live.
InstanceKlass* ik = get_ik(_class_defs[i].klass);
if (ik->get_cached_class_file() == _scratch_classes[i]->get_cached_class_file()) {
// Don't double-free cached_class_file copied from the original class if error.
_scratch_classes[i]->set_cached_class_file(NULL);
_scratch_classes[i]->set_cached_class_file(nullptr);
}
cld->add_to_deallocate_list(InstanceKlass::cast(_scratch_classes[i]));
}
@ -311,7 +311,7 @@ void VM_RedefineClasses::doit_epilogue() {
os::free(_scratch_classes);
// Reset the_class to null for error printing.
_the_class = NULL;
_the_class = nullptr;
if (log_is_enabled(Info, redefine, class, timer)) {
// Used to have separate timers for "doit" and "all", but the timer
@ -335,7 +335,7 @@ bool VM_RedefineClasses::is_modifiable_class(oop klass_mirror) {
}
Klass* k = java_lang_Class::as_Klass(klass_mirror);
// classes for arrays cannot be redefined
if (k == NULL || !k->is_instance_klass()) {
if (k == nullptr || !k->is_instance_klass()) {
return false;
}
@ -479,7 +479,7 @@ void VM_RedefineClasses::append_entry(const constantPoolHandle& scratch_cp,
int new_name_and_type_ref_i = find_or_append_indirect_entry(scratch_cp, name_and_type_ref_i,
merge_cp_p, merge_cp_length_p);
const char *entry_name = NULL;
const char *entry_name = nullptr;
switch (scratch_cp->tag_at(scratch_i).value()) {
case JVM_CONSTANT_Fieldref:
entry_name = "Fieldref";
@ -720,7 +720,7 @@ int VM_RedefineClasses::find_or_append_operand(const constantPoolHandle& scratch
void VM_RedefineClasses::finalize_operands_merge(const constantPoolHandle& merge_cp, TRAPS) {
if (merge_cp->operands() == NULL) {
if (merge_cp->operands() == nullptr) {
return;
}
// Shrink the merge_cp operands
@ -738,7 +738,7 @@ void VM_RedefineClasses::finalize_operands_merge(const constantPoolHandle& merge
}
}
// Clean-up
_operands_index_map_p = NULL;
_operands_index_map_p = nullptr;
_operands_cur_length = 0;
_operands_index_map_count = 0;
} // end finalize_operands_merge()
@ -775,7 +775,7 @@ static jvmtiError check_attribute_arrays(const char* attr_name,
Symbol** the_syms = NEW_RESOURCE_ARRAY_RETURN_NULL(Symbol*, array_len);
Symbol** scr_syms = NEW_RESOURCE_ARRAY_RETURN_NULL(Symbol*, array_len);
if (the_syms == NULL || scr_syms == NULL) {
if (the_syms == nullptr || scr_syms == nullptr) {
return JVMTI_ERROR_OUT_OF_MEMORY;
}
@ -845,8 +845,8 @@ static jvmtiError check_record_attribute(InstanceKlass* the_class, InstanceKlass
// Get lists of record components.
Array<RecordComponent*>* the_record = the_class->record_components();
Array<RecordComponent*>* scr_record = scratch_class->record_components();
bool the_record_exists = the_record != NULL;
bool scr_record_exists = scr_record != NULL;
bool the_record_exists = the_record != nullptr;
bool scr_record_exists = scr_record != nullptr;
if (the_record_exists && scr_record_exists) {
int the_num_components = the_record->length();
@ -877,9 +877,9 @@ static jvmtiError check_record_attribute(InstanceKlass* the_class, InstanceKlass
int the_gen_sig = the_component->generic_signature_index();
int scr_gen_sig = scr_component->generic_signature_index();
const Symbol* const the_gen_sig_sym = (the_gen_sig == 0 ? NULL :
const Symbol* const the_gen_sig_sym = (the_gen_sig == 0 ? nullptr :
the_cp->symbol_at(the_component->generic_signature_index()));
const Symbol* const scr_gen_sig_sym = (scr_gen_sig == 0 ? NULL :
const Symbol* const scr_gen_sig_sym = (scr_gen_sig == 0 ? nullptr :
scr_cp->symbol_at(scr_component->generic_signature_index()));
if (the_gen_sig_sym != scr_gen_sig_sym) {
log_info(redefine, class, record)
@ -928,16 +928,16 @@ jvmtiError VM_RedefineClasses::compare_and_normalize_class_versions(
// Check superclasses, or rather their names, since superclasses themselves can be
// requested to replace.
// Check for NULL superclass first since this might be java.lang.Object
// Check for null superclass first since this might be java.lang.Object
if (the_class->super() != scratch_class->super() &&
(the_class->super() == NULL || scratch_class->super() == NULL ||
(the_class->super() == nullptr || scratch_class->super() == nullptr ||
the_class->super()->name() !=
scratch_class->super()->name())) {
log_info(redefine, class, normalize)
("redefined class %s superclass change error: superclass changed from %s to %s.",
the_class->external_name(),
the_class->super() == NULL ? "NULL" : the_class->super()->external_name(),
scratch_class->super() == NULL ? "NULL" : scratch_class->super()->external_name());
the_class->super() == nullptr ? "null" : the_class->super()->external_name(),
scratch_class->super() == nullptr ? "null" : scratch_class->super()->external_name());
return JVMTI_ERROR_UNSUPPORTED_REDEFINITION_HIERARCHY_CHANGED;
}
@ -1150,7 +1150,7 @@ jvmtiError VM_RedefineClasses::compare_and_normalize_class_versions(
u2 old_num = k_old_method->method_idnum();
if (new_num != old_num) {
Method* idnum_owner = scratch_class->method_with_idnum(old_num);
if (idnum_owner != NULL) {
if (idnum_owner != nullptr) {
// There is already a method assigned this idnum -- switch them
// Take current and original idnum from the new_method
idnum_owner->set_method_idnum(new_num);
@ -1190,7 +1190,7 @@ jvmtiError VM_RedefineClasses::compare_and_normalize_class_versions(
}
u2 new_num = k_new_method->method_idnum();
Method* idnum_owner = scratch_class->method_with_idnum(num);
if (idnum_owner != NULL) {
if (idnum_owner != nullptr) {
// There is already a method assigned this idnum -- switch them
// Take current and original idnum from the new_method
idnum_owner->set_method_idnum(new_num);
@ -1350,21 +1350,21 @@ jvmtiError VM_RedefineClasses::load_new_class_versions() {
// For consistency allocate memory using os::malloc wrapper.
_scratch_classes = (InstanceKlass**)
os::malloc(sizeof(InstanceKlass*) * _class_count, mtClass);
if (_scratch_classes == NULL) {
if (_scratch_classes == nullptr) {
return JVMTI_ERROR_OUT_OF_MEMORY;
}
// Zero initialize the _scratch_classes array.
for (int i = 0; i < _class_count; i++) {
_scratch_classes[i] = NULL;
_scratch_classes[i] = nullptr;
}
JavaThread* current = JavaThread::current();
ResourceMark rm(current);
JvmtiThreadState *state = JvmtiThreadState::state_for(current);
// state can only be NULL if the current thread is exiting which
// state can only be null if the current thread is exiting which
// should not happen since we're trying to do a RedefineClasses
guarantee(state != NULL, "exiting thread calling load_new_class_versions");
guarantee(state != nullptr, "exiting thread calling load_new_class_versions");
for (int i = 0; i < _class_count; i++) {
// Create HandleMark so that any handles created while loading new class
// versions are deleted. Constant pools are deallocated while merging
@ -1435,7 +1435,7 @@ jvmtiError VM_RedefineClasses::load_new_class_versions() {
if (HAS_PENDING_EXCEPTION) {
Symbol* ex_name = PENDING_EXCEPTION->klass()->name();
oop message = java_lang_Throwable::message(PENDING_EXCEPTION);
if (message != NULL) {
if (message != nullptr) {
char* ex_msg = java_lang_String::as_utf8_string(message);
log_info(redefine, class, load, exceptions)("link_class exception: '%s %s'",
ex_name->as_C_string(), ex_msg);
@ -1600,11 +1600,11 @@ bool VM_RedefineClasses::merge_constant_pools(const constantPoolHandle& old_cp,
const constantPoolHandle& scratch_cp, constantPoolHandle *merge_cp_p,
int *merge_cp_length_p, TRAPS) {
if (merge_cp_p == NULL) {
if (merge_cp_p == nullptr) {
assert(false, "caller must provide scratch constantPool");
return false; // robustness
}
if (merge_cp_length_p == NULL) {
if (merge_cp_length_p == nullptr) {
assert(false, "caller must provide scratch CP length");
return false; // robustness
}
@ -1784,10 +1784,10 @@ class MergeCPCleaner {
ConstantPool* _scratch_cp;
public:
MergeCPCleaner(ClassLoaderData* loader_data, ConstantPool* merge_cp) :
_loader_data(loader_data), _cp(merge_cp), _scratch_cp(NULL) {}
_loader_data(loader_data), _cp(merge_cp), _scratch_cp(nullptr) {}
~MergeCPCleaner() {
_loader_data->add_to_deallocate_list(_cp);
if (_scratch_cp != NULL) {
if (_scratch_cp != nullptr) {
_loader_data->add_to_deallocate_list(_scratch_cp);
}
}
@ -1850,7 +1850,7 @@ jvmtiError VM_RedefineClasses::merge_cp_and_rewrite(
merge_cp->set_pool_holder(scratch_class);
bool result = merge_constant_pools(old_cp, scratch_cp, &merge_cp,
&merge_cp_length, THREAD);
merge_cp->set_pool_holder(NULL);
merge_cp->set_pool_holder(nullptr);
if (!result) {
// The merge can fail due to memory allocation failure or due
@ -2062,7 +2062,7 @@ bool VM_RedefineClasses::rewrite_cp_refs_in_nest_attributes(
// Rewrite constant pool references in the Record attribute.
bool VM_RedefineClasses::rewrite_cp_refs_in_record_attribute(InstanceKlass* scratch_class) {
Array<RecordComponent*>* components = scratch_class->record_components();
if (components != NULL) {
if (components != nullptr) {
for (int i = 0; i < components->length(); i++) {
RecordComponent* component = components->at(i);
u2 cp_index = component->name_index();
@ -2075,7 +2075,7 @@ bool VM_RedefineClasses::rewrite_cp_refs_in_record_attribute(InstanceKlass* scra
}
AnnotationArray* annotations = component->annotations();
if (annotations != NULL && annotations->length() != 0) {
if (annotations != nullptr && annotations->length() != 0) {
int byte_i = 0; // byte index into annotations
if (!rewrite_cp_refs_in_annotations_typeArray(annotations, byte_i)) {
log_debug(redefine, class, annotation)("bad record_component_annotations at %d", i);
@ -2085,7 +2085,7 @@ bool VM_RedefineClasses::rewrite_cp_refs_in_record_attribute(InstanceKlass* scra
}
AnnotationArray* type_annotations = component->type_annotations();
if (type_annotations != NULL && type_annotations->length() != 0) {
if (type_annotations != nullptr && type_annotations->length() != 0) {
int byte_i = 0; // byte index into annotations
if (!rewrite_cp_refs_in_annotations_typeArray(type_annotations, byte_i)) {
log_debug(redefine, class, annotation)("bad record_component_type_annotations at %d", i);
@ -2103,7 +2103,7 @@ bool VM_RedefineClasses::rewrite_cp_refs_in_permitted_subclasses_attribute(
InstanceKlass* scratch_class) {
Array<u2>* permitted_subclasses = scratch_class->permitted_subclasses();
assert(permitted_subclasses != NULL, "unexpected null permitted_subclasses");
assert(permitted_subclasses != nullptr, "unexpected null permitted_subclasses");
for (int i = 0; i < permitted_subclasses->length(); i++) {
u2 cp_index = permitted_subclasses->at(i);
permitted_subclasses->at_put(i, find_new_index(cp_index));
@ -2116,7 +2116,7 @@ bool VM_RedefineClasses::rewrite_cp_refs_in_methods(InstanceKlass* scratch_class
Array<Method*>* methods = scratch_class->methods();
if (methods == NULL || methods->length() == 0) {
if (methods == nullptr || methods->length() == 0) {
// no methods so nothing to do
return true;
}
@ -2214,7 +2214,7 @@ void VM_RedefineClasses::rewrite_cp_refs_in_method(methodHandle method,
// and put_native_u2().
Bytes::put_Java_u2(bcp, new_index);
Relocator rc(method, NULL /* no RelocatorListener needed */);
Relocator rc(method, nullptr /* no RelocatorListener needed */);
methodHandle m;
{
PauseNoSafepointVerifier pnsv(&nsv);
@ -2283,7 +2283,7 @@ void VM_RedefineClasses::rewrite_cp_refs_in_method(methodHandle method,
bool VM_RedefineClasses::rewrite_cp_refs_in_class_annotations(InstanceKlass* scratch_class) {
AnnotationArray* class_annotations = scratch_class->class_annotations();
if (class_annotations == NULL || class_annotations->length() == 0) {
if (class_annotations == nullptr || class_annotations->length() == 0) {
// no class_annotations so nothing to do
return true;
}
@ -2569,7 +2569,7 @@ bool VM_RedefineClasses::rewrite_cp_refs_in_fields_annotations(
Array<AnnotationArray*>* fields_annotations = scratch_class->fields_annotations();
if (fields_annotations == NULL || fields_annotations->length() == 0) {
if (fields_annotations == nullptr || fields_annotations->length() == 0) {
// no fields_annotations so nothing to do
return true;
}
@ -2578,7 +2578,7 @@ bool VM_RedefineClasses::rewrite_cp_refs_in_fields_annotations(
for (int i = 0; i < fields_annotations->length(); i++) {
AnnotationArray* field_annotations = fields_annotations->at(i);
if (field_annotations == NULL || field_annotations->length() == 0) {
if (field_annotations == nullptr || field_annotations->length() == 0) {
// this field does not have any annotations so skip it
continue;
}
@ -2603,7 +2603,7 @@ bool VM_RedefineClasses::rewrite_cp_refs_in_methods_annotations(
Method* m = scratch_class->methods()->at(i);
AnnotationArray* method_annotations = m->constMethod()->method_annotations();
if (method_annotations == NULL || method_annotations->length() == 0) {
if (method_annotations == nullptr || method_annotations->length() == 0) {
// this method does not have any annotations so skip it
continue;
}
@ -2639,7 +2639,7 @@ bool VM_RedefineClasses::rewrite_cp_refs_in_methods_parameter_annotations(
for (int i = 0; i < scratch_class->methods()->length(); i++) {
Method* m = scratch_class->methods()->at(i);
AnnotationArray* method_parameter_annotations = m->constMethod()->parameter_annotations();
if (method_parameter_annotations == NULL
if (method_parameter_annotations == nullptr
|| method_parameter_annotations->length() == 0) {
// this method does not have any parameter annotations so skip it
continue;
@ -2687,7 +2687,7 @@ bool VM_RedefineClasses::rewrite_cp_refs_in_methods_default_annotations(
for (int i = 0; i < scratch_class->methods()->length(); i++) {
Method* m = scratch_class->methods()->at(i);
AnnotationArray* method_default_annotations = m->constMethod()->default_annotations();
if (method_default_annotations == NULL
if (method_default_annotations == nullptr
|| method_default_annotations->length() == 0) {
// this method does not have any default annotations so skip it
continue;
@ -2712,7 +2712,7 @@ bool VM_RedefineClasses::rewrite_cp_refs_in_class_type_annotations(
InstanceKlass* scratch_class) {
AnnotationArray* class_type_annotations = scratch_class->class_type_annotations();
if (class_type_annotations == NULL || class_type_annotations->length() == 0) {
if (class_type_annotations == nullptr || class_type_annotations->length() == 0) {
// no class_type_annotations so nothing to do
return true;
}
@ -2729,7 +2729,7 @@ bool VM_RedefineClasses::rewrite_cp_refs_in_class_type_annotations(
bool VM_RedefineClasses::rewrite_cp_refs_in_fields_type_annotations(InstanceKlass* scratch_class) {
Array<AnnotationArray*>* fields_type_annotations = scratch_class->fields_type_annotations();
if (fields_type_annotations == NULL || fields_type_annotations->length() == 0) {
if (fields_type_annotations == nullptr || fields_type_annotations->length() == 0) {
// no fields_type_annotations so nothing to do
return true;
}
@ -2738,7 +2738,7 @@ bool VM_RedefineClasses::rewrite_cp_refs_in_fields_type_annotations(InstanceKlas
for (int i = 0; i < fields_type_annotations->length(); i++) {
AnnotationArray* field_type_annotations = fields_type_annotations->at(i);
if (field_type_annotations == NULL || field_type_annotations->length() == 0) {
if (field_type_annotations == nullptr || field_type_annotations->length() == 0) {
// this field does not have any annotations so skip it
continue;
}
@ -2764,7 +2764,7 @@ bool VM_RedefineClasses::rewrite_cp_refs_in_methods_type_annotations(
Method* m = scratch_class->methods()->at(i);
AnnotationArray* method_type_annotations = m->constMethod()->type_annotations();
if (method_type_annotations == NULL || method_type_annotations->length() == 0) {
if (method_type_annotations == nullptr || method_type_annotations->length() == 0) {
// this method does not have any annotations so skip it
continue;
}
@ -3736,7 +3736,7 @@ void VM_RedefineClasses::AdjustAndCleanMetadata::do_klass(Klass* k) {
Array<Method*>* methods = ik->methods();
int num_methods = methods->length();
for (int index = 0; index < num_methods; ++index) {
if (methods->at(index)->method_data() != NULL) {
if (methods->at(index)->method_data() != nullptr) {
methods->at(index)->method_data()->clean_weak_method_links();
}
}
@ -3775,16 +3775,16 @@ void VM_RedefineClasses::AdjustAndCleanMetadata::do_klass(Klass* k) {
// this klass' constant pool cache may need adjustment
ConstantPool* other_cp = ik->constants();
cp_cache = other_cp->cache();
if (cp_cache != NULL) {
if (cp_cache != nullptr) {
cp_cache->adjust_method_entries(&trace_name_printed);
}
// the previous versions' constant pool caches may need adjustment
for (InstanceKlass* pv_node = ik->previous_versions();
pv_node != NULL;
pv_node != nullptr;
pv_node = pv_node->previous_versions()) {
cp_cache = pv_node->constants()->cache();
if (cp_cache != NULL) {
if (cp_cache != nullptr) {
cp_cache->adjust_method_entries(&trace_name_printed);
}
}
@ -3795,7 +3795,7 @@ void VM_RedefineClasses::update_jmethod_ids() {
for (int j = 0; j < _matching_methods_length; ++j) {
Method* old_method = _matching_old_methods[j];
jmethodID jmid = old_method->find_jmethod_id_or_null();
if (jmid != NULL) {
if (jmid != nullptr) {
// There is a jmethodID, change it to point to the new method
Method* new_method = _matching_new_methods[j];
Method::change_method_associated_with_jmethod_id(jmid, new_method);
@ -3978,9 +3978,9 @@ class TransferNativeFunctionRegistration {
Method* search_prefix_name_space(int depth, char* name_str, size_t name_len,
Symbol* signature) {
TempNewSymbol name_symbol = SymbolTable::probe(name_str, (int)name_len);
if (name_symbol != NULL) {
if (name_symbol != nullptr) {
Method* method = the_class->lookup_method(name_symbol, signature);
if (method != NULL) {
if (method != nullptr) {
// Even if prefixed, intermediate methods must exist.
if (method->is_native()) {
// Wahoo, we found a (possibly prefixed) version of the method, return it.
@ -3989,7 +3989,7 @@ class TransferNativeFunctionRegistration {
if (depth < prefix_count) {
// Try applying further prefixes (other than this one).
method = search_prefix_name_space(depth+1, name_str, name_len, signature);
if (method != NULL) {
if (method != nullptr) {
return method; // found
}
@ -4003,7 +4003,7 @@ class TransferNativeFunctionRegistration {
strcat(trial_name_str, name_str);
method = search_prefix_name_space(depth+1, trial_name_str, trial_len,
signature);
if (method != NULL) {
if (method != nullptr) {
// If found along this branch, it was prefixed, mark as such
method->set_is_prefixed_native();
return method; // found
@ -4011,7 +4011,7 @@ class TransferNativeFunctionRegistration {
}
}
}
return NULL; // This whole branch bore nothing
return nullptr; // This whole branch bore nothing
}
// Return the method name with old prefixes stripped away.
@ -4056,7 +4056,7 @@ class TransferNativeFunctionRegistration {
if (old_method->is_native() && old_method->has_native_function()) {
Method* new_method = strip_and_search_for_new_native(old_method);
if (new_method != NULL) {
if (new_method != nullptr) {
// Actually set the native function in the new method.
// Redefine does not send events (except CFLH), certainly not this
// behind the scenes re-registration.
@ -4260,7 +4260,7 @@ void VM_RedefineClasses::redefine_single_class(Thread* current, jclass the_jclas
PreviousVersionWalker pvw(ik);
do {
ik = pvw.next_previous_version();
if (ik != NULL) {
if (ik != nullptr) {
// attach previous version of klass to the new constant pool
ik->set_constants(scratch_class->constants());
@ -4273,7 +4273,7 @@ void VM_RedefineClasses::redefine_single_class(Thread* current, jclass the_jclas
method->set_constants(scratch_class->constants());
}
}
} while (ik != NULL);
} while (ik != nullptr);
}
#endif
@ -4343,9 +4343,9 @@ void VM_RedefineClasses::redefine_single_class(Thread* current, jclass the_jclas
the_class->set_cached_class_file(scratch_class->get_cached_class_file());
}
// NULL out in scratch class to not delete twice. The class to be redefined
// null out in scratch class to not delete twice. The class to be redefined
// always owns these bytes.
scratch_class->set_cached_class_file(NULL);
scratch_class->set_cached_class_file(nullptr);
// Replace inner_classes
Array<u2>* old_inner_classes = the_class->inner_classes();
@ -4367,7 +4367,7 @@ void VM_RedefineClasses::redefine_single_class(Thread* current, jclass the_jclas
// Copy the "source debug extension" attribute from new class version
the_class->set_source_debug_extension(
scratch_class->source_debug_extension(),
scratch_class->source_debug_extension() == NULL ? 0 :
scratch_class->source_debug_extension() == nullptr ? 0 :
(int)strlen(scratch_class->source_debug_extension()));
// Use of javac -g could be different in the old and the new
@ -4411,7 +4411,7 @@ void VM_RedefineClasses::redefine_single_class(Thread* current, jclass the_jclas
_timer_rsc_phase2.start();
}
if (the_class->oop_map_cache() != NULL) {
if (the_class->oop_map_cache() != nullptr) {
// Flush references to any obsolete methods from the oop map cache
// so that obsolete methods are not pinned.
the_class->oop_map_cache()->flush_obsolete_entries();
@ -4498,8 +4498,8 @@ void VM_RedefineClasses::CheckClass::do_klass(Klass* k) {
}
// the constant pool cache should never contain non-deleted old or obsolete methods
if (ik->constants() != NULL &&
ik->constants()->cache() != NULL &&
if (ik->constants() != nullptr &&
ik->constants()->cache() != nullptr &&
!ik->constants()->cache()->check_no_old_or_obsolete_entries()) {
if (log_is_enabled(Trace, redefine, class, obsolete, metadata)) {
log_trace(redefine, class, obsolete, metadata)
@ -4595,7 +4595,7 @@ void VM_RedefineClasses::dump_methods() {
void VM_RedefineClasses::print_on_error(outputStream* st) const {
VM_Operation::print_on_error(st);
if (_the_class != NULL) {
if (_the_class != nullptr) {
ResourceMark rm;
st->print_cr(", redefining class %s", _the_class->external_name());
}
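Every hunk in this file is a mechanical NULL-to-nullptr substitution, so it is worth recalling the general C++ motivation (standard language behavior, not code from the patch): NULL is an integer constant, while nullptr has its own type, std::nullptr_t, so it cannot be mistaken for an int during overload resolution or template deduction.

    #include <iostream>

    static void report(int)   { std::cout << "int overload\n"; }
    static void report(char*) { std::cout << "pointer overload\n"; }

    int main() {
      // report(NULL);   // NULL is 0 or 0L: picks report(int) or is ambiguous
      report(nullptr);   // std::nullptr_t always selects the pointer overload
      report(0);         // an integer literal selects the int overload
      return 0;
    }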

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2003, 2021, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2003, 2023, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -540,10 +540,10 @@ class VM_RedefineClasses: public VM_Operation {
static bool is_modifiable_class(oop klass_mirror);
static jint get_cached_class_file_len(JvmtiCachedClassFileData *cache) {
return cache == NULL ? 0 : cache->length;
return cache == nullptr ? 0 : cache->length;
}
static unsigned char * get_cached_class_file_bytes(JvmtiCachedClassFileData *cache) {
return cache == NULL ? NULL : cache->data;
return cache == nullptr ? nullptr : cache->data;
}
// Error printing
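The two accessors just above fold the null case into a harmless default (zero length, null data) so callers never have to test the cache pointer themselves. A tiny sketch of the same guard style with a made-up Cache type:

    struct Cache {
      int            length;
      unsigned char* data;
    };

    // A missing cache reads as "zero bytes" rather than being an error,
    // matching the accessors in the hunk above.
    static int cache_len(const Cache* cache) {
      return cache == nullptr ? 0 : cache->length;
    }

    static unsigned char* cache_bytes(const Cache* cache) {
      return cache == nullptr ? nullptr : cache->data;
    }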

View File

@ -80,7 +80,7 @@ JvmtiTagMap::JvmtiTagMap(JvmtiEnv* env) :
_posting_events(false) {
assert(JvmtiThreadState_lock->is_locked(), "sanity check");
assert(((JvmtiEnvBase *)env)->tag_map() == NULL, "tag map already exists for environment");
assert(((JvmtiEnvBase *)env)->tag_map() == nullptr, "tag map already exists for environment");
_hashmap = new JvmtiTagMapTable();
@ -93,11 +93,11 @@ JvmtiTagMap::~JvmtiTagMap() {
// no lock acquired as we assume the enclosing environment is
// also being destroyed.
((JvmtiEnvBase *)_env)->set_tag_map(NULL);
((JvmtiEnvBase *)_env)->set_tag_map(nullptr);
// finally destroy the hashmap
delete _hashmap;
_hashmap = NULL;
_hashmap = nullptr;
}
// Called by env_dispose() to reclaim memory before deallocation.
@ -112,10 +112,10 @@ void JvmtiTagMap::clear() {
// doesn't exist then it is created.
JvmtiTagMap* JvmtiTagMap::tag_map_for(JvmtiEnv* env) {
JvmtiTagMap* tag_map = ((JvmtiEnvBase*)env)->tag_map_acquire();
if (tag_map == NULL) {
if (tag_map == nullptr) {
MutexLocker mu(JvmtiThreadState_lock);
tag_map = ((JvmtiEnvBase*)env)->tag_map();
if (tag_map == NULL) {
if (tag_map == nullptr) {
tag_map = new JvmtiTagMap(env);
}
} else {
@ -143,7 +143,7 @@ void JvmtiTagMap::check_hashmap(GrowableArray<jlong>* objects) {
if (is_empty()) { return; }
if (_needs_cleaning &&
objects != NULL &&
objects != nullptr &&
env()->is_enabled(JVMTI_EVENT_OBJECT_FREE)) {
remove_dead_entries_locked(objects);
}
@ -156,9 +156,9 @@ void JvmtiTagMap::check_hashmaps_for_heapwalk(GrowableArray<jlong>* objects) {
// Verify that the tag map tables are valid and unconditionally post events
// that are expected to be posted before gc_notification.
JvmtiEnvIterator it;
for (JvmtiEnv* env = it.first(); env != NULL; env = it.next(env)) {
for (JvmtiEnv* env = it.first(); env != nullptr; env = it.next(env)) {
JvmtiTagMap* tag_map = env->tag_map_acquire();
if (tag_map != NULL) {
if (tag_map != nullptr) {
// The ZDriver may be walking the hashmaps concurrently so this lock is needed.
MutexLocker ml(tag_map->lock(), Mutex::_no_safepoint_check_flag);
tag_map->check_hashmap(objects);
@ -340,7 +340,7 @@ void JvmtiTagMap::set_tag(jobject object, jlong tag) {
// SetTag should not post events because the JavaThread has to
// transition to native for the callback and this cannot stop for
// safepoints with the hashmap lock held.
check_hashmap(NULL); /* don't collect dead objects */
check_hashmap(nullptr); /* don't collect dead objects */
// resolve the object
oop o = JNIHandles::resolve_non_null(object);
@ -375,7 +375,7 @@ jlong JvmtiTagMap::get_tag(jobject object) {
// GetTag should not post events because the JavaThread has to
// transition to native for the callback and this cannot stop for
// safepoints with the hashmap lock held.
check_hashmap(NULL); /* don't collect dead objects */
check_hashmap(nullptr); /* don't collect dead objects */
// resolve the object
oop o = JNIHandles::resolve_non_null(object);
@ -535,7 +535,7 @@ JvmtiCachedClassFieldMap::JvmtiCachedClassFieldMap(ClassFieldMap* field_map) {
}
JvmtiCachedClassFieldMap::~JvmtiCachedClassFieldMap() {
if (_field_map != NULL) {
if (_field_map != nullptr) {
delete _field_map;
}
}
@ -563,7 +563,7 @@ bool ClassFieldMapCacheMark::_is_active;
// record that the given InstanceKlass is caching a field map
void JvmtiCachedClassFieldMap::add_to_class_list(InstanceKlass* ik) {
if (_class_list == NULL) {
if (_class_list == nullptr) {
_class_list = new (mtServiceability)
GrowableArray<InstanceKlass*>(initial_class_count, mtServiceability);
}
@ -581,8 +581,8 @@ ClassFieldMap* JvmtiCachedClassFieldMap::get_map_of_instance_fields(oop obj) {
// return cached map if possible
JvmtiCachedClassFieldMap* cached_map = ik->jvmti_cached_class_field_map();
if (cached_map != NULL) {
assert(cached_map->field_map() != NULL, "missing field list");
if (cached_map != nullptr) {
assert(cached_map->field_map() != nullptr, "missing field list");
return cached_map->field_map();
} else {
ClassFieldMap* field_map = ClassFieldMap::create_map_of_instance_fields(obj);
@ -596,22 +596,22 @@ ClassFieldMap* JvmtiCachedClassFieldMap::get_map_of_instance_fields(oop obj) {
// remove the fields maps cached from all instanceKlasses
void JvmtiCachedClassFieldMap::clear_cache() {
assert(Thread::current()->is_VM_thread(), "must be VMThread");
if (_class_list != NULL) {
if (_class_list != nullptr) {
for (int i = 0; i < _class_list->length(); i++) {
InstanceKlass* ik = _class_list->at(i);
JvmtiCachedClassFieldMap* cached_map = ik->jvmti_cached_class_field_map();
assert(cached_map != NULL, "should not be NULL");
ik->set_jvmti_cached_class_field_map(NULL);
assert(cached_map != nullptr, "should not be null");
ik->set_jvmti_cached_class_field_map(nullptr);
delete cached_map; // deletes the encapsulated field map
}
delete _class_list;
_class_list = NULL;
_class_list = nullptr;
}
}
// returns the number of ClassFieldMap cached by instanceKlasses
int JvmtiCachedClassFieldMap::cached_field_map_count() {
return (_class_list == NULL) ? 0 : _class_list->length();
return (_class_list == nullptr) ? 0 : _class_list->length();
}
// helper function to indicate if an object is filtered by its tag or class tag
@ -638,7 +638,7 @@ static inline bool is_filtered_by_heap_filter(jlong obj_tag,
// helper function to indicate if an object is filtered by a klass filter
static inline bool is_filtered_by_klass_filter(oop obj, Klass* klass_filter) {
if (klass_filter != NULL) {
if (klass_filter != nullptr) {
if (obj->klass() != klass_filter) {
return true;
}
@ -679,7 +679,7 @@ static jint invoke_string_value_callback(jvmtiStringPrimitiveValueCallback cb,
// JDK-6584008: the value field may be null if a String instance is
// partially constructed.
if (s_value == NULL) {
if (s_value == nullptr) {
return 0;
}
// get the string value and length
@ -942,12 +942,12 @@ void IterateOverHeapObjectClosure::do_object(oop o) {
if (is_iteration_aborted()) return;
// instanceof check when filtering by klass
if (klass() != NULL && !o->is_a(klass())) {
if (klass() != nullptr && !o->is_a(klass())) {
return;
}
// skip if object is a dormant shared object whose mirror hasn't been loaded
if (o != NULL && o->klass()->java_mirror() == NULL) {
if (o != nullptr && o->klass()->java_mirror() == nullptr) {
log_debug(cds, heap)("skipped dormant archived object " INTPTR_FORMAT " (%s)", p2i(o),
o->klass()->external_name());
return;
@ -1033,7 +1033,7 @@ void IterateThroughHeapObjectClosure::do_object(oop obj) {
if (is_filtered_by_klass_filter(obj, klass())) return;
// skip if object is a dormant shared object whose mirror hasn't been loaded
if (obj != NULL && obj->klass()->java_mirror() == NULL) {
if (obj != nullptr && obj->klass()->java_mirror() == nullptr) {
log_debug(cds, heap)("skipped dormant archived object " INTPTR_FORMAT " (%s)", p2i(obj),
obj->klass()->external_name());
return;
@ -1052,7 +1052,7 @@ void IterateThroughHeapObjectClosure::do_object(oop obj) {
int len = is_array ? arrayOop(obj)->length() : -1;
// invoke the object callback (if callback is provided)
if (callbacks()->heap_iteration_callback != NULL) {
if (callbacks()->heap_iteration_callback != nullptr) {
jvmtiHeapIterationCallback cb = callbacks()->heap_iteration_callback;
jint res = (*cb)(wrapper.klass_tag(),
wrapper.obj_size(),
@ -1063,7 +1063,7 @@ void IterateThroughHeapObjectClosure::do_object(oop obj) {
}
// for objects and classes we report primitive fields if callback provided
if (callbacks()->primitive_field_callback != NULL && obj->is_instance()) {
if (callbacks()->primitive_field_callback != nullptr && obj->is_instance()) {
jint res;
jvmtiPrimitiveFieldCallback cb = callbacks()->primitive_field_callback;
if (obj->klass() == vmClasses::Class_klass()) {
@ -1082,7 +1082,7 @@ void IterateThroughHeapObjectClosure::do_object(oop obj) {
// string callback
if (!is_array &&
callbacks()->string_primitive_value_callback != NULL &&
callbacks()->string_primitive_value_callback != nullptr &&
obj->klass() == vmClasses::String_klass()) {
jint res = invoke_string_value_callback(
callbacks()->string_primitive_value_callback,
@ -1094,7 +1094,7 @@ void IterateThroughHeapObjectClosure::do_object(oop obj) {
// array callback
if (is_array &&
callbacks()->array_primitive_value_callback != NULL &&
callbacks()->array_primitive_value_callback != nullptr &&
obj->is_typeArray()) {
jint res = invoke_array_primitive_value_callback(
callbacks()->array_primitive_value_callback,
@ -1166,10 +1166,10 @@ void JvmtiTagMap::remove_dead_entries_locked(GrowableArray<jlong>* objects) {
if (_needs_cleaning) {
// Recheck whether to post object free events under the lock.
if (!env()->is_enabled(JVMTI_EVENT_OBJECT_FREE)) {
objects = NULL;
objects = nullptr;
}
log_info(jvmti, table)("TagMap table needs cleaning%s",
((objects != NULL) ? " and posting" : ""));
((objects != nullptr) ? " and posting" : ""));
hashmap()->remove_dead_entries(objects);
_needs_cleaning = false;
}
@ -1182,7 +1182,7 @@ void JvmtiTagMap::remove_dead_entries(GrowableArray<jlong>* objects) {
void JvmtiTagMap::post_dead_objects(GrowableArray<jlong>* const objects) {
assert(Thread::current()->is_Java_thread(), "Must post from JavaThread");
if (objects != NULL && objects->length() > 0) {
if (objects != nullptr && objects->length() > 0) {
JvmtiExport::post_object_free(env(), objects);
log_info(jvmti, table)("%d free object posted", objects->length());
}
@ -1220,7 +1220,7 @@ void JvmtiTagMap::flush_object_free_events() {
ml.notify_all();
}
} else {
remove_dead_entries(NULL);
remove_dead_entries(nullptr);
}
}
@ -1266,12 +1266,12 @@ class TagObjectCollector : public JvmtiTagMapKeyClosure {
// SATB marking similar to other j.l.ref.Reference referents. This is
// achieved by using a phantom load in the object() accessor.
oop o = key.object();
if (o == NULL) {
if (o == nullptr) {
_some_dead_found = true;
// skip this whole entry
return true;
}
assert(o != NULL && Universe::heap()->is_in(o), "sanity check");
assert(o != nullptr && Universe::heap()->is_in(o), "sanity check");
jobject ref = JNIHandles::make_local(_thread, o);
_object_results->append(ref);
_tag_results->append(value);
@ -1287,9 +1287,9 @@ class TagObjectCollector : public JvmtiTagMapKeyClosure {
int count = _object_results->length();
assert(count >= 0, "sanity check");
// if object_result_ptr is not NULL then allocate the result and copy
// if object_result_ptr is not null then allocate the result and copy
// in the object references.
if (object_result_ptr != NULL) {
if (object_result_ptr != nullptr) {
error = _env->Allocate(count * sizeof(jobject), (unsigned char**)object_result_ptr);
if (error != JVMTI_ERROR_NONE) {
return error;
@ -1299,12 +1299,12 @@ class TagObjectCollector : public JvmtiTagMapKeyClosure {
}
}
// if tag_result_ptr is not NULL then allocate the result and copy
// if tag_result_ptr is not null then allocate the result and copy
// in the tag values.
if (tag_result_ptr != NULL) {
if (tag_result_ptr != nullptr) {
error = _env->Allocate(count * sizeof(jlong), (unsigned char**)tag_result_ptr);
if (error != JVMTI_ERROR_NONE) {
if (object_result_ptr != NULL) {
if (object_result_ptr != nullptr) {
_env->Deallocate((unsigned char*)object_result_ptr);
}
return error;
@ -1384,7 +1384,7 @@ class BasicHeapWalkContext: public HeapWalkContext {
_heap_root_callback(heap_root_callback),
_stack_ref_callback(stack_ref_callback),
_object_ref_callback(object_ref_callback),
_last_referrer(NULL),
_last_referrer(nullptr),
_last_referrer_tag(0) {
}
@ -1587,7 +1587,7 @@ void CallbackInvoker::initialize_for_advanced_heap_walk(JvmtiTagMap* tag_map,
inline bool CallbackInvoker::invoke_basic_heap_root_callback(jvmtiHeapRootKind root_kind, oop obj) {
// if heap roots should be reported
jvmtiHeapRootCallback cb = basic_context()->heap_root_callback();
if (cb == NULL) {
if (cb == nullptr) {
return check_for_visit(obj);
}
@ -1599,7 +1599,7 @@ inline bool CallbackInvoker::invoke_basic_heap_root_callback(jvmtiHeapRootKind r
(void*)user_data());
// push root to visit stack when following references
if (control == JVMTI_ITERATION_CONTINUE &&
basic_context()->object_ref_callback() != NULL) {
basic_context()->object_ref_callback() != nullptr) {
visit_stack()->push(obj);
}
return control != JVMTI_ITERATION_ABORT;
@ -1614,7 +1614,7 @@ inline bool CallbackInvoker::invoke_basic_stack_ref_callback(jvmtiHeapRootKind r
oop obj) {
// if stack refs should be reported
jvmtiStackReferenceCallback cb = basic_context()->stack_ref_callback();
if (cb == NULL) {
if (cb == nullptr) {
return check_for_visit(obj);
}
@ -1630,7 +1630,7 @@ inline bool CallbackInvoker::invoke_basic_stack_ref_callback(jvmtiHeapRootKind r
(void*)user_data());
// push root to visit stack when following references
if (control == JVMTI_ITERATION_CONTINUE &&
basic_context()->object_ref_callback() != NULL) {
basic_context()->object_ref_callback() != nullptr) {
visit_stack()->push(obj);
}
return control != JVMTI_ITERATION_ABORT;
@ -1687,7 +1687,7 @@ inline bool CallbackInvoker::invoke_advanced_heap_root_callback(jvmtiHeapReferen
// check that callback is provided
jvmtiHeapReferenceCallback cb = context->heap_reference_callback();
if (cb == NULL) {
if (cb == nullptr) {
return check_for_visit(obj);
}
@ -1711,12 +1711,12 @@ inline bool CallbackInvoker::invoke_advanced_heap_root_callback(jvmtiHeapReferen
// invoke the callback
jint res = (*cb)(ref_kind,
NULL, // referrer info
nullptr, // referrer info
wrapper.klass_tag(),
0, // referrer_class_tag is 0 for heap root
wrapper.obj_size(),
wrapper.obj_tag_p(),
NULL, // referrer_tag_p
nullptr, // referrer_tag_p
len,
(void*)user_data());
if (res & JVMTI_VISIT_ABORT) {
@ -1741,7 +1741,7 @@ inline bool CallbackInvoker::invoke_advanced_stack_ref_callback(jvmtiHeapReferen
// check that callback is provided
jvmtiHeapReferenceCallback cb = context->heap_reference_callback();
if (cb == NULL) {
if (cb == nullptr) {
return check_for_visit(obj);
}
@ -1779,7 +1779,7 @@ inline bool CallbackInvoker::invoke_advanced_stack_ref_callback(jvmtiHeapReferen
0, // referrer_class_tag is 0 for heap root (stack)
wrapper.obj_size(),
wrapper.obj_tag_p(),
NULL, // referrer_tag is 0 for root
nullptr, // referrer_tag is 0 for root
len,
(void*)user_data());
@ -1793,7 +1793,7 @@ inline bool CallbackInvoker::invoke_advanced_stack_ref_callback(jvmtiHeapReferen
}
// This mask is used to pass reference_info to a jvmtiHeapReferenceCallback
// only for ref_kinds defined by the JVM TI spec. Otherwise, NULL is passed.
// only for ref_kinds defined by the JVM TI spec. Otherwise, null is passed.
#define REF_INFO_MASK ((1 << JVMTI_HEAP_REFERENCE_FIELD) \
| (1 << JVMTI_HEAP_REFERENCE_STATIC_FIELD) \
| (1 << JVMTI_HEAP_REFERENCE_ARRAY_ELEMENT) \
@ -1814,7 +1814,7 @@ inline bool CallbackInvoker::invoke_advanced_object_reference_callback(jvmtiHeap
// check that callback is provided
jvmtiHeapReferenceCallback cb = context->heap_reference_callback();
if (cb == NULL) {
if (cb == nullptr) {
return check_for_visit(obj);
}
@ -1841,7 +1841,7 @@ inline bool CallbackInvoker::invoke_advanced_object_reference_callback(jvmtiHeap
// invoke the callback
int res = (*cb)(ref_kind,
(REF_INFO_MASK & (1 << ref_kind)) ? &reference_info : NULL,
(REF_INFO_MASK & (1 << ref_kind)) ? &reference_info : nullptr,
wrapper.klass_tag(),
wrapper.referrer_klass_tag(),
wrapper.obj_size(),
@ -1880,7 +1880,7 @@ inline bool CallbackInvoker::report_primitive_array_values(oop obj) {
assert(obj->is_typeArray(), "not a primitive array");
AdvancedHeapWalkContext* context = advanced_context();
assert(context->array_primitive_value_callback() != NULL, "no callback");
assert(context->array_primitive_value_callback() != nullptr, "no callback");
// apply class filter
if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
@ -1909,7 +1909,7 @@ inline bool CallbackInvoker::report_string_value(oop str) {
assert(str->klass() == vmClasses::String_klass(), "not a string");
AdvancedHeapWalkContext* context = advanced_context();
assert(context->string_primitive_value_callback() != NULL, "no callback");
assert(context->string_primitive_value_callback() != nullptr, "no callback");
// apply class filter
if (is_filtered_by_klass_filter(str, context->klass_filter())) {
@ -1944,7 +1944,7 @@ inline bool CallbackInvoker::report_primitive_field(jvmtiHeapReferenceKind ref_k
static jvmtiHeapReferenceInfo reference_info = { 0 };
AdvancedHeapWalkContext* context = advanced_context();
assert(context->primitive_field_callback() != NULL, "no callback");
assert(context->primitive_field_callback() != nullptr, "no callback");
// apply class filter
if (is_filtered_by_klass_filter(obj, context->klass_filter())) {
@ -2171,7 +2171,7 @@ class SimpleRootsClosure : public OopClosure {
oop o = NativeAccess<AS_NO_KEEPALIVE>::oop_load(obj_p);
// ignore null
if (o == NULL) {
if (o == nullptr) {
return;
}
@ -2215,7 +2215,7 @@ class JNILocalRootsClosure : public OopClosure {
oop o = *obj_p;
// ignore null
if (o == NULL) {
if (o == nullptr) {
return;
}
@ -2321,7 +2321,7 @@ VM_HeapWalkOperation::VM_HeapWalkOperation(JvmtiTagMap* tag_map,
_is_advanced_heap_walk = false;
_tag_map = tag_map;
_initial_object = initial_object;
_following_object_refs = (callbacks.object_ref_callback() != NULL);
_following_object_refs = (callbacks.object_ref_callback() != nullptr);
_reporting_primitive_fields = false;
_reporting_primitive_array_values = false;
_reporting_string_values = false;
@ -2340,9 +2340,9 @@ VM_HeapWalkOperation::VM_HeapWalkOperation(JvmtiTagMap* tag_map,
_tag_map = tag_map;
_initial_object = initial_object;
_following_object_refs = true;
_reporting_primitive_fields = (callbacks.primitive_field_callback() != NULL);;
_reporting_primitive_array_values = (callbacks.array_primitive_value_callback() != NULL);;
_reporting_string_values = (callbacks.string_primitive_value_callback() != NULL);;
_reporting_primitive_fields = (callbacks.primitive_field_callback() != nullptr);;
_reporting_primitive_array_values = (callbacks.array_primitive_value_callback() != nullptr);;
_reporting_string_values = (callbacks.string_primitive_value_callback() != nullptr);;
_visit_stack = create_visit_stack();
_dead_objects = objects;
CallbackInvoker::initialize_for_advanced_heap_walk(tag_map, _visit_stack, user_data, callbacks, &_bitset);
@ -2350,9 +2350,9 @@ VM_HeapWalkOperation::VM_HeapWalkOperation(JvmtiTagMap* tag_map,
VM_HeapWalkOperation::~VM_HeapWalkOperation() {
if (_following_object_refs) {
assert(_visit_stack != NULL, "checking");
assert(_visit_stack != nullptr, "checking");
delete _visit_stack;
_visit_stack = NULL;
_visit_stack = nullptr;
}
}
@ -2371,7 +2371,7 @@ inline bool VM_HeapWalkOperation::iterate_over_array(oop o) {
// non-null element
for (int index=0; index<array->length(); index++) {
oop elem = array->obj_at(index);
if (elem == NULL) {
if (elem == nullptr) {
continue;
}
@ -2436,7 +2436,7 @@ inline bool VM_HeapWalkOperation::iterate_over_class(oop java_class) {
// super (only if something more interesting than java.lang.Object)
InstanceKlass* java_super = ik->java_super();
if (java_super != NULL && java_super != vmClasses::Object_klass()) {
if (java_super != nullptr && java_super != vmClasses::Object_klass()) {
oop super = java_super->java_mirror();
if (!CallbackInvoker::report_superclass_reference(mirror, super)) {
return false;
@ -2445,7 +2445,7 @@ inline bool VM_HeapWalkOperation::iterate_over_class(oop java_class) {
// class loader
oop cl = ik->class_loader();
if (cl != NULL) {
if (cl != nullptr) {
if (!CallbackInvoker::report_class_loader_reference(mirror, cl)) {
return false;
}
@ -2453,7 +2453,7 @@ inline bool VM_HeapWalkOperation::iterate_over_class(oop java_class) {
// protection domain
oop pd = ik->protection_domain();
if (pd != NULL) {
if (pd != nullptr) {
if (!CallbackInvoker::report_protection_domain_reference(mirror, pd)) {
return false;
}
@ -2461,7 +2461,7 @@ inline bool VM_HeapWalkOperation::iterate_over_class(oop java_class) {
// signers
oop signers = ik->signers();
if (signers != NULL) {
if (signers != nullptr) {
if (!CallbackInvoker::report_signers_reference(mirror, signers)) {
return false;
}
@ -2477,7 +2477,7 @@ inline bool VM_HeapWalkOperation::iterate_over_class(oop java_class) {
if (tag.is_string()) {
entry = pool->resolved_string_at(i);
// If the entry is non-null it is resolved.
if (entry == NULL) {
if (entry == nullptr) {
continue;
}
} else if (tag.is_klass()) {
@ -2488,7 +2488,7 @@ inline bool VM_HeapWalkOperation::iterate_over_class(oop java_class) {
assert(tag.is_unresolved_klass(), "must be");
constantPoolHandle cp(Thread::current(), pool);
Klass* klass = ConstantPool::klass_at_if_loaded(cp, i);
if (klass == NULL) {
if (klass == nullptr) {
continue;
}
entry = klass->java_mirror();
@ -2506,7 +2506,7 @@ inline bool VM_HeapWalkOperation::iterate_over_class(oop java_class) {
Array<InstanceKlass*>* interfaces = ik->local_interfaces();
for (i = 0; i < interfaces->length(); i++) {
oop interf = interfaces->at(i)->java_mirror();
if (interf == NULL) {
if (interf == nullptr) {
continue;
}
if (!CallbackInvoker::report_interface_reference(mirror, interf)) {
@ -2523,7 +2523,7 @@ inline bool VM_HeapWalkOperation::iterate_over_class(oop java_class) {
if (!is_primitive_field_type(type)) {
oop fld_o = mirror->obj_field(field->field_offset());
assert(verify_static_oop(ik, mirror, field->field_offset()), "sanity check");
if (fld_o != NULL) {
if (fld_o != nullptr) {
int slot = field->field_index();
if (!CallbackInvoker::report_static_field_reference(mirror, fld_o, slot)) {
delete field_map;
@ -2566,7 +2566,7 @@ inline bool VM_HeapWalkOperation::iterate_over_object(oop o) {
if (!is_primitive_field_type(type)) {
oop fld_o = o->obj_field_access<AS_NO_KEEPALIVE | ON_UNKNOWN_OOP_REF>(field->field_offset());
// ignore any objects that aren't visible to profiler
if (fld_o != NULL) {
if (fld_o != nullptr) {
assert(Universe::heap()->is_in(fld_o), "unsafe code should not "
"have references to Klass* anymore");
int slot = field->field_index();
@ -2643,7 +2643,7 @@ inline bool VM_HeapWalkOperation::collect_stack_roots(JavaThread* java_thread,
JNILocalRootsClosure* blk)
{
oop threadObj = java_thread->threadObj();
assert(threadObj != NULL, "sanity check");
assert(threadObj != nullptr, "sanity check");
// only need to get the thread's tag once per thread
jlong thread_tag = tag_for(_tag_map, threadObj);
@ -2668,9 +2668,9 @@ inline bool VM_HeapWalkOperation::collect_stack_roots(JavaThread* java_thread,
bool is_top_frame = true;
int depth = 0;
frame* last_entry_frame = NULL;
frame* last_entry_frame = nullptr;
while (vf != NULL) {
while (vf != nullptr) {
if (vf->is_java_frame()) {
// java frame (interpreted, compiled, ...)
@ -2685,7 +2685,7 @@ inline bool VM_HeapWalkOperation::collect_stack_roots(JavaThread* java_thread,
for (int slot=0; slot<locals->size(); slot++) {
if (locals->at(slot)->type() == T_OBJECT) {
oop o = locals->obj_at(slot)();
if (o == NULL) {
if (o == nullptr) {
continue;
}
@ -2701,7 +2701,7 @@ inline bool VM_HeapWalkOperation::collect_stack_roots(JavaThread* java_thread,
for (int index=0; index < exprs->size(); index++) {
if (exprs->at(index)->type() == T_OBJECT) {
oop o = exprs->obj_at(index)();
if (o == NULL) {
if (o == nullptr) {
continue;
}
@ -2714,7 +2714,7 @@ inline bool VM_HeapWalkOperation::collect_stack_roots(JavaThread* java_thread,
}
// Follow oops from compiled nmethod
if (jvf->cb() != NULL && jvf->cb()->is_nmethod()) {
if (jvf->cb() != nullptr && jvf->cb()->is_nmethod()) {
blk->set_context(thread_tag, tid, depth, method);
jvf->cb()->as_nmethod()->oops_do(blk);
}
@ -2724,20 +2724,20 @@ inline bool VM_HeapWalkOperation::collect_stack_roots(JavaThread* java_thread,
// JNI locals for the top frame.
java_thread->active_handles()->oops_do(blk);
} else {
if (last_entry_frame != NULL) {
if (last_entry_frame != nullptr) {
// JNI locals for the entry frame
assert(last_entry_frame->is_entry_frame(), "checking");
last_entry_frame->entry_frame_call_wrapper()->handles()->oops_do(blk);
}
}
}
last_entry_frame = NULL;
last_entry_frame = nullptr;
depth++;
} else {
// externalVFrame - for an entry frame we report the JNI locals
// when we find the corresponding javaVFrame
frame* fr = vf->frame_pointer();
assert(fr != NULL, "sanity check");
assert(fr != nullptr, "sanity check");
if (fr->is_entry_frame()) {
last_entry_frame = fr;
}
@ -2748,7 +2748,7 @@ inline bool VM_HeapWalkOperation::collect_stack_roots(JavaThread* java_thread,
}
} else {
// no last java frame but there may be JNI locals
blk->set_context(thread_tag, tid, 0, (jmethodID)NULL);
blk->set_context(thread_tag, tid, 0, (jmethodID)nullptr);
java_thread->active_handles()->oops_do(blk);
}
return true;
@ -2762,7 +2762,7 @@ inline bool VM_HeapWalkOperation::collect_stack_roots() {
JNILocalRootsClosure blk;
for (JavaThreadIteratorWithHandle jtiwh; JavaThread *thread = jtiwh.next(); ) {
oop threadObj = thread->threadObj();
if (threadObj != NULL && !thread->is_exiting() && !thread->is_hidden_from_external_view()) {
if (threadObj != nullptr && !thread->is_exiting() && !thread->is_hidden_from_external_view()) {
// Collect the simple root for this thread before we
// collect its stack roots
if (!CallbackInvoker::report_simple_root(JVMTI_HEAP_REFERENCE_THREAD,
@ -2878,7 +2878,7 @@ void JvmtiTagMap::iterate_over_objects_reachable_from_object(jobject object,
GrowableArray<jlong> dead_objects(&dead_object_arena, 10, 0, 0);
{
MutexLocker ml(Heap_lock);
BasicHeapWalkContext context(NULL, NULL, object_ref_callback);
BasicHeapWalkContext context(nullptr, nullptr, object_ref_callback);
VM_HeapWalkOperation op(this, initial_object, context, user_data, &dead_objects);
VMThread::execute(&op);
}
@ -2925,9 +2925,9 @@ void JvmtiTagMap::set_needs_cleaning() {
DEBUG_ONLY(notified_needs_cleaning = true;)
JvmtiEnvIterator it;
for (JvmtiEnv* env = it.first(); env != NULL; env = it.next(env)) {
for (JvmtiEnv* env = it.first(); env != nullptr; env = it.next(env)) {
JvmtiTagMap* tag_map = env->tag_map_acquire();
if (tag_map != NULL) {
if (tag_map != nullptr) {
tag_map->_needs_cleaning = !tag_map->is_empty();
}
}
@ -2947,9 +2947,9 @@ void JvmtiTagMap::gc_notification(size_t num_dead_entries) {
// If no dead entries then cancel cleaning requests.
if (num_dead_entries == 0) {
JvmtiEnvIterator it;
for (JvmtiEnv* env = it.first(); env != NULL; env = it.next(env)) {
for (JvmtiEnv* env = it.first(); env != nullptr; env = it.next(env)) {
JvmtiTagMap* tag_map = env->tag_map_acquire();
if (tag_map != NULL) {
if (tag_map != nullptr) {
MutexLocker ml (tag_map->lock(), Mutex::_no_safepoint_check_flag);
tag_map->_needs_cleaning = false;
}
@ -2969,9 +2969,9 @@ bool JvmtiTagMap::has_object_free_events_and_reset() {
void JvmtiTagMap::flush_all_object_free_events() {
JavaThread* thread = JavaThread::current();
JvmtiEnvIterator it;
for (JvmtiEnv* env = it.first(); env != NULL; env = it.next(env)) {
for (JvmtiEnv* env = it.first(); env != nullptr; env = it.next(env)) {
JvmtiTagMap* tag_map = env->tag_map_acquire();
if (tag_map != NULL) {
if (tag_map != nullptr) {
tag_map->flush_object_free_events();
ThreadBlockInVM tbiv(thread); // Be safepoint-polite while looping.
}
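Among the hunks above, tag_map_for keeps the check, lock, re-check shape for lazily creating a per-environment tag map: an acquire load as the fast path, then a second look under JvmtiThreadState_lock before allocating. A standalone sketch of that shape, with std::atomic and std::mutex standing in for the HotSpot primitives and TagMap as a placeholder type:

    #include <atomic>
    #include <mutex>

    struct TagMap { /* payload elided */ };

    static std::atomic<TagMap*> cached_map{nullptr};
    static std::mutex           map_lock;

    TagMap* tag_map_for() {
      TagMap* map = cached_map.load(std::memory_order_acquire);  // fast path
      if (map == nullptr) {
        std::lock_guard<std::mutex> guard(map_lock);
        map = cached_map.load(std::memory_order_relaxed);        // re-check under the lock
        if (map == nullptr) {
          map = new TagMap();
          cached_map.store(map, std::memory_order_release);      // publish exactly once
        }
      }
      return map;
    }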

View File

@ -134,8 +134,8 @@ void JvmtiTagMapTable::remove_dead_entries(GrowableArray<jlong>* objects) {
GrowableArray<jlong>* _objects;
IsDead(GrowableArray<jlong>* objects) : _objects(objects) {}
bool do_entry(const JvmtiTagMapKey& entry, jlong tag) {
if (entry.object_no_keepalive() == NULL) {
if (_objects != NULL) {
if (entry.object_no_keepalive() == nullptr) {
if (_objects != nullptr) {
_objects->append(tag);
}
return true;
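The IsDead closure above drops entries whose weak reference now reads as null and, only when a collector array was supplied, records their tags so the caller can post them as freed objects later. A rough sketch of that filter-and-collect step over a plain container (the Entry type and its bool flag are stand-ins for the weak-oop check):

    #include <vector>

    struct Entry {
      bool dead;   // stands in for "object_no_keepalive() == nullptr"
      long tag;
    };

    void remove_dead_entries(std::vector<Entry>& table, std::vector<long>* dead_tags) {
      for (auto it = table.begin(); it != table.end(); ) {
        if (it->dead) {
          if (dead_tags != nullptr) {   // collecting the tags is optional, as above
            dead_tags->push_back(it->tag);
          }
          it = table.erase(it);
        } else {
          ++it;
        }
      }
    }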

View File

@ -55,7 +55,7 @@ class JvmtiTagMapKey : public CHeapObj<mtServiceability> {
oop object_no_keepalive() const;
static unsigned get_hash(const JvmtiTagMapKey& entry) {
assert(entry._obj != NULL, "must lookup obj to hash");
assert(entry._obj != nullptr, "must lookup obj to hash");
return entry._obj->identity_hash();
}

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2003, 2022, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2003, 2023, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -51,28 +51,28 @@ static const int UNKNOWN_STACK_DEPTH = -99;
// Thread local storage for JVMTI.
//
JvmtiThreadState *JvmtiThreadState::_head = NULL;
JvmtiThreadState *JvmtiThreadState::_head = nullptr;
JvmtiThreadState::JvmtiThreadState(JavaThread* thread, oop thread_oop)
: _thread_event_enable() {
assert(JvmtiThreadState_lock->is_locked(), "sanity check");
_thread = thread;
_thread_saved = NULL;
_thread_saved = nullptr;
_exception_state = ES_CLEARED;
_debuggable = true;
_hide_single_stepping = false;
_pending_interp_only_mode = false;
_hide_level = 0;
_pending_step_for_popframe = false;
_class_being_redefined = NULL;
_class_being_redefined = nullptr;
_class_load_kind = jvmti_class_load_kind_load;
_classes_being_redefined = NULL;
_head_env_thread_state = NULL;
_dynamic_code_event_collector = NULL;
_vm_object_alloc_event_collector = NULL;
_sampled_object_alloc_event_collector = NULL;
_the_class_for_redefinition_verification = NULL;
_scratch_class_for_redefinition_verification = NULL;
_classes_being_redefined = nullptr;
_head_env_thread_state = nullptr;
_dynamic_code_event_collector = nullptr;
_vm_object_alloc_event_collector = nullptr;
_sampled_object_alloc_event_collector = nullptr;
_the_class_for_redefinition_verification = nullptr;
_scratch_class_for_redefinition_verification = nullptr;
_cur_stack_depth = UNKNOWN_STACK_DEPTH;
_saved_interp_only_mode = 0;
@ -81,9 +81,9 @@ JvmtiThreadState::JvmtiThreadState(JavaThread* thread, oop thread_oop)
_earlyret_state = earlyret_inactive;
_earlyret_tos = ilgl;
_earlyret_value.j = 0L;
_earlyret_oop = NULL;
_earlyret_oop = nullptr;
_jvmti_event_queue = NULL;
_jvmti_event_queue = nullptr;
_is_in_VTMS_transition = false;
_is_virtual = false;
@ -92,7 +92,7 @@ JvmtiThreadState::JvmtiThreadState(JavaThread* thread, oop thread_oop)
// add all the JvmtiEnvThreadState to the new JvmtiThreadState
{
JvmtiEnvIterator it;
for (JvmtiEnvBase* env = it.first(); env != NULL; env = it.next(env)) {
for (JvmtiEnvBase* env = it.first(); env != nullptr; env = it.next(env)) {
if (env->is_valid()) {
add_env(env);
}
@ -105,21 +105,21 @@ JvmtiThreadState::JvmtiThreadState(JavaThread* thread, oop thread_oop)
// See periodic_clean_up().
debug_only(NoSafepointVerifier nosafepoint;)
_prev = NULL;
_prev = nullptr;
_next = _head;
if (_head != NULL) {
if (_head != nullptr) {
_head->_prev = this;
}
_head = this;
}
if (thread_oop != NULL) {
if (thread_oop != nullptr) {
java_lang_Thread::set_jvmti_thread_state(thread_oop, this);
_is_virtual = java_lang_VirtualThread::is_instance(thread_oop);
}
if (thread != NULL) {
if (thread_oop == NULL || thread->jvmti_vthread() == NULL || thread->jvmti_vthread() == thread_oop) {
if (thread != nullptr) {
if (thread_oop == nullptr || thread->jvmti_vthread() == nullptr || thread->jvmti_vthread() == thread_oop) {
// The JavaThread for carrier or mounted virtual thread case.
// Set this only if thread_oop is current thread->jvmti_vthread().
thread->set_jvmti_thread_state(this);
@ -132,18 +132,18 @@ JvmtiThreadState::JvmtiThreadState(JavaThread* thread, oop thread_oop)
JvmtiThreadState::~JvmtiThreadState() {
assert(JvmtiThreadState_lock->is_locked(), "sanity check");
if (_classes_being_redefined != NULL) {
if (_classes_being_redefined != nullptr) {
delete _classes_being_redefined; // free the GrowableArray on C heap
}
// clear this as the state for the thread
get_thread()->set_jvmti_thread_state(NULL);
get_thread()->set_jvmti_thread_state(nullptr);
// zap our env thread states
{
JvmtiEnvBase::entering_dying_thread_env_iteration();
JvmtiEnvThreadStateIterator it(this);
for (JvmtiEnvThreadState* ets = it.first(); ets != NULL; ) {
for (JvmtiEnvThreadState* ets = it.first(); ets != nullptr; ) {
JvmtiEnvThreadState* zap = ets;
ets = it.next(ets);
delete zap;
@ -157,21 +157,21 @@ JvmtiThreadState::~JvmtiThreadState() {
// See periodic_clean_up().
debug_only(NoSafepointVerifier nosafepoint;)
if (_prev == NULL) {
if (_prev == nullptr) {
assert(_head == this, "sanity check");
_head = _next;
} else {
assert(_head != this, "sanity check");
_prev->_next = _next;
}
if (_next != NULL) {
if (_next != nullptr) {
_next->_prev = _prev;
}
_next = NULL;
_prev = NULL;
_next = nullptr;
_prev = nullptr;
}
if (get_thread_oop() != NULL) {
java_lang_Thread::set_jvmti_thread_state(get_thread_oop(), NULL);
if (get_thread_oop() != nullptr) {
java_lang_Thread::set_jvmti_thread_state(get_thread_oop(), nullptr);
}
_thread_oop_h.release(JvmtiExport::jvmti_oop_storage());
}
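
Aside for readers tracking the nullptr conversion: the constructor and destructor hunks above are the usual intrusive doubly-linked-list discipline: push the new state at _head, and on teardown treat a nullptr _prev as "this node is the head". A minimal stand-alone sketch of that pattern (plain C++ types, not the JvmtiThreadState fields):

    #include <cassert>

    struct Node {
        Node* _prev = nullptr;
        Node* _next = nullptr;
        static Node* _head;

        Node() {                         // insert at head, as in the constructor hunk
            _prev = nullptr;
            _next = _head;
            if (_head != nullptr) {
                _head->_prev = this;
            }
            _head = this;
        }

        ~Node() {                        // unlink, as in the destructor hunk
            if (_prev == nullptr) {
                assert(_head == this);
                _head = _next;
            } else {
                _prev->_next = _next;
            }
            if (_next != nullptr) {
                _next->_prev = _prev;
            }
            _next = nullptr;
            _prev = nullptr;
        }
    };

    Node* Node::_head = nullptr;

    int main() {
        Node a;
        { Node b; }                      // b unlinks itself; a is head again
        return 0;
    }
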
@ -185,13 +185,13 @@ JvmtiThreadState::periodic_clean_up() {
// because the latter requires the JvmtiThreadState_lock.
// This iteration is safe at a safepoint as well, see the NoSafepointVerifier
// asserts at all list manipulation sites.
for (JvmtiThreadState *state = _head; state != NULL; state = state->next()) {
for (JvmtiThreadState *state = _head; state != nullptr; state = state->next()) {
// For each environment thread state corresponding to an invalid environment
// unlink it from the list and deallocate it.
JvmtiEnvThreadStateIterator it(state);
JvmtiEnvThreadState* previous_ets = NULL;
JvmtiEnvThreadState* previous_ets = nullptr;
JvmtiEnvThreadState* ets = it.first();
while (ets != NULL) {
while (ets != nullptr) {
if (ets->get_env()->is_valid()) {
previous_ets = ets;
ets = it.next(ets);
@ -199,7 +199,7 @@ JvmtiThreadState::periodic_clean_up() {
// This one isn't valid, remove it from the list and deallocate it
JvmtiEnvThreadState* defunct_ets = ets;
ets = ets->next();
if (previous_ets == NULL) {
if (previous_ets == nullptr) {
assert(state->head_env_thread_state() == defunct_ets, "sanity check");
state->set_head_env_thread_state(ets);
} else {
@ -244,7 +244,7 @@ JvmtiVTMSTransitionDisabler::JvmtiVTMSTransitionDisabler(bool is_SR) {
if (!Continuations::enabled()) {
return; // JvmtiVTMSTransitionDisabler is no-op without virtual threads
}
if (Thread::current_or_null() == NULL) {
if (Thread::current_or_null() == nullptr) {
return; // Detached thread, can be a call from Agent_OnLoad.
}
_is_SR = is_SR;
@ -255,7 +255,7 @@ JvmtiVTMSTransitionDisabler::~JvmtiVTMSTransitionDisabler() {
if (!Continuations::enabled()) {
return; // JvmtiVTMSTransitionDisabler is a no-op without virtual threads
}
if (Thread::current_or_null() == NULL) {
if (Thread::current_or_null() == nullptr) {
return; // Detached thread, can be a call from Agent_OnLoad.
}
enable_VTMS_transitions();
@ -380,7 +380,7 @@ JvmtiVTMSTransitionDisabler::start_VTMS_transition(jthread vthread, bool is_moun
assert(!thread->is_in_VTMS_transition(), "VTMS_transition sanity check");
thread->set_is_in_VTMS_transition(true);
JvmtiThreadState* vstate = java_lang_Thread::jvmti_thread_state(vth());
if (vstate != NULL) {
if (vstate != nullptr) {
vstate->set_is_in_VTMS_transition(true);
}
}
@ -394,7 +394,7 @@ JvmtiVTMSTransitionDisabler::finish_VTMS_transition(jthread vthread, bool is_mou
oop vt = JNIHandles::resolve_external_guard(vthread);
int64_t thread_id = java_lang_Thread::thread_id(vt);
JvmtiThreadState* vstate = java_lang_Thread::jvmti_thread_state(vt);
if (vstate != NULL) {
if (vstate != nullptr) {
vstate->set_is_in_VTMS_transition(false);
}
@ -530,11 +530,11 @@ void JvmtiThreadState::add_env(JvmtiEnvBase *env) {
debug_only(NoSafepointVerifier nosafepoint;)
JvmtiEnvThreadStateIterator it(this);
JvmtiEnvThreadState* previous_ets = NULL;
for (JvmtiEnvThreadState* ets = it.first(); ets != NULL; ets = it.next(ets)) {
JvmtiEnvThreadState* previous_ets = nullptr;
for (JvmtiEnvThreadState* ets = it.first(); ets != nullptr; ets = it.next(ets)) {
previous_ets = ets;
}
if (previous_ets == NULL) {
if (previous_ets == nullptr) {
set_head_env_thread_state(new_ets);
} else {
previous_ets->set_next(new_ets);
@ -543,14 +543,14 @@ void JvmtiThreadState::add_env(JvmtiEnvBase *env) {
}
void JvmtiThreadState::enter_interp_only_mode() {
assert(_thread != NULL, "sanity check");
assert(_thread != nullptr, "sanity check");
_thread->increment_interp_only_mode();
invalidate_cur_stack_depth();
}
void JvmtiThreadState::leave_interp_only_mode() {
assert(is_interp_only_mode(), "leaving interp only when not in interp only mode");
if (_thread == NULL) {
if (_thread == nullptr) {
// Unmounted virtual thread updates the saved value.
--_saved_interp_only_mode;
} else {
@ -564,7 +564,7 @@ int JvmtiThreadState::count_frames() {
JavaThread* thread = get_thread_or_saved();
javaVFrame *jvf;
ResourceMark rm;
if (thread == NULL) {
if (thread == nullptr) {
oop thread_obj = get_thread_oop();
jvf = JvmtiEnvBase::get_vthread_jvf(thread_obj);
} else {
@ -670,7 +670,7 @@ void JvmtiThreadState::process_pending_step_for_popframe() {
// and current method_id after pop and step for recursive calls.
// Force the step by clearing the last location.
JvmtiEnvThreadStateIterator it(this);
for (JvmtiEnvThreadState* ets = it.first(); ets != NULL; ets = it.next(ets)) {
for (JvmtiEnvThreadState* ets = it.first(); ets != nullptr; ets = it.next(ets)) {
ets->clear_current_location();
}
}
@ -692,7 +692,7 @@ void JvmtiThreadState::update_for_pop_top_frame() {
int popframe_number = cur_stack_depth();
{
JvmtiEnvThreadStateIterator it(this);
for (JvmtiEnvThreadState* ets = it.first(); ets != NULL; ets = it.next(ets)) {
for (JvmtiEnvThreadState* ets = it.first(); ets != nullptr; ets = it.next(ets)) {
if (ets->is_frame_pop(popframe_number)) {
ets->clear_frame_pop(popframe_number);
}
@ -723,7 +723,7 @@ void JvmtiThreadState::process_pending_step_for_earlyret() {
// is the case, then we consider our return from compiled code to
// complete the ForceEarlyReturn request and we clear the condition.
clr_earlyret_pending();
set_earlyret_oop(NULL);
set_earlyret_oop(nullptr);
clr_earlyret_value();
}
@ -745,7 +745,7 @@ void JvmtiThreadState::process_pending_step_for_earlyret() {
// method_id after earlyret and step for recursive calls.
// Force the step by clearing the last location.
JvmtiEnvThreadStateIterator it(this);
for (JvmtiEnvThreadState* ets = it.first(); ets != NULL; ets = it.next(ets)) {
for (JvmtiEnvThreadState* ets = it.first(); ets != nullptr; ets = it.next(ets)) {
ets->clear_current_location();
}
}
@ -754,21 +754,21 @@ void JvmtiThreadState::oops_do(OopClosure* f, CodeBlobClosure* cf) {
f->do_oop((oop*) &_earlyret_oop);
// Keep nmethods from unloading on the event queue
if (_jvmti_event_queue != NULL) {
if (_jvmti_event_queue != nullptr) {
_jvmti_event_queue->oops_do(f, cf);
}
}
void JvmtiThreadState::nmethods_do(CodeBlobClosure* cf) {
// Keep nmethods from unloading on the event queue
if (_jvmti_event_queue != NULL) {
if (_jvmti_event_queue != nullptr) {
_jvmti_event_queue->nmethods_do(cf);
}
}
// Thread local event queue.
void JvmtiThreadState::enqueue_event(JvmtiDeferredEvent* event) {
if (_jvmti_event_queue == NULL) {
if (_jvmti_event_queue == nullptr) {
_jvmti_event_queue = new JvmtiDeferredEventQueue();
}
// copy the event
@ -776,15 +776,15 @@ void JvmtiThreadState::enqueue_event(JvmtiDeferredEvent* event) {
}
void JvmtiThreadState::post_events(JvmtiEnv* env) {
if (_jvmti_event_queue != NULL) {
if (_jvmti_event_queue != nullptr) {
_jvmti_event_queue->post(env); // deletes each queue node
delete _jvmti_event_queue;
_jvmti_event_queue = NULL;
_jvmti_event_queue = nullptr;
}
}
void JvmtiThreadState::run_nmethod_entry_barriers() {
if (_jvmti_event_queue != NULL) {
if (_jvmti_event_queue != nullptr) {
_jvmti_event_queue->run_nmethod_entry_barriers();
}
}
@ -794,8 +794,8 @@ oop JvmtiThreadState::get_thread_oop() {
}
void JvmtiThreadState::set_thread(JavaThread* thread) {
_thread_saved = NULL; // Common case.
if (!_is_virtual && thread == NULL) {
_thread_saved = nullptr; // Common case.
if (!_is_virtual && thread == nullptr) {
// Save JavaThread* if carrier thread is being detached.
_thread_saved = _thread;
}


@ -1,5 +1,5 @@
/*
* Copyright (c) 2003, 2022, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2003, 2023, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -192,11 +192,11 @@ class JvmtiThreadState : public CHeapObj<mtInternal> {
JvmtiThreadState *_next;
JvmtiThreadState *_prev;
// holds the current dynamic code event collector, NULL if no event collector in use
// holds the current dynamic code event collector, null if no event collector in use
JvmtiDynamicCodeEventCollector* _dynamic_code_event_collector;
// holds the current vm object alloc event collector, NULL if no event collector in use
// holds the current vm object alloc event collector, null if no event collector in use
JvmtiVMObjectAllocEventCollector* _vm_object_alloc_event_collector;
// holds the current sampled object alloc event collector, NULL if no event collector in use
// holds the current sampled object alloc event collector, null if no event collector in use
JvmtiSampledObjectAllocEventCollector* _sampled_object_alloc_event_collector;
// Should only be created by factory methods
@ -234,7 +234,7 @@ class JvmtiThreadState : public CHeapObj<mtInternal> {
// Used by the interpreter for fullspeed debugging support
bool is_interp_only_mode() {
return _thread == NULL ? _saved_interp_only_mode != 0 : _thread->is_interp_only_mode();
return _thread == nullptr ? _saved_interp_only_mode != 0 : _thread->is_interp_only_mode();
}
void enter_interp_only_mode();
void leave_interp_only_mode();
@ -263,7 +263,7 @@ class JvmtiThreadState : public CHeapObj<mtInternal> {
int count_frames();
inline JavaThread *get_thread() { return _thread; }
inline JavaThread *get_thread_or_saved(); // return _thread_saved if _thread is NULL
inline JavaThread *get_thread_or_saved(); // return _thread_saved if _thread is null
// Needed for virtual threads as they can migrate to different JavaThread's.
// Also used for carrier threads to clear/restore _thread.
@ -331,7 +331,7 @@ class JvmtiThreadState : public CHeapObj<mtInternal> {
}
inline void clear_class_being_redefined() {
_class_being_redefined = NULL;
_class_being_redefined = nullptr;
_class_load_kind = jvmti_class_load_kind_load;
}
@ -384,13 +384,13 @@ class JvmtiThreadState : public CHeapObj<mtInternal> {
_scratch_class_for_redefinition_verification = scratch_class;
}
inline void clear_class_versions_map() { set_class_versions_map(NULL, NULL); }
inline void clear_class_versions_map() { set_class_versions_map(nullptr, nullptr); }
static inline
Klass* class_to_verify_considering_redefinition(Klass* klass,
JavaThread *thread) {
JvmtiThreadState *state = thread->jvmti_thread_state();
if (state != NULL && state->_the_class_for_redefinition_verification != NULL) {
if (state != nullptr && state->_the_class_for_redefinition_verification != nullptr) {
if (state->_the_class_for_redefinition_verification == klass) {
klass = state->_scratch_class_for_redefinition_verification;
}
@ -442,10 +442,10 @@ class JvmtiThreadState : public CHeapObj<mtInternal> {
void update_for_pop_top_frame();
// already holding JvmtiThreadState_lock - retrieve or create JvmtiThreadState
// Can return NULL if JavaThread is exiting.
static JvmtiThreadState *state_for_while_locked(JavaThread *thread, oop thread_oop = NULL);
// Can return null if JavaThread is exiting.
static JvmtiThreadState *state_for_while_locked(JavaThread *thread, oop thread_oop = nullptr);
// retrieve or create JvmtiThreadState
// Can return NULL if JavaThread is exiting.
// Can return null if JavaThread is exiting.
static JvmtiThreadState *state_for(JavaThread *thread, Handle thread_handle = Handle());
// JVMTI ForceEarlyReturn support


@ -1,5 +1,5 @@
/*
* Copyright (c) 2006, 2022, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2006, 2023, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -56,12 +56,12 @@ inline JvmtiEnvThreadState* JvmtiEnvThreadStateIterator::next(JvmtiEnvThreadStat
JvmtiEnvThreadState* JvmtiThreadState::env_thread_state(JvmtiEnvBase *env) {
JvmtiEnvThreadStateIterator it(this);
for (JvmtiEnvThreadState* ets = it.first(); ets != NULL; ets = it.next(ets)) {
for (JvmtiEnvThreadState* ets = it.first(); ets != nullptr; ets = it.next(ets)) {
if ((JvmtiEnvBase*)(ets->get_env()) == env) {
return ets;
}
}
return NULL;
return nullptr;
}
JvmtiEnvThreadState* JvmtiThreadState::head_env_thread_state() {
@ -74,27 +74,27 @@ void JvmtiThreadState::set_head_env_thread_state(JvmtiEnvThreadState* ets) {
inline JvmtiThreadState* JvmtiThreadState::state_for_while_locked(JavaThread *thread, oop thread_oop) {
assert(JvmtiThreadState_lock->is_locked(), "sanity check");
assert(thread != NULL || thread_oop != NULL, "sanity check");
assert(thread != nullptr || thread_oop != nullptr, "sanity check");
NoSafepointVerifier nsv; // oop is safe to use.
if (thread_oop == NULL) { // Then thread should not be NULL (see assert above).
thread_oop = thread->jvmti_vthread() != NULL ? thread->jvmti_vthread() : thread->threadObj();
if (thread_oop == nullptr) { // Then thread should not be null (see assert above).
thread_oop = thread->jvmti_vthread() != nullptr ? thread->jvmti_vthread() : thread->threadObj();
}
// In a case of unmounted virtual thread the thread can be NULL.
JvmtiThreadState *state = thread == NULL ? NULL : thread->jvmti_thread_state();
// In a case of unmounted virtual thread the thread can be null.
JvmtiThreadState *state = thread == nullptr ? nullptr : thread->jvmti_thread_state();
if (state == NULL && thread != NULL && thread->is_exiting()) {
if (state == nullptr && thread != nullptr && thread->is_exiting()) {
// Don't add a JvmtiThreadState to a thread that is exiting.
return NULL;
return nullptr;
}
if (state == NULL || state->get_thread_oop() != thread_oop) {
if (state == nullptr || state->get_thread_oop() != thread_oop) {
// Check if java_lang_Thread already has a link to the JvmtiThreadState.
if (thread_oop != NULL) { // thread_oop can be NULL during early VMStart.
if (thread_oop != nullptr) { // thread_oop can be null during early VMStart.
state = java_lang_Thread::jvmti_thread_state(thread_oop);
}
if (state == NULL) { // Need to create state.
if (state == nullptr) { // Need to create state.
state = new JvmtiThreadState(thread, thread_oop);
}
}
@ -102,10 +102,10 @@ inline JvmtiThreadState* JvmtiThreadState::state_for_while_locked(JavaThread *th
}
inline JvmtiThreadState* JvmtiThreadState::state_for(JavaThread *thread, Handle thread_handle) {
// In a case of unmounted virtual thread the thread can be NULL.
JvmtiThreadState* state = thread_handle == NULL ? thread->jvmti_thread_state() :
// In a case of unmounted virtual thread the thread can be null.
JvmtiThreadState* state = thread_handle == nullptr ? thread->jvmti_thread_state() :
java_lang_Thread::jvmti_thread_state(thread_handle());
if (state == NULL) {
if (state == nullptr) {
MutexLocker mu(JvmtiThreadState_lock);
// check again with the lock held
state = state_for_while_locked(thread, thread_handle());
@ -118,8 +118,8 @@ inline JvmtiThreadState* JvmtiThreadState::state_for(JavaThread *thread, Handle
}
inline JavaThread* JvmtiThreadState::get_thread_or_saved() {
// Use _thread_saved if cthread is detached from JavaThread (_thread == NULL).
return (_thread == NULL && !is_virtual()) ? _thread_saved : _thread;
// Use _thread_saved if cthread is detached from JavaThread (_thread == null).
return (_thread == nullptr && !is_virtual()) ? _thread_saved : _thread;
}
inline void JvmtiThreadState::set_should_post_on_exceptions(bool val) {
@ -127,17 +127,17 @@ inline void JvmtiThreadState::set_should_post_on_exceptions(bool val) {
}
inline void JvmtiThreadState::unbind_from(JvmtiThreadState* state, JavaThread* thread) {
if (state == NULL) {
if (state == nullptr) {
return;
}
// Save thread's interp_only_mode.
state->_saved_interp_only_mode = thread->get_interp_only_mode();
state->set_thread(NULL); // Make sure stale _thread value is never used.
state->set_thread(nullptr); // Make sure stale _thread value is never used.
}
inline void JvmtiThreadState::bind_to(JvmtiThreadState* state, JavaThread* thread) {
// Restore thread's interp_only_mode.
thread->set_interp_only_mode(state == NULL ? 0 : state->_saved_interp_only_mode);
thread->set_interp_only_mode(state == nullptr ? 0 : state->_saved_interp_only_mode);
// Make continuation notice the interp_only_mode change.
Continuation::set_cont_fastpath_thread_state(thread);
@ -145,7 +145,7 @@ inline void JvmtiThreadState::bind_to(JvmtiThreadState* state, JavaThread* threa
// Bind JavaThread to JvmtiThreadState.
thread->set_jvmti_thread_state(state);
if (state != NULL) {
if (state != nullptr) {
// Bind to JavaThread.
state->set_thread(thread);
}
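
state_for() above is the familiar check, lock, re-check idiom: read optimistically, and only when the result is nullptr take JvmtiThreadState_lock and look again before creating. A portable sketch of the same shape using standard primitives in place of the VM's MutexLocker (every name below is illustrative, and the atomic stands in for the VM's own memory-model guarantees):

    #include <atomic>
    #include <mutex>

    struct State { int data = 0; };

    static std::atomic<State*> g_state{nullptr};
    static std::mutex          g_state_lock;

    // Retrieve-or-create: try without the lock first, then check again with
    // the lock held before allocating, so only one State is ever created.
    State* state_for() {
        State* s = g_state.load(std::memory_order_acquire);
        if (s == nullptr) {
            std::lock_guard<std::mutex> guard(g_state_lock);
            s = g_state.load(std::memory_order_relaxed);
            if (s == nullptr) {
                s = new State();
                g_state.store(s, std::memory_order_release);
            }
        }
        return s;
    }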


@ -1,5 +1,5 @@
/*
* Copyright (c) 2003, 2022, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2003, 2023, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -79,7 +79,7 @@ void JvmtiTrace::initialize() {
const char *very_end;
const char *curr;
if (TraceJVMTI != NULL) {
if (TraceJVMTI != nullptr) {
curr = TraceJVMTI;
} else {
curr = ""; // hack in fixed tracing here
@ -98,18 +98,18 @@ void JvmtiTrace::initialize() {
very_end = curr + strlen(curr);
while (curr < very_end) {
const char *curr_end = strchr(curr, ',');
if (curr_end == NULL) {
if (curr_end == nullptr) {
curr_end = very_end;
}
const char *op_pos = strchr(curr, '+');
const char *minus_pos = strchr(curr, '-');
if (minus_pos != NULL && (minus_pos < op_pos || op_pos == NULL)) {
if (minus_pos != nullptr && (minus_pos < op_pos || op_pos == nullptr)) {
op_pos = minus_pos;
}
char op;
const char *flags = op_pos + 1;
const char *flags_end = curr_end;
if (op_pos == NULL || op_pos > curr_end) {
if (op_pos == nullptr || op_pos > curr_end) {
flags = "ies";
flags_end = flags + strlen(flags);
op_pos = curr_end;
@ -190,7 +190,7 @@ void JvmtiTrace::initialize() {
do_op = true;
} else {
const char *fname = function_name(i);
if (fname != NULL) {
if (fname != nullptr) {
size_t fnlen = strlen(fname);
if (len==fnlen && strncmp(curr, fname, fnlen)==0) {
log_trace(jvmti)("Tracing the function: %s", fname);
@ -219,7 +219,7 @@ void JvmtiTrace::initialize() {
do_op = true;
} else {
const char *ename = event_name(i);
if (ename != NULL) {
if (ename != nullptr) {
size_t evtlen = strlen(ename);
if (len==evtlen && strncmp(curr, ename, evtlen)==0) {
log_trace(jvmti)("Tracing the event: %s", ename);
@ -271,22 +271,22 @@ const char* JvmtiTrace::enum_name(const char** names, const jint* values, jint v
// return a valid string no matter what state the thread is in
const char *JvmtiTrace::safe_get_thread_name(Thread *thread) {
if (thread == NULL) {
return "NULL";
if (thread == nullptr) {
return "null";
}
if (!thread->is_Java_thread()) {
return thread->name();
}
JavaThread* java_thread = JavaThread::cast(thread);
oop threadObj = java_thread->jvmti_vthread();
if (threadObj == NULL) {
if (threadObj == nullptr) {
threadObj = java_thread->threadObj();
}
if (threadObj == NULL) {
return "NULL";
if (threadObj == nullptr) {
return "null";
}
oop name = java_lang_Thread::name(threadObj);
if (name == NULL) {
if (name == nullptr) {
return "<NOT FILLED IN>";
}
return java_lang_String::as_utf8_string(name);
@ -308,7 +308,7 @@ const char * JvmtiTrace::get_class_name(oop k_mirror) {
return "primitive";
}
Klass* k_oop = java_lang_Class::as_Klass(k_mirror);
if (k_oop == NULL) {
if (k_oop == nullptr) {
return "INVALID";
}
return k_oop->external_name();
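
One pattern worth pausing on in the initialize() hunks above: the TraceJVMTI spec is split with strchr, whose nullptr return doubles as "separator not present". A runnable sketch of the same comma-and-operator splitting over a simplified spec (the spec grammar here is an assumption, not the full TraceJVMTI syntax):

    #include <cstdio>
    #include <cstring>

    // Walk a spec such as "alloc+ies,free-s,gc": each comma-separated item may
    // carry a '+' or '-' operator followed by flags. strchr returning nullptr
    // is the "not present" signal throughout.
    static void parse_spec(const char* curr) {
        const char* very_end = curr + strlen(curr);
        while (curr < very_end) {
            const char* curr_end = strchr(curr, ',');
            if (curr_end == nullptr) {
                curr_end = very_end;
            }
            const char* op_pos    = strchr(curr, '+');
            const char* minus_pos = strchr(curr, '-');
            if (minus_pos != nullptr && (minus_pos < op_pos || op_pos == nullptr)) {
                op_pos = minus_pos;
            }
            if (op_pos == nullptr || op_pos > curr_end) {
                printf("item '%.*s' with default flags\n", (int)(curr_end - curr), curr);
            } else {
                printf("item '%.*s' op '%c' flags '%.*s'\n",
                       (int)(op_pos - curr), curr, *op_pos,
                       (int)(curr_end - op_pos - 1), op_pos + 1);
            }
            if (curr_end == very_end) {
                break;
            }
            curr = curr_end + 1;
        }
    }

    int main() {
        parse_spec("alloc+ies,free-s,gc");
        return 0;
    }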


@ -1,5 +1,5 @@
/*
* Copyright (c) 2003, 2012, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2003, 2023, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -34,10 +34,10 @@
// class JvmtiUtil
//
ResourceArea* JvmtiUtil::_single_threaded_resource_area = NULL;
ResourceArea* JvmtiUtil::_single_threaded_resource_area = nullptr;
ResourceArea* JvmtiUtil::single_threaded_resource_area() {
if (_single_threaded_resource_area == NULL) {
if (_single_threaded_resource_area == nullptr) {
// lazily create the single threaded resource area
// pick a size which is not a standard since the pools don't exist yet
_single_threaded_resource_area = new (mtInternal) ResourceArea(Chunk::non_pool_size);


@ -1,5 +1,5 @@
/*
* Copyright (c) 1999, 2022, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 1999, 2023, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -81,7 +81,7 @@ class SafeResourceMark : public ResourceMark {
return JvmtiUtil::single_threaded_resource_area();
}
thread = Thread::current_or_null();
if (thread == NULL) {
if (thread == nullptr) {
return JvmtiUtil::single_threaded_resource_area();
}
return thread->resource_area();


@ -1,5 +1,5 @@
/*
* Copyright (c) 2008, 2022, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2008, 2023, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -77,15 +77,15 @@
*/
bool MethodHandles::_enabled = false; // set true after successful native linkage
MethodHandlesAdapterBlob* MethodHandles::_adapter_code = NULL;
MethodHandlesAdapterBlob* MethodHandles::_adapter_code = nullptr;
/**
* Generates method handle adapters. Returns 'false' if memory allocation
* failed and true otherwise.
*/
void MethodHandles::generate_adapters() {
assert(vmClasses::MethodHandle_klass() != NULL, "should be present");
assert(_adapter_code == NULL, "generate only once");
assert(vmClasses::MethodHandle_klass() != nullptr, "should be present");
assert(_adapter_code == nullptr, "generate only once");
ResourceMark rm;
TraceTime timer("MethodHandles adapters generation", TRACETIME_LOG(Info, startuptime));
@ -112,7 +112,7 @@ void MethodHandlesAdapterGenerator::generate() {
vmIntrinsics::ID iid = Interpreter::method_handle_intrinsic(mk);
StubCodeMark mark(this, "MethodHandle::interpreter_entry", vmIntrinsics::name_at(iid));
address entry = MethodHandles::generate_method_handle_interpreter_entry(_masm, iid);
if (entry != NULL) {
if (entry != nullptr) {
Interpreter::set_entry_for_kind(mk, entry);
}
// If the entry is not set, it will throw AbstractMethodError.
@ -164,7 +164,7 @@ Handle MethodHandles::resolve_MemberName_type(Handle mname, Klass* caller, TRAPS
return type; // already resolved
}
Symbol* signature = java_lang_String::as_symbol_or_null(type());
if (signature == NULL) {
if (signature == nullptr) {
return empty; // no such signature exists in the VM
}
Handle resolved;
@ -195,14 +195,14 @@ oop MethodHandles::init_MemberName(Handle mname, Handle target, TRAPS) {
oop clazz = java_lang_reflect_Field::clazz(target_oop); // fd.field_holder()
int slot = java_lang_reflect_Field::slot(target_oop); // fd.index()
Klass* k = java_lang_Class::as_Klass(clazz);
if (k != NULL && k->is_instance_klass()) {
if (k != nullptr && k->is_instance_klass()) {
fieldDescriptor fd(InstanceKlass::cast(k), slot);
oop mname2 = init_field_MemberName(mname, fd);
if (mname2 != NULL) {
if (mname2 != nullptr) {
// Since we have the reified name and type handy, add them to the result.
if (java_lang_invoke_MemberName::name(mname2) == NULL)
if (java_lang_invoke_MemberName::name(mname2) == nullptr)
java_lang_invoke_MemberName::set_name(mname2, java_lang_reflect_Field::name(target_oop));
if (java_lang_invoke_MemberName::type(mname2) == NULL)
if (java_lang_invoke_MemberName::type(mname2) == nullptr)
java_lang_invoke_MemberName::set_type(mname2, java_lang_reflect_Field::type(target_oop));
}
return mname2;
@ -211,10 +211,10 @@ oop MethodHandles::init_MemberName(Handle mname, Handle target, TRAPS) {
oop clazz = java_lang_reflect_Method::clazz(target_oop);
int slot = java_lang_reflect_Method::slot(target_oop);
Klass* k = java_lang_Class::as_Klass(clazz);
if (k != NULL && k->is_instance_klass()) {
if (k != nullptr && k->is_instance_klass()) {
Method* m = InstanceKlass::cast(k)->method_with_idnum(slot);
if (m == NULL || is_signature_polymorphic(m->intrinsic_id()))
return NULL; // do not resolve unless there is a concrete signature
if (m == nullptr || is_signature_polymorphic(m->intrinsic_id()))
return nullptr; // do not resolve unless there is a concrete signature
CallInfo info(m, k, CHECK_NULL);
return init_method_MemberName(mname, info);
}
@ -222,14 +222,14 @@ oop MethodHandles::init_MemberName(Handle mname, Handle target, TRAPS) {
oop clazz = java_lang_reflect_Constructor::clazz(target_oop);
int slot = java_lang_reflect_Constructor::slot(target_oop);
Klass* k = java_lang_Class::as_Klass(clazz);
if (k != NULL && k->is_instance_klass()) {
if (k != nullptr && k->is_instance_klass()) {
Method* m = InstanceKlass::cast(k)->method_with_idnum(slot);
if (m == NULL) return NULL;
if (m == nullptr) return nullptr;
CallInfo info(m, k, CHECK_NULL);
return init_method_MemberName(mname, info);
}
}
return NULL;
return nullptr;
}
oop MethodHandles::init_method_MemberName(Handle mname, CallInfo& info) {
@ -237,7 +237,7 @@ oop MethodHandles::init_method_MemberName(Handle mname, CallInfo& info) {
methodHandle m(Thread::current(), info.resolved_method());
assert(m.not_null(), "null method handle");
InstanceKlass* m_klass = m->method_holder();
assert(m_klass != NULL, "null holder for method handle");
assert(m_klass != nullptr, "null holder for method handle");
int flags = (jushort)( m->access_flags().as_short() & JVM_RECOGNIZED_METHOD_MODIFIERS );
int vmindex = Method::invalid_vtable_index;
LogTarget(Debug, methodhandles, indy) lt_indy;
@ -290,7 +290,7 @@ oop MethodHandles::init_method_MemberName(Handle mname, CallInfo& info) {
}
if (!m->is_public()) {
assert(m->is_public(), "virtual call must be to public interface method");
return NULL; // elicit an error later in product build
return nullptr; // elicit an error later in product build
}
assert(info.resolved_klass()->is_subtype_of(m_klass_non_interface), "virtual call must be type-safe");
m_klass = m_klass_non_interface;
@ -323,7 +323,7 @@ oop MethodHandles::init_method_MemberName(Handle mname, CallInfo& info) {
}
break;
default: assert(false, "bad CallInfo"); return NULL;
default: assert(false, "bad CallInfo"); return nullptr;
}
// @CallerSensitive annotation detected
@ -358,15 +358,15 @@ oop MethodHandles::init_field_MemberName(Handle mname, fieldDescriptor& fd, bool
oop mname_oop = mname();
java_lang_invoke_MemberName::set_flags (mname_oop, flags);
java_lang_invoke_MemberName::set_method (mname_oop, NULL);
java_lang_invoke_MemberName::set_method (mname_oop, nullptr);
java_lang_invoke_MemberName::set_vmindex(mname_oop, vmindex);
java_lang_invoke_MemberName::set_clazz (mname_oop, ik->java_mirror());
oop type = field_signature_type_or_null(fd.signature());
oop name = field_name_or_null(fd.name());
if (name != NULL)
if (name != nullptr)
java_lang_invoke_MemberName::set_name(mname_oop, name);
if (type != NULL)
if (type != nullptr)
java_lang_invoke_MemberName::set_type(mname_oop, type);
// Note: name and type can be lazily computed by resolve_MemberName,
// if Java code needs them as resolved String and Class objects.
@ -386,7 +386,7 @@ oop MethodHandles::init_field_MemberName(Handle mname, fieldDescriptor& fd, bool
// * It has a return type of Object for a polymorphic return type, otherwise a fixed return type.
// * It has the ACC_VARARGS and ACC_NATIVE flags set.
bool MethodHandles::is_method_handle_invoke_name(Klass* klass, Symbol* name) {
if (klass == NULL)
if (klass == nullptr)
return false;
// The following test will fail spuriously during bootstrap of MethodHandle itself:
// if (klass != vmClasses::MethodHandle_klass())
@ -400,7 +400,7 @@ bool MethodHandles::is_method_handle_invoke_name(Klass* klass, Symbol* name) {
Symbol* poly_sig = vmSymbols::object_array_object_signature();
InstanceKlass* iklass = InstanceKlass::cast(klass);
Method* m = iklass->find_method(name, poly_sig);
if (m != NULL) {
if (m != nullptr) {
int required = JVM_ACC_NATIVE | JVM_ACC_VARARGS;
int flags = m->access_flags().as_int();
if ((flags & required) == required) {
@ -485,13 +485,13 @@ vmIntrinsics::ID MethodHandles::signature_polymorphic_name_id(Symbol* name) {
// Cover the case of invokeExact and any future variants of invokeFoo.
Klass* mh_klass = vmClasses::klass_at(VM_CLASS_ID(MethodHandle_klass));
if (mh_klass != NULL && is_method_handle_invoke_name(mh_klass, name)) {
if (mh_klass != nullptr && is_method_handle_invoke_name(mh_klass, name)) {
return vmIntrinsics::_invokeGeneric;
}
// Cover the case of methods on VarHandle.
Klass* vh_klass = vmClasses::klass_at(VM_CLASS_ID(VarHandle_klass));
if (vh_klass != NULL && is_method_handle_invoke_name(vh_klass, name)) {
if (vh_klass != nullptr && is_method_handle_invoke_name(vh_klass, name)) {
return vmIntrinsics::_invokeGeneric;
}
@ -501,7 +501,7 @@ vmIntrinsics::ID MethodHandles::signature_polymorphic_name_id(Symbol* name) {
}
vmIntrinsics::ID MethodHandles::signature_polymorphic_name_id(Klass* klass, Symbol* name) {
if (klass != NULL &&
if (klass != nullptr &&
(klass->name() == vmSymbols::java_lang_invoke_MethodHandle() ||
klass->name() == vmSymbols::java_lang_invoke_VarHandle())) {
vmIntrinsics::ID iid = signature_polymorphic_name_id(name);
@ -545,7 +545,7 @@ Symbol* MethodHandles::lookup_signature(oop type_str, bool intern_if_not_found,
return java_lang_String::as_symbol_or_null(type_str);
}
} else {
THROW_MSG_(vmSymbols::java_lang_InternalError(), "unrecognized type", NULL);
THROW_MSG_(vmSymbols::java_lang_InternalError(), "unrecognized type", nullptr);
}
}
@ -577,8 +577,8 @@ bool MethodHandles::is_basic_type_signature(Symbol* sig) {
}
Symbol* MethodHandles::lookup_basic_type_signature(Symbol* sig, bool keep_last_arg) {
Symbol* bsig = NULL;
if (sig == NULL) {
Symbol* bsig = nullptr;
if (sig == nullptr) {
return sig;
} else if (is_basic_type_signature(sig)) {
sig->increment_refcount();
@ -657,12 +657,12 @@ static oop object_java_mirror() {
}
oop MethodHandles::field_name_or_null(Symbol* s) {
if (s == NULL) return NULL;
if (s == nullptr) return nullptr;
return StringTable::lookup(s);
}
oop MethodHandles::field_signature_type_or_null(Symbol* s) {
if (s == NULL) return NULL;
if (s == nullptr) return nullptr;
BasicType bt = Signature::basic_type(s);
if (is_java_primitive(bt)) {
assert(s->utf8_length() == 1, "");
@ -679,7 +679,7 @@ oop MethodHandles::field_signature_type_or_null(Symbol* s) {
return vmClasses::String_klass()->java_mirror();
}
}
return NULL;
return nullptr;
}
// An unresolved member name is a mere symbolic reference.
@ -690,7 +690,7 @@ Handle MethodHandles::resolve_MemberName(Handle mname, Klass* caller, int lookup
Handle empty;
assert(java_lang_invoke_MemberName::is_instance(mname()), "");
if (java_lang_invoke_MemberName::vmtarget(mname()) != NULL) {
if (java_lang_invoke_MemberName::vmtarget(mname()) != nullptr) {
// Already resolved.
DEBUG_ONLY(int vmindex = java_lang_invoke_MemberName::vmindex(mname()));
assert(vmindex >= Method::nonvirtual_vtable_index, "");
@ -713,24 +713,24 @@ Handle MethodHandles::resolve_MemberName(Handle mname, Klass* caller, int lookup
THROW_MSG_(vmSymbols::java_lang_IllegalArgumentException(), "nothing to resolve", empty);
}
InstanceKlass* defc = NULL;
InstanceKlass* defc = nullptr;
{
Klass* defc_klass = java_lang_Class::as_Klass(defc_oop());
if (defc_klass == NULL) return empty; // a primitive; no resolution possible
if (defc_klass == nullptr) return empty; // a primitive; no resolution possible
if (!defc_klass->is_instance_klass()) {
if (!defc_klass->is_array_klass()) return empty;
defc_klass = vmClasses::Object_klass();
}
defc = InstanceKlass::cast(defc_klass);
}
if (defc == NULL) {
if (defc == nullptr) {
THROW_MSG_(vmSymbols::java_lang_InternalError(), "primitive class", empty);
}
defc->link_class(CHECK_(empty)); // possible safepoint
// convert the external string name to an internal symbol
TempNewSymbol name = java_lang_String::as_symbol_or_null(name_str());
if (name == NULL) return empty; // no such name
if (name == nullptr) return empty; // no such name
if (name == vmSymbols::class_initializer_name())
return empty; // illegal name
@ -752,14 +752,14 @@ Handle MethodHandles::resolve_MemberName(Handle mname, Klass* caller, int lookup
// convert the external string or reflective type to an internal signature
TempNewSymbol type = lookup_signature(type_str(), (mh_invoke_id != vmIntrinsics::_none), CHECK_(empty));
if (type == NULL) return empty; // no such signature exists in the VM
if (type == nullptr) return empty; // no such signature exists in the VM
// skip access check if it's trusted lookup
LinkInfo::AccessCheck access_check = caller != NULL ?
LinkInfo::AccessCheck access_check = caller != nullptr ?
LinkInfo::AccessCheck::required :
LinkInfo::AccessCheck::skip;
// skip loader constraints if it's trusted lookup or a public lookup
LinkInfo::LoaderConstraintCheck loader_constraint_check = (caller != NULL && (lookup_mode & LM_UNCONDITIONAL) == 0) ?
LinkInfo::LoaderConstraintCheck loader_constraint_check = (caller != nullptr && (lookup_mode & LM_UNCONDITIONAL) == 0) ?
LinkInfo::LoaderConstraintCheck::required :
LinkInfo::LoaderConstraintCheck::skip;
@ -860,9 +860,9 @@ Handle MethodHandles::resolve_MemberName(Handle mname, Klass* caller, int lookup
void MethodHandles::expand_MemberName(Handle mname, int suppress, TRAPS) {
assert(java_lang_invoke_MemberName::is_instance(mname()), "");
bool have_defc = (java_lang_invoke_MemberName::clazz(mname()) != NULL);
bool have_name = (java_lang_invoke_MemberName::name(mname()) != NULL);
bool have_type = (java_lang_invoke_MemberName::type(mname()) != NULL);
bool have_defc = (java_lang_invoke_MemberName::clazz(mname()) != nullptr);
bool have_name = (java_lang_invoke_MemberName::name(mname()) != nullptr);
bool have_type = (java_lang_invoke_MemberName::type(mname()) != nullptr);
int flags = java_lang_invoke_MemberName::flags(mname());
if (suppress != 0) {
@ -878,11 +878,11 @@ void MethodHandles::expand_MemberName(Handle mname, int suppress, TRAPS) {
case IS_CONSTRUCTOR:
{
Method* vmtarget = java_lang_invoke_MemberName::vmtarget(mname());
if (vmtarget == NULL) {
if (vmtarget == nullptr) {
THROW_MSG(vmSymbols::java_lang_IllegalArgumentException(), "nothing to expand");
}
methodHandle m(THREAD, vmtarget);
DEBUG_ONLY(vmtarget = NULL); // safety
DEBUG_ONLY(vmtarget = nullptr); // safety
if (!have_defc) {
InstanceKlass* defc = m->method_holder();
java_lang_invoke_MemberName::set_clazz(mname(), defc->java_mirror());
@ -901,11 +901,11 @@ void MethodHandles::expand_MemberName(Handle mname, int suppress, TRAPS) {
case IS_FIELD:
{
oop clazz = java_lang_invoke_MemberName::clazz(mname());
if (clazz == NULL) {
if (clazz == nullptr) {
THROW_MSG(vmSymbols::java_lang_IllegalArgumentException(), "nothing to expand (as field)");
}
InstanceKlass* defc = InstanceKlass::cast(java_lang_Class::as_Klass(clazz));
DEBUG_ONLY(clazz = NULL); // safety
DEBUG_ONLY(clazz = nullptr); // safety
int vmindex = java_lang_invoke_MemberName::vmindex(mname());
bool is_static = ((flags & JVM_ACC_STATIC) != 0);
fieldDescriptor fd; // find_field initializes fd if found
@ -1069,8 +1069,8 @@ JVM_END
// void init(MemberName self, AccessibleObject ref)
JVM_ENTRY(void, MHN_init_Mem(JNIEnv *env, jobject igcls, jobject mname_jh, jobject target_jh)) {
if (mname_jh == NULL) { THROW_MSG(vmSymbols::java_lang_InternalError(), "mname is null"); }
if (target_jh == NULL) { THROW_MSG(vmSymbols::java_lang_InternalError(), "target is null"); }
if (mname_jh == nullptr) { THROW_MSG(vmSymbols::java_lang_InternalError(), "mname is null"); }
if (target_jh == nullptr) { THROW_MSG(vmSymbols::java_lang_InternalError(), "target is null"); }
Handle mname(THREAD, JNIHandles::resolve_non_null(mname_jh));
Handle target(THREAD, JNIHandles::resolve_non_null(target_jh));
MethodHandles::init_MemberName(mname, target, CHECK);
@ -1079,7 +1079,7 @@ JVM_END
// void expand(MemberName self)
JVM_ENTRY(void, MHN_expand_Mem(JNIEnv *env, jobject igcls, jobject mname_jh)) {
if (mname_jh == NULL) { THROW_MSG(vmSymbols::java_lang_InternalError(), "mname is null"); }
if (mname_jh == nullptr) { THROW_MSG(vmSymbols::java_lang_InternalError(), "mname is null"); }
Handle mname(THREAD, JNIHandles::resolve_non_null(mname_jh));
MethodHandles::expand_MemberName(mname, 0, CHECK);
}
@ -1088,7 +1088,7 @@ JVM_END
// void resolve(MemberName self, Class<?> caller)
JVM_ENTRY(jobject, MHN_resolve_Mem(JNIEnv *env, jobject igcls, jobject mname_jh, jclass caller_jh,
jint lookup_mode, jboolean speculative_resolve)) {
if (mname_jh == NULL) { THROW_MSG_NULL(vmSymbols::java_lang_InternalError(), "mname is null"); }
if (mname_jh == nullptr) { THROW_MSG_NULL(vmSymbols::java_lang_InternalError(), "mname is null"); }
Handle mname(THREAD, JNIHandles::resolve_non_null(mname_jh));
// The trusted Java code that calls this method should already have performed
@ -1097,15 +1097,15 @@ JVM_ENTRY(jobject, MHN_resolve_Mem(JNIEnv *env, jobject igcls, jobject mname_jh,
// verify the lookup context for a Lookup object teleported from one module
// to another. Such Lookup object can only access the intersection of the set
// of accessible classes from both lookup class and previous lookup class.
if (VerifyMethodHandles && (lookup_mode & LM_TRUSTED) == LM_TRUSTED && caller_jh != NULL &&
java_lang_invoke_MemberName::clazz(mname()) != NULL) {
if (VerifyMethodHandles && (lookup_mode & LM_TRUSTED) == LM_TRUSTED && caller_jh != nullptr &&
java_lang_invoke_MemberName::clazz(mname()) != nullptr) {
Klass* reference_klass = java_lang_Class::as_Klass(java_lang_invoke_MemberName::clazz(mname()));
if (reference_klass != NULL && reference_klass->is_objArray_klass()) {
if (reference_klass != nullptr && reference_klass->is_objArray_klass()) {
reference_klass = ObjArrayKlass::cast(reference_klass)->bottom_klass();
}
// Reflection::verify_class_access can only handle instance classes.
if (reference_klass != NULL && reference_klass->is_instance_klass()) {
if (reference_klass != nullptr && reference_klass->is_instance_klass()) {
// Emulate LinkResolver::check_klass_accessability.
Klass* caller = java_lang_Class::as_Klass(JNIHandles::resolve_non_null(caller_jh));
// access check on behalf of the caller if this is not a public lookup
@ -1123,7 +1123,7 @@ JVM_ENTRY(jobject, MHN_resolve_Mem(JNIEnv *env, jobject igcls, jobject mname_jh,
}
}
Klass* caller = caller_jh == NULL ? NULL :
Klass* caller = caller_jh == nullptr ? nullptr :
java_lang_Class::as_Klass(JNIHandles::resolve_non_null(caller_jh));
Handle resolved = MethodHandles::resolve_MemberName(mname, caller, lookup_mode,
speculative_resolve == JNI_TRUE,
@ -1137,7 +1137,7 @@ JVM_ENTRY(jobject, MHN_resolve_Mem(JNIEnv *env, jobject igcls, jobject mname_jh,
}
if (speculative_resolve) {
assert(!HAS_PENDING_EXCEPTION, "No exceptions expected when resolving speculatively");
return NULL;
return nullptr;
}
if ((flags & ALL_KINDS) == IS_FIELD) {
THROW_MSG_NULL(vmSymbols::java_lang_NoSuchFieldError(), "field resolution failed");
@ -1154,8 +1154,8 @@ JVM_ENTRY(jobject, MHN_resolve_Mem(JNIEnv *env, jobject igcls, jobject mname_jh,
JVM_END
static jlong find_member_field_offset(oop mname, bool must_be_static, TRAPS) {
if (mname == NULL ||
java_lang_invoke_MemberName::clazz(mname) == NULL) {
if (mname == nullptr ||
java_lang_invoke_MemberName::clazz(mname) == nullptr) {
THROW_MSG_0(vmSymbols::java_lang_InternalError(), "mname not resolved");
} else {
int flags = java_lang_invoke_MemberName::flags(mname);
@ -1191,7 +1191,7 @@ JVM_ENTRY(jobject, MHN_staticFieldBase(JNIEnv *env, jobject igcls, jobject mname
JVM_END
JVM_ENTRY(jobject, MHN_getMemberVMInfo(JNIEnv *env, jobject igcls, jobject mname_jh)) {
if (mname_jh == NULL) return NULL;
if (mname_jh == nullptr) return nullptr;
Handle mname(THREAD, JNIHandles::resolve_non_null(mname_jh));
intptr_t vmindex = java_lang_invoke_MemberName::vmindex(mname());
objArrayHandle result = oopFactory::new_objArray_handle(vmClasses::Object_klass(), 2, CHECK_NULL);
@ -1204,7 +1204,7 @@ JVM_ENTRY(jobject, MHN_getMemberVMInfo(JNIEnv *env, jobject igcls, jobject mname
x = java_lang_invoke_MemberName::clazz(mname());
} else {
Method* vmtarget = java_lang_invoke_MemberName::vmtarget(mname());
assert(vmtarget != NULL && vmtarget->is_method(), "vmtarget is only method");
assert(vmtarget != nullptr && vmtarget->is_method(), "vmtarget is only method");
x = mname();
}
result->obj_at_put(1, x);
@ -1242,12 +1242,12 @@ JVM_ENTRY(void, MHN_copyOutBootstrapArguments(JNIEnv* env, jobject igcls,
jobjectArray buf_jh, jint pos,
jboolean resolve, jobject ifna_jh)) {
Klass* caller_k = java_lang_Class::as_Klass(JNIHandles::resolve(caller_jh));
if (caller_k == NULL || !caller_k->is_instance_klass()) {
if (caller_k == nullptr || !caller_k->is_instance_klass()) {
THROW_MSG(vmSymbols::java_lang_InternalError(), "bad caller");
}
InstanceKlass* caller = InstanceKlass::cast(caller_k);
typeArrayOop index_info_oop = (typeArrayOop) JNIHandles::resolve(index_info_jh);
if (index_info_oop == NULL ||
if (index_info_oop == nullptr ||
index_info_oop->klass() != Universe::intArrayKlassObj() ||
typeArrayOop(index_info_oop)->length() < 2) {
THROW_MSG(vmSymbols::java_lang_InternalError(), "bad index info (0)");
@ -1266,7 +1266,7 @@ JVM_ENTRY(void, MHN_copyOutBootstrapArguments(JNIEnv* env, jobject igcls,
for (int pseudo_index = -4; pseudo_index < 0; pseudo_index++) {
if (start == pseudo_index) {
if (start >= end || 0 > pos || pos >= buf->length()) break;
oop pseudo_arg = NULL;
oop pseudo_arg = nullptr;
switch (pseudo_index) {
case -4: // bootstrap method
{
@ -1347,7 +1347,7 @@ JVM_END
*/
JVM_ENTRY(jobject, MH_invoke_UOE(JNIEnv* env, jobject mh, jobjectArray args)) {
THROW_MSG_NULL(vmSymbols::java_lang_UnsupportedOperationException(), "MethodHandle.invoke cannot be invoked reflectively");
return NULL;
return nullptr;
}
JVM_END
@ -1358,7 +1358,7 @@ JVM_END
*/
JVM_ENTRY(jobject, MH_invokeExact_UOE(JNIEnv* env, jobject mh, jobjectArray args)) {
THROW_MSG_NULL(vmSymbols::java_lang_UnsupportedOperationException(), "MethodHandle.invokeExact cannot be invoked reflectively");
return NULL;
return nullptr;
}
JVM_END
@ -1407,7 +1407,7 @@ static JNINativeMethod MH_methods[] = {
*/
JVM_ENTRY(void, JVM_RegisterMethodHandleMethods(JNIEnv *env, jclass MHN_class)) {
assert(!MethodHandles::enabled(), "must not be enabled");
assert(vmClasses::MethodHandle_klass() != NULL, "should be present");
assert(vmClasses::MethodHandle_klass() != nullptr, "should be present");
oop mirror = vmClasses::MethodHandle_klass()->java_mirror();
jclass MH_class = (jclass) JNIHandles::make_local(THREAD, mirror);
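
A small aside on the is_method_handle_invoke_name() hunk above: requiring both ACC_NATIVE and ACC_VARARGS is expressed with the mask-and-compare test (flags & required) == required, which fails if any required bit is missing. A tiny illustration (the constants are stand-ins, not the exact JVM_ACC_* definitions):

    #include <cstdio>

    enum : int {
        ACC_NATIVE  = 0x0100,   // illustrative values
        ACC_VARARGS = 0x0080,
    };

    // Masking keeps only the bits we care about; equality then demands that
    // none of the required bits were absent.
    static bool has_all(int flags, int required) {
        return (flags & required) == required;
    }

    int main() {
        const int required = ACC_NATIVE | ACC_VARARGS;
        printf("%d\n", has_all(ACC_NATIVE | ACC_VARARGS | 0x0001, required)); // 1
        printf("%d\n", has_all(ACC_NATIVE, required));                        // 0
        return 0;
    }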


@ -1,5 +1,5 @@
/*
* Copyright (c) 1997, 2022, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 1997, 2023, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -172,12 +172,12 @@ char* NativeLookup::pure_jni_name(const methodHandle& method) {
st.print("Java_");
// Klass name
if (!map_escaped_name_on(&st, method->klass_name())) {
return NULL;
return nullptr;
}
st.print("_");
// Method name
if (!map_escaped_name_on(&st, method->name())) {
return NULL;
return nullptr;
}
return st.as_string();
}
@ -192,7 +192,7 @@ char* NativeLookup::long_jni_name(const methodHandle& method) {
for (end = 0; end < signature->utf8_length() && signature->char_at(end) != JVM_SIGNATURE_ENDFUNC; end++);
// skip first '('
if (!map_escaped_name_on(&st, signature, 1, end)) {
return NULL;
return nullptr;
}
return st.as_string();
@ -217,34 +217,34 @@ extern "C" {
#define FN_PTR(f) CAST_FROM_FN_PTR(void*, &f)
static JNINativeMethod lookup_special_native_methods[] = {
{ CC"Java_jdk_internal_misc_Unsafe_registerNatives", NULL, FN_PTR(JVM_RegisterJDKInternalMiscUnsafeMethods) },
{ CC"Java_java_lang_invoke_MethodHandleNatives_registerNatives", NULL, FN_PTR(JVM_RegisterMethodHandleMethods) },
{ CC"Java_jdk_internal_foreign_abi_UpcallStubs_registerNatives", NULL, FN_PTR(JVM_RegisterUpcallHandlerMethods) },
{ CC"Java_jdk_internal_foreign_abi_UpcallLinker_registerNatives", NULL, FN_PTR(JVM_RegisterUpcallLinkerMethods) },
{ CC"Java_jdk_internal_foreign_abi_NativeEntryPoint_registerNatives", NULL, FN_PTR(JVM_RegisterNativeEntryPointMethods) },
{ CC"Java_jdk_internal_perf_Perf_registerNatives", NULL, FN_PTR(JVM_RegisterPerfMethods) },
{ CC"Java_sun_hotspot_WhiteBox_registerNatives", NULL, FN_PTR(JVM_RegisterWhiteBoxMethods) },
{ CC"Java_jdk_test_whitebox_WhiteBox_registerNatives", NULL, FN_PTR(JVM_RegisterWhiteBoxMethods) },
{ CC"Java_jdk_internal_vm_vector_VectorSupport_registerNatives", NULL, FN_PTR(JVM_RegisterVectorSupportMethods)},
{ CC"Java_jdk_internal_misc_Unsafe_registerNatives", nullptr, FN_PTR(JVM_RegisterJDKInternalMiscUnsafeMethods) },
{ CC"Java_java_lang_invoke_MethodHandleNatives_registerNatives", nullptr, FN_PTR(JVM_RegisterMethodHandleMethods) },
{ CC"Java_jdk_internal_foreign_abi_UpcallStubs_registerNatives", nullptr, FN_PTR(JVM_RegisterUpcallHandlerMethods) },
{ CC"Java_jdk_internal_foreign_abi_UpcallLinker_registerNatives", nullptr, FN_PTR(JVM_RegisterUpcallLinkerMethods) },
{ CC"Java_jdk_internal_foreign_abi_NativeEntryPoint_registerNatives", nullptr, FN_PTR(JVM_RegisterNativeEntryPointMethods) },
{ CC"Java_jdk_internal_perf_Perf_registerNatives", nullptr, FN_PTR(JVM_RegisterPerfMethods) },
{ CC"Java_sun_hotspot_WhiteBox_registerNatives", nullptr, FN_PTR(JVM_RegisterWhiteBoxMethods) },
{ CC"Java_jdk_test_whitebox_WhiteBox_registerNatives", nullptr, FN_PTR(JVM_RegisterWhiteBoxMethods) },
{ CC"Java_jdk_internal_vm_vector_VectorSupport_registerNatives", nullptr, FN_PTR(JVM_RegisterVectorSupportMethods)},
#if INCLUDE_JVMCI
{ CC"Java_jdk_vm_ci_runtime_JVMCI_initializeRuntime", NULL, FN_PTR(JVM_GetJVMCIRuntime) },
{ CC"Java_jdk_vm_ci_hotspot_CompilerToVM_registerNatives", NULL, FN_PTR(JVM_RegisterJVMCINatives) },
{ CC"Java_jdk_vm_ci_runtime_JVMCI_initializeRuntime", nullptr, FN_PTR(JVM_GetJVMCIRuntime) },
{ CC"Java_jdk_vm_ci_hotspot_CompilerToVM_registerNatives", nullptr, FN_PTR(JVM_RegisterJVMCINatives) },
#endif
#if INCLUDE_JFR
{ CC"Java_jdk_jfr_internal_JVM_registerNatives", NULL, FN_PTR(jfr_register_natives) },
{ CC"Java_jdk_jfr_internal_JVM_registerNatives", nullptr, FN_PTR(jfr_register_natives) },
#endif
{ CC"Java_jdk_internal_misc_ScopedMemoryAccess_registerNatives", NULL, FN_PTR(JVM_RegisterJDKInternalMiscScopedMemoryAccessMethods) },
{ CC"Java_jdk_internal_misc_ScopedMemoryAccess_registerNatives", nullptr, FN_PTR(JVM_RegisterJDKInternalMiscScopedMemoryAccessMethods) },
};
static address lookup_special_native(const char* jni_name) {
int count = sizeof(lookup_special_native_methods) / sizeof(JNINativeMethod);
for (int i = 0; i < count; i++) {
// NB: To ignore the jni prefix and jni postfix strstr is used matching.
if (strstr(jni_name, lookup_special_native_methods[i].name) != NULL) {
if (strstr(jni_name, lookup_special_native_methods[i].name) != nullptr) {
return CAST_FROM_FN_PTR(address, lookup_special_native_methods[i].fnPtr);
}
}
return NULL;
return nullptr;
}
address NativeLookup::lookup_style(const methodHandle& method, char* pure_name, const char* long_name, int args_size, bool os_style, TRAPS) {
@ -260,10 +260,10 @@ address NativeLookup::lookup_style(const methodHandle& method, char* pure_name,
Handle loader(THREAD, method->method_holder()->class_loader());
if (loader.is_null()) {
entry = lookup_special_native(jni_name);
if (entry == NULL) {
if (entry == nullptr) {
entry = (address) os::dll_lookup(os::native_java_library(), jni_name);
}
if (entry != NULL) {
if (entry != nullptr) {
return entry;
}
}
@ -283,12 +283,12 @@ address NativeLookup::lookup_style(const methodHandle& method, char* pure_name,
CHECK_NULL);
entry = (address) (intptr_t) result.get_jlong();
if (entry == NULL) {
if (entry == nullptr) {
// findNative didn't find it, if there are any agent libraries look in them
AgentLibrary* agent;
for (agent = Arguments::agents(); agent != NULL; agent = agent->next()) {
for (agent = Arguments::agents(); agent != nullptr; agent = agent->next()) {
entry = (address) os::dll_lookup(agent->os_lib(), jni_name);
if (entry != NULL) {
if (entry != nullptr) {
return entry;
}
}
@ -315,13 +315,13 @@ const char* NativeLookup::compute_complete_jni_name(const char* pure_name, const
// Check all the formats of native implementation name to see if there is one
// for the specified method.
address NativeLookup::lookup_entry(const methodHandle& method, TRAPS) {
address entry = NULL;
address entry = nullptr;
// Compute pure name
char* pure_name = pure_jni_name(method);
if (pure_name == NULL) {
if (pure_name == nullptr) {
// JNI name mapping rejected this method so return
// NULL to indicate UnsatisfiedLinkError should be thrown.
return NULL;
// null to indicate UnsatisfiedLinkError should be thrown.
return nullptr;
}
// Compute argument size
@ -331,28 +331,28 @@ address NativeLookup::lookup_entry(const methodHandle& method, TRAPS) {
// 1) Try JNI short style
entry = lookup_style(method, pure_name, "", args_size, true, CHECK_NULL);
if (entry != NULL) return entry;
if (entry != nullptr) return entry;
// Compute long name
char* long_name = long_jni_name(method);
if (long_name == NULL) {
if (long_name == nullptr) {
// JNI name mapping rejected this method so return
// NULL to indicate UnsatisfiedLinkError should be thrown.
return NULL;
// null to indicate UnsatisfiedLinkError should be thrown.
return nullptr;
}
// 2) Try JNI long style
entry = lookup_style(method, pure_name, long_name, args_size, true, CHECK_NULL);
if (entry != NULL) return entry;
if (entry != nullptr) return entry;
// 3) Try JNI short style without os prefix/suffix
entry = lookup_style(method, pure_name, "", args_size, false, CHECK_NULL);
if (entry != NULL) return entry;
if (entry != nullptr) return entry;
// 4) Try JNI long style without os prefix/suffix
entry = lookup_style(method, pure_name, long_name, args_size, false, CHECK_NULL);
return entry; // NULL indicates not found
return entry; // null indicates not found
}
// Check if there are any JVM TI prefixes which have been applied to the native method name.
@ -380,10 +380,10 @@ address NativeLookup::lookup_entry_prefixed(const methodHandle& method, TRAPS) {
// we have a name for a wrapping method
int wrapper_name_len = (int)strlen(wrapper_name);
TempNewSymbol wrapper_symbol = SymbolTable::probe(wrapper_name, wrapper_name_len);
if (wrapper_symbol != NULL) {
if (wrapper_symbol != nullptr) {
Klass* k = method->method_holder();
Method* wrapper_method = k->lookup_method(wrapper_symbol, method->signature());
if (wrapper_method != NULL && !wrapper_method->is_native()) {
if (wrapper_method != nullptr && !wrapper_method->is_native()) {
// we found a wrapper method, use its native entry
method->set_is_prefixed_native();
return lookup_entry(methodHandle(THREAD, wrapper_method), THREAD);
@ -391,20 +391,20 @@ address NativeLookup::lookup_entry_prefixed(const methodHandle& method, TRAPS) {
}
}
#endif // INCLUDE_JVMTI
return NULL;
return nullptr;
}
address NativeLookup::lookup_base(const methodHandle& method, TRAPS) {
address entry = NULL;
address entry = nullptr;
ResourceMark rm(THREAD);
entry = lookup_entry(method, CHECK_NULL);
if (entry != NULL) return entry;
if (entry != nullptr) return entry;
// standard native method resolution has failed. Check if there are any
// JVM TI prefixes which have been applied to the native method name.
entry = lookup_entry_prefixed(method, CHECK_NULL);
if (entry != NULL) return entry;
if (entry != nullptr) return entry;
// Native function not found, throw UnsatisfiedLinkError
stringStream ss;
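
lookup_entry() above is a cascade: try each JNI naming style in turn, return the first non-null entry, and let a final nullptr surface as UnsatisfiedLinkError in the caller. A toy sketch of that shape, with a small string table standing in for os::dll_lookup and only two of the four styles (all names here are made up for illustration):

    #include <cstdio>
    #include <cstring>

    // Toy "exported symbol" table standing in for the dynamic linker.
    static const char* const known[] = { "Java_Foo_bar__II", nullptr };

    static const void* dll_lookup(const char* name) {
        for (int i = 0; known[i] != nullptr; i++) {
            if (strcmp(known[i], name) == 0) {
                return known[i];
            }
        }
        return nullptr;                                     // not found
    }

    // Mirror of the cascade: stop at the first hit; nullptr at the end means
    // the caller throws UnsatisfiedLinkError.
    static const void* lookup_entry(const char* pure_name, const char* long_name) {
        const void* entry = dll_lookup(pure_name);          // 1) short style
        if (entry != nullptr) return entry;
        char buf[256];
        snprintf(buf, sizeof(buf), "%s__%s", pure_name, long_name);
        return dll_lookup(buf);                             // 2) long style, or nullptr
    }

    int main() {
        const void* a = lookup_entry("Java_Foo_bar", "II");
        const void* b = lookup_entry("Java_Foo_baz", "II");
        printf("%s %s\n", a != nullptr ? "found" : "missing",
                          b != nullptr ? "found" : "missing");  // found missing
        return 0;
    }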


@ -1,5 +1,5 @@
/*
* Copyright (c) 2001, 2022, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2001, 2023, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -47,9 +47,9 @@
static char* jstr_to_utf(JNIEnv *env, jstring str, TRAPS) {
char* utfstr = NULL;
char* utfstr = nullptr;
if (str == NULL) {
if (str == nullptr) {
THROW_0(vmSymbols::java_lang_NullPointerException());
//throw_new(env,"NullPointerException");
}
@ -109,7 +109,7 @@ PERF_ENTRY(jobject, Perf_CreateLong(JNIEnv *env, jobject perf, jstring name,
PerfWrapper("Perf_CreateLong");
char* name_utf = NULL;
char* name_utf = nullptr;
if (units <= 0 || units > PerfData::U_Last) {
debug_only(warning("unexpected units argument, units = %d", units));
@ -124,7 +124,7 @@ PERF_ENTRY(jobject, Perf_CreateLong(JNIEnv *env, jobject perf, jstring name,
name_utf = jstr_to_utf(env, name, CHECK_NULL);
}
PerfLong* pl = NULL;
PerfLong* pl = nullptr;
// check that the PerfData name doesn't already exist
if (PerfDataManager::exists(name_utf)) {
@ -173,7 +173,7 @@ PERF_ENTRY(jobject, Perf_CreateByteArray(JNIEnv *env, jobject perf,
PerfWrapper("Perf_CreateByteArray");
// check for valid byte array objects
if (name == NULL || value == NULL) {
if (name == nullptr || value == nullptr) {
THROW_0(vmSymbols::java_lang_NullPointerException());
}
@ -192,8 +192,8 @@ PERF_ENTRY(jobject, Perf_CreateByteArray(JNIEnv *env, jobject perf,
}
int value_length;
char* name_utf = NULL;
jbyte* value_local = NULL;
char* name_utf = nullptr;
jbyte* value_local = nullptr;
ResourceMark rm;
@ -214,7 +214,7 @@ PERF_ENTRY(jobject, Perf_CreateByteArray(JNIEnv *env, jobject perf,
THROW_MSG_0(vmSymbols::java_lang_IllegalArgumentException(), "PerfByteArray name already exists");
}
PerfByteArray* pbv = NULL;
PerfByteArray* pbv = nullptr;
if (units == PerfData::U_String) {


@ -1,5 +1,5 @@
/*
* Copyright (c) 2017, 2021, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2017, 2023, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -71,7 +71,7 @@ class ResolvedMethodTableConfig : public AllStatic {
static uintx get_hash(Value const& value, bool* is_dead) {
oop val_oop = value.peek();
if (val_oop == NULL) {
if (val_oop == nullptr) {
*is_dead = true;
return 0;
}
@ -92,7 +92,7 @@ class ResolvedMethodTableConfig : public AllStatic {
}
};
static ResolvedMethodTableHash* _local_table = NULL;
static ResolvedMethodTableHash* _local_table = nullptr;
static size_t _current_size = (size_t)1 << ResolvedMethodTableSizeLog;
volatile bool ResolvedMethodTable::_has_work = false;
@ -128,7 +128,7 @@ class ResolvedMethodTableLookup : StackObj {
}
bool equals(WeakHandle* value, bool* is_dead) {
oop val_oop = value->peek();
if (val_oop == NULL) {
if (val_oop == nullptr) {
// dead oop, mark this hash dead for cleaning
*is_dead = true;
return false;
@ -152,7 +152,7 @@ public:
ResolvedMethodGet(Thread* thread, const Method* method) : _thread(thread), _method(method) {}
void operator()(WeakHandle* val) {
oop result = val->resolve();
assert(result != NULL, "Result should be reachable");
assert(result != nullptr, "Result should be reachable");
_return = Handle(_thread, result);
log_get();
}
@ -305,7 +305,7 @@ struct ResolvedMethodTableDeleteCheck : StackObj {
bool operator()(WeakHandle* val) {
++_item;
oop tmp = val->peek();
if (tmp == NULL) {
if (tmp == nullptr) {
++_count;
return true;
} else {
@ -342,7 +342,7 @@ public:
AdjustMethodEntries(bool* trace_name_printed) : _trace_name_printed(trace_name_printed) {};
bool operator()(WeakHandle* entry) {
oop mem_name = entry->peek();
if (mem_name == NULL) {
if (mem_name == nullptr) {
// Removed
return true;
}
@ -384,7 +384,7 @@ class VerifyResolvedMethod : StackObj {
public:
bool operator()(WeakHandle* val) {
oop obj = val->peek();
if (obj != NULL) {
if (obj != nullptr) {
Method* method = (Method*)java_lang_invoke_ResolvedMethodName::vmtarget(obj);
guarantee(method->is_method(), "Must be");
guarantee(!method->is_old(), "Must be");
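
The ResolvedMethodTable hunks above all rely on one idiom: peek() the weak reference, and if it reads back as nullptr the referent has been collected, so the entry is dead and can be cleaned out. A rough standalone analogue using std::weak_ptr in place of HotSpot's WeakHandle (the Entry type and prune_dead are hypothetical):

```cpp
#include <cstddef>
#include <memory>
#include <vector>

struct Entry {
  std::weak_ptr<int> ref;   // stands in for the table's weak reference to an oop
};

// Remove every entry whose referent is gone; returns how many were dropped.
static std::size_t prune_dead(std::vector<Entry>& table) {
  std::size_t removed = 0;
  for (auto it = table.begin(); it != table.end(); ) {
    if (it->ref.lock() == nullptr) {      // referent collected -> entry is dead
      it = table.erase(it);
      ++removed;
    } else {
      ++it;
    }
  }
  return removed;
}

int main() {
  std::vector<Entry> table(3);
  auto live = std::make_shared<int>(42);
  table[1].ref = live;                    // only entry 1 keeps a live referent
  return prune_dead(table) == 2 ? 0 : 1;  // entries 0 and 2 are dead
}
```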

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2020, 2022, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2020, 2023, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -97,12 +97,12 @@ public:
}
ResourceMark rm;
if (_deopt != NULL && last_frame.is_compiled_frame() && last_frame.can_be_deoptimized()) {
if (_deopt != nullptr && last_frame.is_compiled_frame() && last_frame.can_be_deoptimized()) {
CloseScopedMemoryFindOopClosure cl(_deopt);
CompiledMethod* cm = last_frame.cb()->as_compiled_method();
/* FIXME: this doesn't work if reachability fences are violated by C2
last_frame.oops_do(&cl, NULL, &register_map);
last_frame.oops_do(&cl, nullptr, &register_map);
if (cl.found()) {
//Found the deopt oop in a compiled method; deoptimize.
Deoptimization::deoptimize(jt, last_frame);

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2015, 2022, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2015, 2023, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -49,7 +49,7 @@
// setup and cleanup actions
BaseFrameStream::BaseFrameStream(JavaThread* thread, Handle continuation)
: _thread(thread), _continuation(continuation), _anchor(0L) {
assert(thread != NULL, "");
assert(thread != nullptr, "");
}
void BaseFrameStream::setup_magic_on_entry(objArrayHandle frames_array) {
@ -67,7 +67,7 @@ bool BaseFrameStream::check_magic(objArrayHandle frames_array) {
bool BaseFrameStream::cleanup_magic_on_exit(objArrayHandle frames_array) {
bool ok = check_magic(frames_array);
frames_array->obj_at_put(magic_pos, NULL);
frames_array->obj_at_put(magic_pos, nullptr);
_anchor = 0L;
return ok;
}
@ -97,7 +97,7 @@ LiveFrameStream::LiveFrameStream(JavaThread* thread, RegisterMap* rm, Handle con
_cont_entry = thread->last_continuation();
} else {
_jvf = Continuation::last_java_vframe(cont, rm);
_cont_entry = NULL;
_cont_entry = nullptr;
}
}
@ -108,13 +108,13 @@ void JavaFrameStream::next() {
}
void LiveFrameStream::next() {
assert(_cont_scope.is_null() || cont() != (oop)NULL, "must be");
assert(_cont_scope.is_null() || cont() != (oop)nullptr, "must be");
oop cont = this->cont();
if (cont != (oop)NULL && Continuation::is_continuation_entry_frame(_jvf->fr(), _jvf->register_map())) {
if (cont != (oop)nullptr && Continuation::is_continuation_entry_frame(_jvf->fr(), _jvf->register_map())) {
oop scope = jdk_internal_vm_Continuation::scope(cont);
if (_cont_scope.not_null() && scope == _cont_scope()) {
_jvf = NULL;
_jvf = nullptr;
return;
}
_cont_entry = _cont_entry->parent();
@ -136,10 +136,10 @@ BaseFrameStream* BaseFrameStream::from_current(JavaThread* thread, jlong magic,
objArrayHandle frames_array)
{
oop m1 = frames_array->obj_at(magic_pos);
if (m1 != thread->threadObj()) return NULL;
if (magic == 0L) return NULL;
if (m1 != thread->threadObj()) return nullptr;
if (magic == 0L) return nullptr;
BaseFrameStream* stream = (BaseFrameStream*) (intptr_t) magic;
if (!stream->is_valid_in(thread, frames_array)) return NULL;
if (!stream->is_valid_in(thread, frames_array)) return nullptr;
return stream;
}
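
BaseFrameStream::from_current above turns the jlong "magic" that was handed out to Java back into a native pointer, rejecting a zero value before trusting it. A minimal sketch of that round trip, with a hypothetical StreamState type standing in for the real stream; the real code additionally cross-checks the thread object stored in the frames array before using the pointer.

```cpp
#include <cassert>
#include <cstdint>

struct StreamState { int depth = 0; };    // hypothetical native-side state

// Hand the native object to managed code as an opaque 64-bit value.
static int64_t to_magic(StreamState* s) {
  return static_cast<int64_t>(reinterpret_cast<intptr_t>(s));
}

// Recover it later; zero means "no stream", so return nullptr.
static StreamState* from_magic(int64_t magic) {
  if (magic == 0) return nullptr;
  return reinterpret_cast<StreamState*>(static_cast<intptr_t>(magic));
}

int main() {
  StreamState s;
  int64_t magic = to_magic(&s);
  assert(from_magic(magic) == &s);        // round trip is lossless
  assert(from_magic(0) == nullptr);       // a cleared magic yields no stream
  return 0;
}
```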
@ -173,10 +173,10 @@ int StackWalk::fill_in_frames(jlong mode, BaseFrameStream& stream,
int frames_decoded = 0;
for (; !stream.at_end(); stream.next()) {
assert(stream.continuation() == NULL || stream.continuation() == stream.reg_map()->cont(), "");
assert(stream.continuation() == nullptr || stream.continuation() == stream.reg_map()->cont(), "");
Method* method = stream.method();
if (method == NULL) continue;
if (method == nullptr) continue;
// skip hidden frames for default StackWalker option (i.e. SHOW_HIDDEN_FRAMES
// not set) and when StackWalker::getCallerClass is called
@ -223,7 +223,7 @@ int StackWalk::fill_in_frames(jlong mode, BaseFrameStream& stream,
frames_decoded++;
// We end a batch on continuation bottom to let the Java side skip top frames of the next one
if (stream.continuation() != NULL && method->intrinsic_id() == vmIntrinsics::_Continuation_enter) break;
if (stream.continuation() != nullptr && method->intrinsic_id() == vmIntrinsics::_Continuation_enter) break;
if (frames_decoded >= max_nframes) break;
}
@ -262,7 +262,7 @@ oop LiveFrameStream::create_primitive_slot_instance(StackValueCollection* values
JavaValue result(T_OBJECT);
JavaCallArguments args;
Symbol* signature = NULL;
Symbol* signature = nullptr;
// ## TODO: type is only available in LocalVariable table, if present.
// ## StackValue type is T_INT or T_OBJECT (or converted to T_LONG on 64-bit)
@ -283,7 +283,7 @@ oop LiveFrameStream::create_primitive_slot_instance(StackValueCollection* values
case T_SHORT:
case T_CHAR:
case T_BOOLEAN:
THROW_MSG_(vmSymbols::java_lang_InternalError(), "Unexpected StackValue type", NULL);
THROW_MSG_(vmSymbols::java_lang_InternalError(), "Unexpected StackValue type", nullptr);
case T_OBJECT:
return values->obj_at(i)();
@ -329,7 +329,7 @@ objArrayHandle LiveFrameStream::values_to_object_array(StackValueCollection* val
}
#endif
oop obj = create_primitive_slot_instance(values, index, type, CHECK_(empty));
if (obj != NULL) {
if (obj != nullptr) {
array_h->obj_at_put(i, obj);
}
}
@ -357,7 +357,7 @@ void BaseFrameStream::fill_stackframe(Handle stackFrame, const methodHandle& met
void LiveFrameStream::fill_live_stackframe(Handle stackFrame,
const methodHandle& method, TRAPS) {
fill_stackframe(stackFrame, method, CHECK);
if (_jvf != NULL) {
if (_jvf != nullptr) {
ResourceMark rm(THREAD);
HandleMark hm(THREAD);
@ -424,7 +424,7 @@ oop StackWalk::walk(Handle stackStream, jlong mode, int skip_frames, Handle cont
}
if (frames_array.is_null()) {
THROW_MSG_(vmSymbols::java_lang_NullPointerException(), "frames_array is NULL", NULL);
THROW_MSG_(vmSymbols::java_lang_NullPointerException(), "frames_array is null", nullptr);
}
// Setup traversal onto my stack.
@ -492,7 +492,7 @@ oop StackWalk::fetchFirstBatch(BaseFrameStream& stream, Handle stackStream,
numFrames = fill_in_frames(mode, stream, frame_count, start_index,
frames_array, end_index, CHECK_NULL);
if (numFrames < 1) {
THROW_MSG_(vmSymbols::java_lang_InternalError(), "stack walk: decode failed", NULL);
THROW_MSG_(vmSymbols::java_lang_InternalError(), "stack walk: decode failed", nullptr);
}
}
@ -520,7 +520,7 @@ oop StackWalk::fetchFirstBatch(BaseFrameStream& stream, Handle stackStream,
(void) (CHECK_NULL);
if (!ok) {
THROW_MSG_(vmSymbols::java_lang_InternalError(), "doStackWalk: corrupted buffers on exit", NULL);
THROW_MSG_(vmSymbols::java_lang_InternalError(), "doStackWalk: corrupted buffers on exit", nullptr);
}
// Return normally
@ -546,12 +546,12 @@ jint StackWalk::fetchNextBatch(Handle stackStream, jlong mode, jlong magic,
{
JavaThread* jt = THREAD;
BaseFrameStream* existing_stream = BaseFrameStream::from_current(jt, magic, frames_array);
if (existing_stream == NULL) {
if (existing_stream == nullptr) {
THROW_MSG_(vmSymbols::java_lang_InternalError(), "doStackWalk: corrupted buffers", 0L);
}
if (frames_array.is_null()) {
THROW_MSG_(vmSymbols::java_lang_NullPointerException(), "frames_array is NULL", 0L);
THROW_MSG_(vmSymbols::java_lang_NullPointerException(), "frames_array is null", 0L);
}
log_debug(stackwalk)("StackWalk::fetchNextBatch frame_count %d existing_stream "
@ -589,11 +589,11 @@ void StackWalk::setContinuation(Handle stackStream, jlong magic, objArrayHandle
JavaThread* jt = JavaThread::cast(THREAD);
if (frames_array.is_null()) {
THROW_MSG(vmSymbols::java_lang_NullPointerException(), "frames_array is NULL");
THROW_MSG(vmSymbols::java_lang_NullPointerException(), "frames_array is null");
}
BaseFrameStream* existing_stream = BaseFrameStream::from_current(jt, magic, frames_array);
if (existing_stream == NULL) {
if (existing_stream == nullptr) {
THROW_MSG(vmSymbols::java_lang_InternalError(), "doStackWalk: corrupted buffers");
}

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2015, 2022, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2015, 2023, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -130,11 +130,11 @@ public:
const RegisterMap* reg_map() override { return _map; };
void next() override;
bool at_end() override { return _jvf == NULL; }
bool at_end() override { return _jvf == nullptr; }
Method* method() override { return _jvf->method(); }
int bci() override { return _jvf->bci(); }
oop cont() override { return continuation() != NULL ? continuation(): ContinuationEntry::cont_oop_or_null(_cont_entry, _map->thread()); }
oop cont() override { return continuation() != nullptr ? continuation(): ContinuationEntry::cont_oop_or_null(_cont_entry, _map->thread()); }
void fill_frame(int index, objArrayHandle frames_array,
const methodHandle& method, TRAPS) override;

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2000, 2022, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2000, 2023, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -116,7 +116,7 @@ static inline void assert_field_offset_sane(oop p, jlong field_offset) {
#ifdef ASSERT
jlong byte_offset = field_offset_to_byte_offset(field_offset);
if (p != NULL) {
if (p != nullptr) {
assert(byte_offset >= 0 && byte_offset <= (jlong)MAX_OBJECT_SIZE, "sane offset");
if (byte_offset == (jint)byte_offset) {
void* ptr_plus_disp = cast_from_oop<address>(p) + byte_offset;
@ -381,7 +381,7 @@ UNSAFE_ENTRY(void, Unsafe_CopyMemory0(JNIEnv *env, jobject unsafe, jobject srcOb
void* dst = index_oop_from_field_offset_long(dstp, dstOffset);
{
GuardUnsafeAccess guard(thread);
if (StubRoutines::unsafe_arraycopy() != NULL) {
if (StubRoutines::unsafe_arraycopy() != nullptr) {
MACOS_AARCH64_ONLY(ThreadWXEnable wx(WXExec, thread));
StubRoutines::UnsafeArrayCopy_stub()(src, dst, sz);
} else {
@ -398,7 +398,7 @@ UNSAFE_LEAF(void, Unsafe_CopySwapMemory0(JNIEnv *env, jobject unsafe, jobject sr
size_t sz = (size_t)size;
size_t esz = (size_t)elemSize;
if (srcObj == NULL && dstObj == NULL) {
if (srcObj == nullptr && dstObj == nullptr) {
// Both src & dst are in native memory
address src = (address)srcOffset;
address dst = (address)dstOffset;
@ -435,14 +435,14 @@ UNSAFE_LEAF (void, Unsafe_WriteBack0(JNIEnv *env, jobject unsafe, jlong line)) {
#endif
MACOS_AARCH64_ONLY(ThreadWXEnable wx(WXExec, Thread::current()));
assert(StubRoutines::data_cache_writeback() != NULL, "sanity");
assert(StubRoutines::data_cache_writeback() != nullptr, "sanity");
(StubRoutines::DataCacheWriteback_stub())(addr_from_java(line));
} UNSAFE_END
static void doWriteBackSync0(bool is_pre)
{
MACOS_AARCH64_ONLY(ThreadWXEnable wx(WXExec, Thread::current()));
assert(StubRoutines::data_cache_writeback_sync() != NULL, "sanity");
assert(StubRoutines::data_cache_writeback_sync() != nullptr, "sanity");
(StubRoutines::DataCacheWritebackSync_stub())(is_pre);
}
@ -471,8 +471,8 @@ UNSAFE_LEAF (void, Unsafe_WriteBackPostSync0(JNIEnv *env, jobject unsafe)) {
////// Random queries
static jlong find_field_offset(jclass clazz, jstring name, TRAPS) {
assert(clazz != NULL, "clazz must not be NULL");
assert(name != NULL, "name must not be NULL");
assert(clazz != nullptr, "clazz must not be null");
assert(name != nullptr, "name must not be null");
ResourceMark rm(THREAD);
char *utf_name = java_lang_String::as_utf8_string(JNIHandles::resolve_non_null(name));
@ -494,7 +494,7 @@ static jlong find_field_offset(jclass clazz, jstring name, TRAPS) {
}
static jlong find_field_offset(jobject field, int must_be_static, TRAPS) {
assert(field != NULL, "field must not be NULL");
assert(field != nullptr, "field must not be null");
oop reflected = JNIHandles::resolve_non_null(field);
oop mirror = java_lang_reflect_Field::clazz(reflected);
@ -526,14 +526,14 @@ UNSAFE_ENTRY(jlong, Unsafe_StaticFieldOffset0(JNIEnv *env, jobject unsafe, jobje
} UNSAFE_END
UNSAFE_ENTRY(jobject, Unsafe_StaticFieldBase0(JNIEnv *env, jobject unsafe, jobject field)) {
assert(field != NULL, "field must not be NULL");
assert(field != nullptr, "field must not be null");
// Note: In this VM implementation, a field address is always a short
// offset from the base of a klass metaobject. Thus, the full dynamic
// range of the return type is never used. However, some implementations
// might put the static field inside an array shared by many classes,
// or even at a fixed address, in which case the address could be quite
// large. In that last case, this function would return NULL, since
// large. In that last case, this function would return null, since
// the address would operate alone, without any base pointer.
oop reflected = JNIHandles::resolve_non_null(field);
@ -548,12 +548,12 @@ UNSAFE_ENTRY(jobject, Unsafe_StaticFieldBase0(JNIEnv *env, jobject unsafe, jobje
} UNSAFE_END
UNSAFE_ENTRY(void, Unsafe_EnsureClassInitialized0(JNIEnv *env, jobject unsafe, jobject clazz)) {
assert(clazz != NULL, "clazz must not be NULL");
assert(clazz != nullptr, "clazz must not be null");
oop mirror = JNIHandles::resolve_non_null(clazz);
Klass* klass = java_lang_Class::as_Klass(mirror);
if (klass != NULL && klass->should_be_initialized()) {
if (klass != nullptr && klass->should_be_initialized()) {
InstanceKlass* k = InstanceKlass::cast(klass);
k->initialize(CHECK);
}
@ -561,12 +561,12 @@ UNSAFE_ENTRY(void, Unsafe_EnsureClassInitialized0(JNIEnv *env, jobject unsafe, j
UNSAFE_END
UNSAFE_ENTRY(jboolean, Unsafe_ShouldBeInitialized0(JNIEnv *env, jobject unsafe, jobject clazz)) {
assert(clazz != NULL, "clazz must not be NULL");
assert(clazz != nullptr, "clazz must not be null");
oop mirror = JNIHandles::resolve_non_null(clazz);
Klass* klass = java_lang_Class::as_Klass(mirror);
if (klass != NULL && klass->should_be_initialized()) {
if (klass != nullptr && klass->should_be_initialized()) {
return true;
}
@ -575,12 +575,12 @@ UNSAFE_ENTRY(jboolean, Unsafe_ShouldBeInitialized0(JNIEnv *env, jobject unsafe,
UNSAFE_END
static void getBaseAndScale(int& base, int& scale, jclass clazz, TRAPS) {
assert(clazz != NULL, "clazz must not be NULL");
assert(clazz != nullptr, "clazz must not be null");
oop mirror = JNIHandles::resolve_non_null(clazz);
Klass* k = java_lang_Class::as_Klass(mirror);
if (k == NULL || !k->is_array_klass()) {
if (k == nullptr || !k->is_array_klass()) {
THROW(vmSymbols::java_lang_InvalidClassException());
} else if (k->is_objArray_klass()) {
base = arrayOopDesc::base_offset_in_bytes(T_OBJECT);
@ -633,18 +633,18 @@ static inline void throw_new(JNIEnv *env, const char *ename) {
return;
}
env->ThrowNew(cls, NULL);
env->ThrowNew(cls, nullptr);
}
static jclass Unsafe_DefineClass_impl(JNIEnv *env, jstring name, jbyteArray data, int offset, int length, jobject loader, jobject pd) {
// Code lifted from JDK 1.3 ClassLoader.c
jbyte *body;
char *utfName = NULL;
char *utfName = nullptr;
jclass result = 0;
char buf[128];
assert(data != NULL, "Class bytes must not be NULL");
assert(data != nullptr, "Class bytes must not be null");
assert(length >= 0, "length must not be negative: %d", length);
if (UsePerfData) {
@ -652,7 +652,7 @@ static jclass Unsafe_DefineClass_impl(JNIEnv *env, jstring name, jbyteArray data
}
body = NEW_C_HEAP_ARRAY_RETURN_NULL(jbyte, length, mtInternal);
if (body == NULL) {
if (body == nullptr) {
throw_new(env, "java/lang/OutOfMemoryError");
return 0;
}
@ -662,13 +662,13 @@ static jclass Unsafe_DefineClass_impl(JNIEnv *env, jstring name, jbyteArray data
goto free_body;
}
if (name != NULL) {
if (name != nullptr) {
uint len = env->GetStringUTFLength(name);
int unicode_len = env->GetStringLength(name);
if (len >= sizeof(buf)) {
utfName = NEW_C_HEAP_ARRAY_RETURN_NULL(char, len + 1, mtInternal);
if (utfName == NULL) {
if (utfName == nullptr) {
throw_new(env, "java/lang/OutOfMemoryError");
goto free_body;
}
@ -752,11 +752,11 @@ UNSAFE_ENTRY(jboolean, Unsafe_CompareAndSetLong(JNIEnv *env, jobject unsafe, job
} UNSAFE_END
static void post_thread_park_event(EventThreadPark* event, const oop obj, jlong timeout_nanos, jlong until_epoch_millis) {
assert(event != NULL, "invariant");
event->set_parkedClass((obj != NULL) ? obj->klass() : NULL);
assert(event != nullptr, "invariant");
event->set_parkedClass((obj != nullptr) ? obj->klass() : nullptr);
event->set_timeout(timeout_nanos);
event->set_until(until_epoch_millis);
event->set_address((obj != NULL) ? (u8)cast_from_oop<uintptr_t>(obj) : 0);
event->set_address((obj != nullptr) ? (u8)cast_from_oop<uintptr_t>(obj) : 0);
event->commit();
}
@ -782,14 +782,14 @@ UNSAFE_ENTRY(void, Unsafe_Park(JNIEnv *env, jobject unsafe, jboolean isAbsolute,
} UNSAFE_END
UNSAFE_ENTRY(void, Unsafe_Unpark(JNIEnv *env, jobject unsafe, jobject jthread)) {
if (jthread != NULL) {
if (jthread != nullptr) {
ThreadsListHandle tlh;
JavaThread* thr = NULL;
oop java_thread = NULL;
JavaThread* thr = nullptr;
oop java_thread = nullptr;
(void) tlh.cv_internal_thread_to_JavaThread(jthread, &thr, &java_thread);
if (java_thread != NULL) {
if (java_thread != nullptr) {
// This is a valid oop.
if (thr != NULL) {
if (thr != nullptr) {
// The JavaThread is alive.
Parker* p = thr->parker();
HOTSPOT_THREAD_UNPARK((uintptr_t) p);
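
Earlier in this file, Unsafe_CopyMemory0 uses the generated unsafe_arraycopy stub when one exists and otherwise falls back to a plain copy. A generic sketch of that stub-or-fallback shape; g_copy_stub is a hypothetical pointer, not the StubRoutines entry itself.

```cpp
#include <cstddef>
#include <cstring>

using copy_fn = void (*)(const void* src, void* dst, std::size_t size);

static copy_fn g_copy_stub = nullptr;     // stays null if no stub was generated

static void copy_memory(const void* src, void* dst, std::size_t size) {
  if (g_copy_stub != nullptr) {
    g_copy_stub(src, dst, size);          // accelerated, platform-specific path
  } else {
    std::memcpy(dst, src, size);          // portable fallback
  }
}

int main() {
  char src[8] = "abcdefg";
  char dst[8] = {};
  copy_memory(src, dst, sizeof src);
  return std::strcmp(src, dst) == 0 ? 0 : 1;
}
```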

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2020, 2022, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2020, 2023, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -49,7 +49,7 @@ struct UpcallContext {
UpcallContext() {} // Explicit constructor to address XL C compiler bug.
~UpcallContext() {
if (attachedThread != NULL) {
if (attachedThread != nullptr) {
JavaVM_ *vm = (JavaVM *)(&main_vm);
vm->functions->DetachCurrentThread(vm);
}
@ -97,7 +97,7 @@ JavaThread* UpcallLinker::on_entry(UpcallStub::FrameData* context) {
context->old_handles = thread->active_handles();
// For the profiler, the last_Java_frame information in thread must always be in
// legal state. We have no last Java frame if last_Java_sp == NULL so
// legal state. We have no last Java frame if last_Java_sp == nullptr so
// the valid transition is to clear _last_Java_sp and then reset the rest of
// the (platform specific) state.

View File

@ -1,5 +1,5 @@
/*
* Copyright (c) 2020, 2022, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2020, 2023, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@ -83,7 +83,7 @@ BasicType VectorSupport::klass2bt(InstanceKlass* ik) {
// static final Class<?> ETYPE;
Klass* holder = ik->find_field(vmSymbols::ETYPE_name(), vmSymbols::class_signature(), &fd);
assert(holder != NULL, "sanity");
assert(holder != nullptr, "sanity");
assert(fd.is_static(), "");
assert(fd.offset() > 0, "");
@ -103,7 +103,7 @@ jint VectorSupport::klass2length(InstanceKlass* ik) {
// static final int VLENGTH;
Klass* holder = ik->find_field(vmSymbols::VLENGTH_name(), vmSymbols::int_signature(), &fd);
assert(holder != NULL, "sanity");
assert(holder != nullptr, "sanity");
assert(fd.is_static(), "");
assert(fd.offset() > 0, "");

View File

@ -190,7 +190,7 @@ public:
// NOTE: This will not work correctly if the original hidden class
// name contains a '+'.
char* plus_char = strchr(k_name, '+');
if (plus_char != NULL) {
if (plus_char != nullptr) {
*plus_char = 0;
char* c_name = _name->as_C_string();
if (strcmp(c_name, k_name) == 0) {
@ -208,7 +208,7 @@ public:
WB_ENTRY(jint, WB_CountAliveClasses(JNIEnv* env, jobject target, jstring name))
oop h_name = JNIHandles::resolve(name);
if (h_name == NULL) {
if (h_name == nullptr) {
return 0;
}
Symbol* sym = java_lang_String::as_symbol(h_name);
@ -223,7 +223,7 @@ WB_END
WB_ENTRY(jint, WB_GetSymbolRefcount(JNIEnv* env, jobject unused, jstring name))
oop h_name = JNIHandles::resolve(name);
if (h_name == NULL) {
if (h_name == nullptr) {
return 0;
}
Symbol* sym = java_lang_String::as_symbol(h_name);
@ -278,8 +278,8 @@ WB_ENTRY(void, WB_ReadFromNoaccessArea(JNIEnv* env, jobject o))
vs.initialize(rhs, 50 * granularity);
// Check if constraints are complied
if (!( UseCompressedOops && rhs.base() != NULL &&
CompressedOops::base() != NULL &&
if (!( UseCompressedOops && rhs.base() != nullptr &&
CompressedOops::base() != nullptr &&
CompressedOops::use_implicit_null_checks() )) {
tty->print_cr("WB_ReadFromNoaccessArea method is useless:\n "
"\tUseCompressedOops is %d\n"
@ -720,7 +720,7 @@ WB_ENTRY(void, WB_NMTArenaMalloc(JNIEnv* env, jobject o, jlong arena, jlong size
WB_END
static jmethodID reflected_method_to_jmid(JavaThread* thread, JNIEnv* env, jobject method) {
assert(method != NULL, "method should not be null");
assert(method != nullptr, "method should not be null");
ThreadToNativeFromVM ttn(thread);
return env->FromReflectedMethod(method);
}
@ -744,7 +744,7 @@ class VM_WhiteBoxDeoptimizeFrames : public VM_WhiteBoxOperation {
Deoptimization::deoptimize(t, *f);
if (_make_not_entrant) {
CompiledMethod* cm = CodeCache::find_compiled(f->pc());
assert(cm != NULL, "sanity check");
assert(cm != nullptr, "sanity check");
cm->make_not_entrant();
}
++_result;
@ -769,10 +769,10 @@ WB_ENTRY(jboolean, WB_IsFrameDeoptimized(JNIEnv* env, jobject o, jint depth))
RegisterMap::ProcessFrames::include,
RegisterMap::WalkContinuation::skip);
javaVFrame *jvf = thread->last_java_vframe(&reg_map);
for (jint d = 0; d < depth && jvf != NULL; d++) {
for (jint d = 0; d < depth && jvf != nullptr; d++) {
jvf = jvf->java_sender();
}
result = jvf != NULL && jvf->fr().is_deoptimized_frame();
result = jvf != nullptr && jvf->fr().is_deoptimized_frame();
}
return result;
WB_END
@ -790,7 +790,7 @@ WB_ENTRY(jint, WB_DeoptimizeMethod(JNIEnv* env, jobject o, jobject method, jbool
methodHandle mh(THREAD, Method::checked_resolve_jmethod_id(jmid));
if (is_osr) {
result += mh->mark_osr_nmethods();
} else if (mh->code() != NULL) {
} else if (mh->code() != nullptr) {
mh->code()->mark_for_deoptimization();
++result;
}
@ -807,14 +807,14 @@ WB_ENTRY(jboolean, WB_IsMethodCompiled(JNIEnv* env, jobject o, jobject method, j
MutexLocker mu(Compile_lock);
methodHandle mh(THREAD, Method::checked_resolve_jmethod_id(jmid));
CompiledMethod* code = is_osr ? mh->lookup_osr_nmethod_for(InvocationEntryBci, CompLevel_none, false) : mh->code();
if (code == NULL) {
if (code == nullptr) {
return JNI_FALSE;
}
return !code->is_marked_for_deoptimization();
WB_END
static bool is_excluded_for_compiler(AbstractCompiler* comp, methodHandle& mh) {
if (comp == NULL) {
if (comp == nullptr) {
return true;
}
DirectiveSet* directive = DirectivesStack::getMatchingDirective(mh, comp);
@ -833,7 +833,7 @@ static bool can_be_compiled_at_level(methodHandle& mh, jboolean is_osr, int leve
}
WB_ENTRY(jboolean, WB_IsMethodCompilable(JNIEnv* env, jobject o, jobject method, jint comp_level, jboolean is_osr))
if (method == NULL || comp_level > CompilationPolicy::highest_compile_level()) {
if (method == nullptr || comp_level > CompilationPolicy::highest_compile_level()) {
return false;
}
jmethodID jmid = reflected_method_to_jmid(thread, env, method);
@ -887,14 +887,14 @@ WB_ENTRY(jboolean, WB_IsIntrinsicAvailable(JNIEnv* env, jobject o, jobject metho
DirectiveSet* directive;
AbstractCompiler* comp = CompileBroker::compiler((int)compLevel);
assert(comp != NULL, "compiler not available");
if (compilation_context != NULL) {
assert(comp != nullptr, "compiler not available");
if (compilation_context != nullptr) {
compilation_context_id = reflected_method_to_jmid(thread, env, compilation_context);
CHECK_JNI_EXCEPTION_(env, JNI_FALSE);
methodHandle cch(THREAD, Method::checked_resolve_jmethod_id(compilation_context_id));
directive = DirectivesStack::getMatchingDirective(cch, comp);
} else {
// Calling with NULL matches default directive
// Calling with null matches default directive
directive = DirectivesStack::getDefaultDirective(comp);
}
bool result = comp->is_intrinsic_available(mh, directive);
@ -907,7 +907,7 @@ WB_ENTRY(jint, WB_GetMethodCompilationLevel(JNIEnv* env, jobject o, jobject meth
CHECK_JNI_EXCEPTION_(env, CompLevel_none);
methodHandle mh(THREAD, Method::checked_resolve_jmethod_id(jmid));
CompiledMethod* code = is_osr ? mh->lookup_osr_nmethod_for(InvocationEntryBci, CompLevel_none, false) : mh->code();
return (code != NULL ? code->comp_level() : CompLevel_none);
return (code != nullptr ? code->comp_level() : CompLevel_none);
WB_END
WB_ENTRY(void, WB_MakeMethodNotCompilable(JNIEnv* env, jobject o, jobject method, jint comp_level, jboolean is_osr))
@ -927,7 +927,7 @@ WB_ENTRY(jint, WB_GetMethodDecompileCount(JNIEnv* env, jobject o, jobject method
methodHandle mh(THREAD, Method::checked_resolve_jmethod_id(jmid));
uint cnt = 0;
MethodData* mdo = mh->method_data();
if (mdo != NULL) {
if (mdo != nullptr) {
cnt = mdo->decompile_count();
}
return cnt;
@ -935,7 +935,7 @@ WB_END
// Get the trap count of a method for a specific reason. If the trap count for
// that reason did overflow, this includes the overflow trap count of the method.
// If 'reason' is NULL, the sum of the traps for all reasons will be returned.
// If 'reason' is null, the sum of the traps for all reasons will be returned.
// This number includes the overflow trap count if the trap count for any reason
// did overflow.
WB_ENTRY(jint, WB_GetMethodTrapCount(JNIEnv* env, jobject o, jobject method, jstring reason_obj))
@ -944,20 +944,20 @@ WB_ENTRY(jint, WB_GetMethodTrapCount(JNIEnv* env, jobject o, jobject method, jst
methodHandle mh(THREAD, Method::checked_resolve_jmethod_id(jmid));
uint cnt = 0;
MethodData* mdo = mh->method_data();
if (mdo != NULL) {
if (mdo != nullptr) {
ResourceMark rm(THREAD);
char* reason_str = (reason_obj == NULL) ?
NULL : java_lang_String::as_utf8_string(JNIHandles::resolve_non_null(reason_obj));
char* reason_str = (reason_obj == nullptr) ?
nullptr : java_lang_String::as_utf8_string(JNIHandles::resolve_non_null(reason_obj));
bool overflow = false;
for (uint reason = 0; reason < mdo->trap_reason_limit(); reason++) {
if (reason_str != NULL && !strcmp(reason_str, Deoptimization::trap_reason_name(reason))) {
if (reason_str != nullptr && !strcmp(reason_str, Deoptimization::trap_reason_name(reason))) {
cnt = mdo->trap_count(reason);
// Count in the overflow trap count on overflow
if (cnt == (uint)-1) {
cnt = mdo->trap_count_limit() + mdo->overflow_trap_count();
}
break;
} else if (reason_str == NULL) {
} else if (reason_str == nullptr) {
uint c = mdo->trap_count(reason);
if (c == (uint)-1) {
c = mdo->trap_count_limit();
@ -975,14 +975,14 @@ WB_ENTRY(jint, WB_GetMethodTrapCount(JNIEnv* env, jobject o, jobject method, jst
WB_END
WB_ENTRY(jint, WB_GetDeoptCount(JNIEnv* env, jobject o, jstring reason_obj, jstring action_obj))
if (reason_obj == NULL && action_obj == NULL) {
if (reason_obj == nullptr && action_obj == nullptr) {
return Deoptimization::total_deoptimization_count();
}
ResourceMark rm(THREAD);
const char *reason_str = (reason_obj == NULL) ?
NULL : java_lang_String::as_utf8_string(JNIHandles::resolve_non_null(reason_obj));
const char *action_str = (action_obj == NULL) ?
NULL : java_lang_String::as_utf8_string(JNIHandles::resolve_non_null(action_obj));
const char *reason_str = (reason_obj == nullptr) ?
nullptr : java_lang_String::as_utf8_string(JNIHandles::resolve_non_null(reason_obj));
const char *action_str = (action_obj == nullptr) ?
nullptr : java_lang_String::as_utf8_string(JNIHandles::resolve_non_null(action_obj));
return Deoptimization::deoptimization_count(reason_str, action_str);
WB_END
@ -992,7 +992,7 @@ WB_ENTRY(jint, WB_GetMethodEntryBci(JNIEnv* env, jobject o, jobject method))
CHECK_JNI_EXCEPTION_(env, InvocationEntryBci);
methodHandle mh(THREAD, Method::checked_resolve_jmethod_id(jmid));
CompiledMethod* code = mh->lookup_osr_nmethod_for(InvocationEntryBci, CompLevel_none, false);
return (code != NULL && code->is_osr_method() ? code->osr_entry_bci() : InvocationEntryBci);
return (code != nullptr && code->is_osr_method() ? code->osr_entry_bci() : InvocationEntryBci);
WB_END
WB_ENTRY(jboolean, WB_TestSetDontInlineMethod(JNIEnv* env, jobject o, jobject method, jboolean value))
@ -1037,15 +1037,15 @@ bool WhiteBox::validate_cgroup(const char* proc_cgroups,
bool WhiteBox::compile_method(Method* method, int comp_level, int bci, JavaThread* THREAD) {
// Screen for unavailable/bad comp level or null method
AbstractCompiler* comp = CompileBroker::compiler(comp_level);
if (method == NULL) {
tty->print_cr("WB error: request to compile NULL method");
if (method == nullptr) {
tty->print_cr("WB error: request to compile null method");
return false;
}
if (comp_level > CompilationPolicy::highest_compile_level()) {
tty->print_cr("WB error: invalid compilation level %d", comp_level);
return false;
}
if (comp == NULL) {
if (comp == nullptr) {
tty->print_cr("WB error: no compiler for requested compilation level %d", comp_level);
return false;
}
@ -1060,16 +1060,16 @@ bool WhiteBox::compile_method(Method* method, int comp_level, int bci, JavaThrea
nmethod* nm = CompileBroker::compile_method(mh, bci, comp_level, mh, mh->invocation_count(), CompileTask::Reason_Whitebox, CHECK_false);
MutexLocker mu(THREAD, Compile_lock);
bool is_queued = mh->queued_for_compilation();
if ((!is_blocking && is_queued) || nm != NULL) {
if ((!is_blocking && is_queued) || nm != nullptr) {
return true;
}
// Check code again because compilation may be finished before Compile_lock is acquired.
if (bci == InvocationEntryBci) {
CompiledMethod* code = mh->code();
if (code != NULL && code->as_nmethod_or_null() != NULL) {
if (code != nullptr && code->as_nmethod_or_null() != nullptr) {
return true;
}
} else if (mh->lookup_osr_nmethod_for(bci, comp_level, false) != NULL) {
} else if (mh->lookup_osr_nmethod_for(bci, comp_level, false) != nullptr) {
return true;
}
tty->print("WB error: failed to %s compile at level %d method ", is_blocking ? "blocking" : "", comp_level);
@ -1090,7 +1090,7 @@ WB_END
WB_ENTRY(jboolean, WB_EnqueueInitializerForCompilation(JNIEnv* env, jobject o, jclass klass, jint comp_level))
InstanceKlass* ik = InstanceKlass::cast(java_lang_Class::as_Klass(JNIHandles::resolve(klass)));
Method* clinit = ik->class_initializer();
if (clinit == NULL || clinit->method_holder()->is_not_initialized()) {
if (clinit == nullptr || clinit->method_holder()->is_not_initialized()) {
return false;
}
return WhiteBox::compile_method(clinit, comp_level, InvocationEntryBci, THREAD);
@ -1115,12 +1115,12 @@ WB_ENTRY(jint, WB_MatchesInline(JNIEnv* env, jobject o, jobject method, jstring
methodHandle mh(THREAD, Method::checked_resolve_jmethod_id(jmid));
ResourceMark rm(THREAD);
const char* error_msg = NULL;
const char* error_msg = nullptr;
char* method_str = java_lang_String::as_utf8_string(JNIHandles::resolve_non_null(pattern));
InlineMatcher* m = InlineMatcher::parse_inline_pattern(method_str, error_msg);
if (m == NULL) {
assert(error_msg != NULL, "Always have an error message");
if (m == nullptr) {
assert(error_msg != nullptr, "Always have an error message");
tty->print_cr("Got error: %s", error_msg);
return -1; // Pattern failed
}
@ -1147,11 +1147,11 @@ WB_ENTRY(jint, WB_MatchesMethod(JNIEnv* env, jobject o, jobject method, jstring
ResourceMark rm;
char* method_str = java_lang_String::as_utf8_string(JNIHandles::resolve_non_null(pattern));
const char* error_msg = NULL;
const char* error_msg = nullptr;
BasicMatcher* m = BasicMatcher::parse_method_pattern(method_str, error_msg, false);
if (m == NULL) {
assert(error_msg != NULL, "Must have error_msg");
if (m == nullptr) {
assert(error_msg != nullptr, "Must have error_msg");
tty->print_cr("Got error: %s", error_msg);
return -1;
}
@ -1169,7 +1169,7 @@ WB_ENTRY(void, WB_MarkMethodProfiled(JNIEnv* env, jobject o, jobject method))
methodHandle mh(THREAD, Method::checked_resolve_jmethod_id(jmid));
MethodData* mdo = mh->method_data();
if (mdo == NULL) {
if (mdo == nullptr) {
Method::build_profiling_method_data(mh, CHECK_AND_CLEAR);
mdo = mh->method_data();
}
@ -1189,7 +1189,7 @@ WB_ENTRY(void, WB_ClearMethodState(JNIEnv* env, jobject o, jobject method))
MethodData* mdo = mh->method_data();
MethodCounters* mcs = mh->method_counters();
if (mdo != NULL) {
if (mdo != nullptr) {
mdo->init();
ResourceMark rm(THREAD);
int arg_count = mdo->method()->size_of_parameters();
@ -1204,18 +1204,18 @@ WB_ENTRY(void, WB_ClearMethodState(JNIEnv* env, jobject o, jobject method))
mh->clear_not_c2_compilable();
mh->clear_not_c2_osr_compilable();
NOT_PRODUCT(mh->set_compiled_invocation_count(0));
if (mcs != NULL) {
if (mcs != nullptr) {
mcs->clear_counters();
}
WB_END
template <typename T, int type_enum>
static bool GetVMFlag(JavaThread* thread, JNIEnv* env, jstring name, T* value) {
if (name == NULL) {
if (name == nullptr) {
return false;
}
ThreadToNativeFromVM ttnfv(thread); // can't be in VM when we call JNI
const char* flag_name = env->GetStringUTFChars(name, NULL);
const char* flag_name = env->GetStringUTFChars(name, nullptr);
CHECK_JNI_EXCEPTION_(env, false);
const JVMFlag* flag = JVMFlag::find_declared_flag(flag_name);
JVMFlag::Error result = JVMFlagAccess::get<T, type_enum>(flag, value);
@ -1225,11 +1225,11 @@ static bool GetVMFlag(JavaThread* thread, JNIEnv* env, jstring name, T* value) {
template <typename T, int type_enum>
static bool SetVMFlag(JavaThread* thread, JNIEnv* env, jstring name, T* value) {
if (name == NULL) {
if (name == nullptr) {
return false;
}
ThreadToNativeFromVM ttnfv(thread); // can't be in VM when we call JNI
const char* flag_name = env->GetStringUTFChars(name, NULL);
const char* flag_name = env->GetStringUTFChars(name, nullptr);
CHECK_JNI_EXCEPTION_(env, false);
JVMFlag* flag = JVMFlag::find_flag(flag_name);
JVMFlag::Error result = JVMFlagAccess::set<T, type_enum>(flag, value, JVMFlagOrigin::INTERNAL);
@ -1241,13 +1241,13 @@ template <typename T>
static jobject box(JavaThread* thread, JNIEnv* env, Symbol* name, Symbol* sig, T value) {
ResourceMark rm(thread);
jclass clazz = env->FindClass(name->as_C_string());
CHECK_JNI_EXCEPTION_(env, NULL);
CHECK_JNI_EXCEPTION_(env, nullptr);
jmethodID methodID = env->GetStaticMethodID(clazz,
vmSymbols::valueOf_name()->as_C_string(),
sig->as_C_string());
CHECK_JNI_EXCEPTION_(env, NULL);
CHECK_JNI_EXCEPTION_(env, nullptr);
jobject result = env->CallStaticObjectMethod(clazz, methodID, value);
CHECK_JNI_EXCEPTION_(env, NULL);
CHECK_JNI_EXCEPTION_(env, nullptr);
return result;
}
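
The box() helper above resolves a wrapper class, looks up its static valueOf factory, and invokes it. A stripped-down JNI sketch of the same idiom for jint, intended to run inside a native method; error handling is reduced to returning nullptr when a lookup fails, leaving the pending exception for the caller (box_jint is a hypothetical name, not the WhiteBox helper).

```cpp
#include <jni.h>

static jobject box_jint(JNIEnv* env, jint value) {
  jclass cls = env->FindClass("java/lang/Integer");
  if (cls == nullptr) return nullptr;                  // NoClassDefFoundError pending
  jmethodID valueOf =
      env->GetStaticMethodID(cls, "valueOf", "(I)Ljava/lang/Integer;");
  if (valueOf == nullptr) return nullptr;              // NoSuchMethodError pending
  return env->CallStaticObjectMethod(cls, valueOf, value);
}
```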
@ -1269,8 +1269,8 @@ static jobject doubleBox(JavaThread* thread, JNIEnv* env, jdouble value) {
static const JVMFlag* getVMFlag(JavaThread* thread, JNIEnv* env, jstring name) {
ThreadToNativeFromVM ttnfv(thread); // can't be in VM when we call JNI
const char* flag_name = env->GetStringUTFChars(name, NULL);
CHECK_JNI_EXCEPTION_(env, NULL);
const char* flag_name = env->GetStringUTFChars(name, nullptr);
CHECK_JNI_EXCEPTION_(env, nullptr);
const JVMFlag* result = JVMFlag::find_declared_flag(flag_name);
env->ReleaseStringUTFChars(name, flag_name);
return result;
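
getVMFlag above shows the GetStringUTFChars/ReleaseStringUTFChars pairing that recurs throughout this file, with nullptr now passed for the unused isCopy argument. A small self-contained sketch of that pairing, releasing the chars on every path (utf8_equals is a hypothetical helper):

```cpp
#include <jni.h>
#include <cstring>

static bool utf8_equals(JNIEnv* env, jstring js, const char* expected) {
  const char* chars = env->GetStringUTFChars(js, nullptr);
  if (chars == nullptr) return false;     // OutOfMemoryError is pending
  bool same = std::strcmp(chars, expected) == 0;
  env->ReleaseStringUTFChars(js, chars);  // always release what was acquired
  return same;
}
```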
@ -1278,12 +1278,12 @@ static const JVMFlag* getVMFlag(JavaThread* thread, JNIEnv* env, jstring name) {
WB_ENTRY(jboolean, WB_IsConstantVMFlag(JNIEnv* env, jobject o, jstring name))
const JVMFlag* flag = getVMFlag(thread, env, name);
return (flag != NULL) && flag->is_constant_in_binary();
return (flag != nullptr) && flag->is_constant_in_binary();
WB_END
WB_ENTRY(jboolean, WB_IsLockedVMFlag(JNIEnv* env, jobject o, jstring name))
const JVMFlag* flag = getVMFlag(thread, env, name);
return (flag != NULL) && !(flag->is_unlocked() || flag->is_unlocker());
return (flag != nullptr) && !(flag->is_unlocked() || flag->is_unlocker());
WB_END
WB_ENTRY(jobject, WB_GetBooleanVMFlag(JNIEnv* env, jobject o, jstring name))
@ -1292,7 +1292,7 @@ WB_ENTRY(jobject, WB_GetBooleanVMFlag(JNIEnv* env, jobject o, jstring name))
ThreadToNativeFromVM ttnfv(thread); // can't be in VM when we call JNI
return booleanBox(thread, env, result);
}
return NULL;
return nullptr;
WB_END
template <typename T, int type_enum>
@ -1302,7 +1302,7 @@ jobject GetVMFlag_longBox(JNIEnv* env, JavaThread* thread, jstring name) {
ThreadToNativeFromVM ttnfv(thread); // can't be in VM when we call JNI
return longBox(thread, env, result);
}
return NULL;
return nullptr;
}
WB_ENTRY(jobject, WB_GetIntVMFlag(JNIEnv* env, jobject o, jstring name))
@ -1335,7 +1335,7 @@ WB_ENTRY(jobject, WB_GetDoubleVMFlag(JNIEnv* env, jobject o, jstring name))
ThreadToNativeFromVM ttnfv(thread); // can't be in VM when we call JNI
return doubleBox(thread, env, result);
}
return NULL;
return nullptr;
WB_END
WB_ENTRY(jstring, WB_GetStringVMFlag(JNIEnv* env, jobject o, jstring name))
@ -1343,10 +1343,10 @@ WB_ENTRY(jstring, WB_GetStringVMFlag(JNIEnv* env, jobject o, jstring name))
if (GetVMFlag <JVM_FLAG_TYPE(ccstr)> (thread, env, name, &ccstrResult)) {
ThreadToNativeFromVM ttnfv(thread); // can't be in VM when we call JNI
jstring result = env->NewStringUTF(ccstrResult);
CHECK_JNI_EXCEPTION_(env, NULL);
CHECK_JNI_EXCEPTION_(env, nullptr);
return result;
}
return NULL;
return nullptr;
WB_END
WB_ENTRY(void, WB_SetBooleanVMFlag(JNIEnv* env, jobject o, jstring name, jboolean value))
@ -1392,21 +1392,21 @@ WB_END
WB_ENTRY(void, WB_SetStringVMFlag(JNIEnv* env, jobject o, jstring name, jstring value))
ThreadToNativeFromVM ttnfv(thread); // can't be in VM when we call JNI
const char* ccstrValue;
if (value == NULL) {
ccstrValue = NULL;
if (value == nullptr) {
ccstrValue = nullptr;
}
else {
ccstrValue = env->GetStringUTFChars(value, NULL);
ccstrValue = env->GetStringUTFChars(value, nullptr);
CHECK_JNI_EXCEPTION(env);
}
{
ccstr param = ccstrValue;
ThreadInVMfromNative ttvfn(thread); // back to VM
if (SetVMFlag <JVM_FLAG_TYPE(ccstr)> (thread, env, name, &param)) {
assert(param == NULL, "old value is freed automatically and not returned");
assert(param == nullptr, "old value is freed automatically and not returned");
}
}
if (value != NULL) {
if (value != nullptr) {
env->ReleaseStringUTFChars(value, ccstrValue);
}
WB_END
@ -1425,7 +1425,7 @@ WB_ENTRY(jboolean, WB_IsInStringTable(JNIEnv* env, jobject o, jstring javaString
ResourceMark rm(THREAD);
int len;
jchar* name = java_lang_String::as_unicode_string(JNIHandles::resolve(javaString), len, CHECK_false);
return (StringTable::lookup(name, len) != NULL);
return (StringTable::lookup(name, len) != nullptr);
WB_END
WB_ENTRY(void, WB_FullGC(JNIEnv* env, jobject o))
@ -1450,7 +1450,7 @@ WB_ENTRY(void, WB_ReadReservedMemory(JNIEnv* env, jobject o))
static volatile char* p;
p = os::reserve_memory(os::vm_allocation_granularity());
if (p == NULL) {
if (p == nullptr) {
THROW_MSG(vmSymbols::java_lang_OutOfMemoryError(), "Failed to reserve memory");
}
@ -1462,7 +1462,7 @@ WB_ENTRY(jstring, WB_GetCPUFeatures(JNIEnv* env, jobject o))
ThreadToNativeFromVM ttn(thread);
jstring features_string = env->NewStringUTF(features);
CHECK_JNI_EXCEPTION_(env, NULL);
CHECK_JNI_EXCEPTION_(env, nullptr);
return features_string;
WB_END
@ -1493,23 +1493,23 @@ struct CodeBlobStub {
static jobjectArray codeBlob2objectArray(JavaThread* thread, JNIEnv* env, CodeBlobStub* cb) {
ResourceMark rm;
jclass clazz = env->FindClass(vmSymbols::java_lang_Object()->as_C_string());
CHECK_JNI_EXCEPTION_(env, NULL);
jobjectArray result = env->NewObjectArray(4, clazz, NULL);
CHECK_JNI_EXCEPTION_(env, nullptr);
jobjectArray result = env->NewObjectArray(4, clazz, nullptr);
jstring name = env->NewStringUTF(cb->name);
CHECK_JNI_EXCEPTION_(env, NULL);
CHECK_JNI_EXCEPTION_(env, nullptr);
env->SetObjectArrayElement(result, 0, name);
jobject obj = integerBox(thread, env, cb->size);
CHECK_JNI_EXCEPTION_(env, NULL);
CHECK_JNI_EXCEPTION_(env, nullptr);
env->SetObjectArrayElement(result, 1, obj);
obj = integerBox(thread, env, cb->blob_type);
CHECK_JNI_EXCEPTION_(env, NULL);
CHECK_JNI_EXCEPTION_(env, nullptr);
env->SetObjectArrayElement(result, 2, obj);
obj = longBox(thread, env, cb->address);
CHECK_JNI_EXCEPTION_(env, NULL);
CHECK_JNI_EXCEPTION_(env, nullptr);
env->SetObjectArrayElement(result, 3, obj);
return result;
@ -1518,11 +1518,11 @@ static jobjectArray codeBlob2objectArray(JavaThread* thread, JNIEnv* env, CodeBl
WB_ENTRY(jobjectArray, WB_GetNMethod(JNIEnv* env, jobject o, jobject method, jboolean is_osr))
ResourceMark rm(THREAD);
jmethodID jmid = reflected_method_to_jmid(thread, env, method);
CHECK_JNI_EXCEPTION_(env, NULL);
CHECK_JNI_EXCEPTION_(env, nullptr);
methodHandle mh(THREAD, Method::checked_resolve_jmethod_id(jmid));
CompiledMethod* code = is_osr ? mh->lookup_osr_nmethod_for(InvocationEntryBci, CompLevel_none, false) : mh->code();
jobjectArray result = NULL;
if (code == NULL) {
jobjectArray result = nullptr;
if (code == nullptr) {
return result;
}
int comp_level = code->comp_level();
@ -1530,32 +1530,32 @@ WB_ENTRY(jobjectArray, WB_GetNMethod(JNIEnv* env, jobject o, jobject method, jbo
ThreadToNativeFromVM ttn(thread);
jclass clazz = env->FindClass(vmSymbols::java_lang_Object()->as_C_string());
CHECK_JNI_EXCEPTION_(env, NULL);
result = env->NewObjectArray(5, clazz, NULL);
if (result == NULL) {
CHECK_JNI_EXCEPTION_(env, nullptr);
result = env->NewObjectArray(5, clazz, nullptr);
if (result == nullptr) {
return result;
}
CodeBlobStub stub(code);
jobjectArray codeBlob = codeBlob2objectArray(thread, env, &stub);
CHECK_JNI_EXCEPTION_(env, NULL);
CHECK_JNI_EXCEPTION_(env, nullptr);
env->SetObjectArrayElement(result, 0, codeBlob);
jobject level = integerBox(thread, env, comp_level);
CHECK_JNI_EXCEPTION_(env, NULL);
CHECK_JNI_EXCEPTION_(env, nullptr);
env->SetObjectArrayElement(result, 1, level);
jbyteArray insts = env->NewByteArray(insts_size);
CHECK_JNI_EXCEPTION_(env, NULL);
CHECK_JNI_EXCEPTION_(env, nullptr);
env->SetByteArrayRegion(insts, 0, insts_size, (jbyte*) code->insts_begin());
env->SetObjectArrayElement(result, 2, insts);
jobject id = integerBox(thread, env, code->compile_id());
CHECK_JNI_EXCEPTION_(env, NULL);
CHECK_JNI_EXCEPTION_(env, nullptr);
env->SetObjectArrayElement(result, 3, id);
jobject entry_point = longBox(thread, env, (jlong) code->entry_point());
CHECK_JNI_EXCEPTION_(env, NULL);
CHECK_JNI_EXCEPTION_(env, nullptr);
env->SetObjectArrayElement(result, 4, entry_point);
return result;
@ -1571,7 +1571,7 @@ CodeBlob* WhiteBox::allocate_code_blob(int size, CodeBlobType blob_type) {
{
MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
blob = (BufferBlob*) CodeCache::allocate(full_size, blob_type);
if (blob != NULL) {
if (blob != nullptr) {
::new (blob) BufferBlob("WB::DummyBlob", full_size);
}
}
@ -1601,32 +1601,32 @@ WB_ENTRY(jobjectArray, WB_GetCodeHeapEntries(JNIEnv* env, jobject o, jint blob_t
{
MutexLocker mu(CodeCache_lock, Mutex::_no_safepoint_check_flag);
CodeHeap* heap = WhiteBox::get_code_heap(static_cast<CodeBlobType>(blob_type));
if (heap == NULL) {
return NULL;
if (heap == nullptr) {
return nullptr;
}
for (CodeBlob* cb = (CodeBlob*) heap->first();
cb != NULL; cb = (CodeBlob*) heap->next(cb)) {
cb != nullptr; cb = (CodeBlob*) heap->next(cb)) {
CodeBlobStub* stub = NEW_RESOURCE_OBJ(CodeBlobStub);
new (stub) CodeBlobStub(cb);
blobs.append(stub);
}
}
ThreadToNativeFromVM ttn(thread);
jobjectArray result = NULL;
jobjectArray result = nullptr;
jclass clazz = env->FindClass(vmSymbols::java_lang_Object()->as_C_string());
CHECK_JNI_EXCEPTION_(env, NULL);
result = env->NewObjectArray(blobs.length(), clazz, NULL);
CHECK_JNI_EXCEPTION_(env, NULL);
if (result == NULL) {
CHECK_JNI_EXCEPTION_(env, nullptr);
result = env->NewObjectArray(blobs.length(), clazz, nullptr);
CHECK_JNI_EXCEPTION_(env, nullptr);
if (result == nullptr) {
return result;
}
int i = 0;
for (GrowableArrayIterator<CodeBlobStub*> it = blobs.begin();
it != blobs.end(); ++it) {
jobjectArray obj = codeBlob2objectArray(thread, env, *it);
CHECK_JNI_EXCEPTION_(env, NULL);
CHECK_JNI_EXCEPTION_(env, nullptr);
env->SetObjectArrayElement(result, i, obj);
CHECK_JNI_EXCEPTION_(env, NULL);
CHECK_JNI_EXCEPTION_(env, nullptr);
++i;
}
return result;
@ -1733,7 +1733,7 @@ WB_ENTRY(jlong, WB_AllocateMetaspace(JNIEnv* env, jobject wb, jobject class_load
}
oop class_loader_oop = JNIHandles::resolve(class_loader);
ClassLoaderData* cld = class_loader_oop != NULL
ClassLoaderData* cld = class_loader_oop != nullptr
? java_lang_ClassLoader::loader_data_acquire(class_loader_oop)
: ClassLoaderData::the_null_class_loader_data();
@ -1840,7 +1840,7 @@ WB_END
WB_ENTRY(jint, WB_GetConstantPoolCacheLength(JNIEnv* env, jobject wb, jclass klass))
InstanceKlass* ik = InstanceKlass::cast(java_lang_Class::as_Klass(JNIHandles::resolve(klass)));
ConstantPool* cp = ik->constants();
if (cp->cache() == NULL) {
if (cp->cache() == nullptr) {
return -1;
}
return cp->cache()->length();
@ -1849,7 +1849,7 @@ WB_END
WB_ENTRY(jint, WB_ConstantPoolRemapInstructionOperandFromCache(JNIEnv* env, jobject wb, jclass klass, jint index))
InstanceKlass* ik = InstanceKlass::cast(java_lang_Class::as_Klass(JNIHandles::resolve(klass)));
ConstantPool* cp = ik->constants();
if (cp->cache() == NULL) {
if (cp->cache() == nullptr) {
THROW_MSG_0(vmSymbols::java_lang_IllegalStateException(), "Constant pool does not have a cache");
}
jint cpci = index;
@ -1872,8 +1872,8 @@ WB_END
template <typename T>
static bool GetMethodOption(JavaThread* thread, JNIEnv* env, jobject method, jstring name, T* value) {
assert(value != NULL, "sanity");
if (method == NULL || name == NULL) {
assert(value != nullptr, "sanity");
if (method == nullptr || name == nullptr) {
return false;
}
jmethodID jmid = reflected_method_to_jmid(thread, env, method);
@ -1881,7 +1881,7 @@ static bool GetMethodOption(JavaThread* thread, JNIEnv* env, jobject method, jst
methodHandle mh(thread, Method::checked_resolve_jmethod_id(jmid));
// can't be in VM when we call JNI
ThreadToNativeFromVM ttnfv(thread);
const char* flag_name = env->GetStringUTFChars(name, NULL);
const char* flag_name = env->GetStringUTFChars(name, nullptr);
CHECK_JNI_EXCEPTION_(env, false);
enum CompileCommand option = CompilerOracle::string_to_option(flag_name);
env->ReleaseStringUTFChars(name, flag_name);
@ -1901,7 +1901,7 @@ WB_ENTRY(jobject, WB_GetMethodBooleaneOption(JNIEnv* env, jobject wb, jobject me
ThreadToNativeFromVM ttnfv(thread);
return booleanBox(thread, env, result);
}
return NULL;
return nullptr;
WB_END
WB_ENTRY(jobject, WB_GetMethodIntxOption(JNIEnv* env, jobject wb, jobject method, jstring name))
@ -1911,7 +1911,7 @@ WB_ENTRY(jobject, WB_GetMethodIntxOption(JNIEnv* env, jobject wb, jobject method
ThreadToNativeFromVM ttnfv(thread);
return longBox(thread, env, result);
}
return NULL;
return nullptr;
WB_END
WB_ENTRY(jobject, WB_GetMethodUintxOption(JNIEnv* env, jobject wb, jobject method, jstring name))
@ -1921,7 +1921,7 @@ WB_ENTRY(jobject, WB_GetMethodUintxOption(JNIEnv* env, jobject wb, jobject metho
ThreadToNativeFromVM ttnfv(thread);
return longBox(thread, env, result);
}
return NULL;
return nullptr;
WB_END
WB_ENTRY(jobject, WB_GetMethodDoubleOption(JNIEnv* env, jobject wb, jobject method, jstring name))
@ -1931,7 +1931,7 @@ WB_ENTRY(jobject, WB_GetMethodDoubleOption(JNIEnv* env, jobject wb, jobject meth
ThreadToNativeFromVM ttnfv(thread);
return doubleBox(thread, env, result);
}
return NULL;
return nullptr;
WB_END
WB_ENTRY(jobject, WB_GetMethodStringOption(JNIEnv* env, jobject wb, jobject method, jstring name))
@ -1940,10 +1940,10 @@ WB_ENTRY(jobject, WB_GetMethodStringOption(JNIEnv* env, jobject wb, jobject meth
// can't be in VM when we call JNI
ThreadToNativeFromVM ttnfv(thread);
jstring result = env->NewStringUTF(ccstrResult);
CHECK_JNI_EXCEPTION_(env, NULL);
CHECK_JNI_EXCEPTION_(env, nullptr);
return result;
}
return NULL;
return nullptr;
WB_END
WB_ENTRY(jobject, WB_GetDefaultArchivePath(JNIEnv* env, jobject wb))
@ -1951,7 +1951,7 @@ WB_ENTRY(jobject, WB_GetDefaultArchivePath(JNIEnv* env, jobject wb))
ThreadToNativeFromVM ttn(thread);
jstring path_string = env->NewStringUTF(p);
CHECK_JNI_EXCEPTION_(env, NULL);
CHECK_JNI_EXCEPTION_(env, nullptr);
return path_string;
WB_END
@ -2011,7 +2011,7 @@ WB_ENTRY(jobject, WB_GetResolvedReferences(JNIEnv* env, jobject wb, jclass clazz
objArrayOop refs = cp->resolved_references();
return (jobject)JNIHandles::make_local(THREAD, refs);
} else {
return NULL;
return nullptr;
}
WB_END
@ -2108,16 +2108,16 @@ WB_ENTRY(jboolean, WB_HandshakeReadMonitors(JNIEnv* env, jobject wb, jobject thr
RegisterMap::UpdateMap::include,
RegisterMap::ProcessFrames::include,
RegisterMap::WalkContinuation::skip);
for (javaVFrame* vf = jt->last_java_vframe(&rmap); vf != NULL; vf = vf->java_sender()) {
for (javaVFrame* vf = jt->last_java_vframe(&rmap); vf != nullptr; vf = vf->java_sender()) {
GrowableArray<MonitorInfo*> *monitors = vf->monitors();
if (monitors != NULL) {
if (monitors != nullptr) {
int len = monitors->length();
// Walk monitors youngest to oldest
for (int i = len - 1; i >= 0; i--) {
MonitorInfo* mon_info = monitors->at(i);
if (mon_info->eliminated()) continue;
oop owner = mon_info->owner();
if (owner != NULL) {
if (owner != nullptr) {
info->append(mon_info);
}
}
@ -2132,10 +2132,10 @@ WB_ENTRY(jboolean, WB_HandshakeReadMonitors(JNIEnv* env, jobject wb, jobject thr
};
ReadMonitorsClosure rmc;
if (thread_handle != NULL) {
if (thread_handle != nullptr) {
ThreadsListHandle tlh;
JavaThread* target = nullptr;
bool is_alive = tlh.cv_internal_thread_to_JavaThread(thread_handle, &target, NULL);
bool is_alive = tlh.cv_internal_thread_to_JavaThread(thread_handle, &target, nullptr);
if (is_alive) {
Handshake::execute(&rmc, &tlh, target);
}
@ -2166,10 +2166,10 @@ WB_ENTRY(jint, WB_HandshakeWalkStack(JNIEnv* env, jobject wb, jobject thread_han
if (all_threads) {
Handshake::execute(&tsc);
} else if (thread_handle != NULL) {
} else if (thread_handle != nullptr) {
ThreadsListHandle tlh;
JavaThread* target = nullptr;
bool is_alive = tlh.cv_internal_thread_to_JavaThread(thread_handle, &target, NULL);
bool is_alive = tlh.cv_internal_thread_to_JavaThread(thread_handle, &target, nullptr);
if (is_alive) {
Handshake::execute(&tsc, &tlh, target);
}
@ -2195,10 +2195,10 @@ WB_ENTRY(void, WB_AsyncHandshakeWalkStack(JNIEnv* env, jobject wb, jobject threa
public:
TraceSelfClosure(JavaThread* self_target) : AsyncHandshakeClosure("WB_TraceSelf"), _self(self_target) {}
};
if (thread_handle != NULL) {
if (thread_handle != nullptr) {
ThreadsListHandle tlh;
JavaThread* target = nullptr;
bool is_alive = tlh.cv_internal_thread_to_JavaThread(thread_handle, &target, NULL);
bool is_alive = tlh.cv_internal_thread_to_JavaThread(thread_handle, &target, nullptr);
if (is_alive) {
TraceSelfClosure* tsc = new TraceSelfClosure(target);
Handshake::execute(tsc, target);
@ -2234,7 +2234,7 @@ WB_END
// Some convenience methods to deal with objects from java
int WhiteBox::offset_for_field(const char* field_name, oop object,
Symbol* signature_symbol) {
assert(field_name != NULL && strlen(field_name) > 0, "Field name not valid");
assert(field_name != nullptr && strlen(field_name) > 0, "Field name not valid");
//Get the class of our object
Klass* arg_klass = object->klass();
@ -2248,7 +2248,7 @@ int WhiteBox::offset_for_field(const char* field_name, oop object,
fieldDescriptor fd;
Klass* res = ik->find_field(name_symbol, signature_symbol, &fd);
if (res == NULL) {
if (res == nullptr) {
tty->print_cr("Invalid layout of %s at %s", ik->external_name(),
name_symbol->as_C_string());
vm_exit_during_initialization("Invalid layout of preloaded class: use -Xlog:class+load=info to see the origin of the problem class");
@ -2265,8 +2265,8 @@ const char* WhiteBox::lookup_jstring(const char* field_name, oop object) {
int offset = offset_for_field(field_name, object,
vmSymbols::string_signature());
oop string = object->obj_field(offset);
if (string == NULL) {
return NULL;
if (string == nullptr) {
return nullptr;
}
const char* ret = java_lang_String::as_utf8_string(string);
return ret;
@ -2291,10 +2291,10 @@ void WhiteBox::register_methods(JNIEnv* env, jclass wbclass, JavaThread* thread,
CHECK_JNI_EXCEPTION(env);
for (int i = 0, n = method_count; i < n; ++i) {
// Skip dummy entries
if (method_array[i].fnPtr == NULL) continue;
if (method_array[i].fnPtr == nullptr) continue;
if (env->RegisterNatives(wbclass, &method_array[i], 1) != 0) {
jthrowable throwable_obj = env->ExceptionOccurred();
if (throwable_obj != NULL) {
if (throwable_obj != nullptr) {
env->ExceptionClear();
if (env->IsInstanceOf(throwable_obj, no_such_method_error_klass)) {
// NoSuchMethodError is thrown when a method can't be found or a method is not native.
@ -2316,7 +2316,7 @@ void WhiteBox::register_methods(JNIEnv* env, jclass wbclass, JavaThread* thread,
WB_ENTRY(jint, WB_AddCompilerDirective(JNIEnv* env, jobject o, jstring compDirect))
// can't be in VM when we call JNI
ThreadToNativeFromVM ttnfv(thread);
const char* dir = env->GetStringUTFChars(compDirect, NULL);
const char* dir = env->GetStringUTFChars(compDirect, nullptr);
CHECK_JNI_EXCEPTION_(env, 0);
int ret;
{
@ -2341,7 +2341,7 @@ WB_ENTRY(jboolean, WB_CheckLibSpecifiesNoexecstack(JNIEnv* env, jobject o, jstri
#ifdef LINUX
// Can't be in VM when we call JNI.
ThreadToNativeFromVM ttnfv(thread);
const char* lf = env->GetStringUTFChars(libfile, NULL);
const char* lf = env->GetStringUTFChars(libfile, nullptr);
CHECK_JNI_EXCEPTION_(env, 0);
ret = (jboolean) ElfFile::specifies_noexecstack(lf);
env->ReleaseStringUTFChars(libfile, lf);
@ -2374,11 +2374,11 @@ WB_ENTRY(jint, WB_ValidateCgroup(JNIEnv* env,
jint ret = 0;
#ifdef LINUX
ThreadToNativeFromVM ttnfv(thread);
const char* p_cgroups = env->GetStringUTFChars(proc_cgroups, NULL);
const char* p_cgroups = env->GetStringUTFChars(proc_cgroups, nullptr);
CHECK_JNI_EXCEPTION_(env, 0);
const char* p_s_cgroup = env->GetStringUTFChars(proc_self_cgroup, NULL);
const char* p_s_cgroup = env->GetStringUTFChars(proc_self_cgroup, nullptr);
CHECK_JNI_EXCEPTION_(env, 0);
const char* p_s_mountinfo = env->GetStringUTFChars(proc_self_mountinfo, NULL);
const char* p_s_mountinfo = env->GetStringUTFChars(proc_self_mountinfo, nullptr);
CHECK_JNI_EXCEPTION_(env, 0);
u1 cg_type_flags = 0;
// This sets cg_type_flags
@ -2429,7 +2429,7 @@ WB_ENTRY(void, WB_CheckThreadObjOfTerminatingThread(JNIEnv* env, jobject wb, job
// Look up the target thread by tid to ensure it is present
JavaThread* t = tlh.list()->find_JavaThread_from_java_tid(tid);
if (t == NULL) {
if (t == nullptr) {
THROW_MSG(vmSymbols::java_lang_RuntimeException(), "Target thread not found in ThreadsList!");
}
@ -2476,7 +2476,7 @@ WB_ENTRY(void, WB_VerifyFrames(JNIEnv* env, jobject wb, jboolean log, jboolean u
for (StackFrameStream fst(JavaThread::current(), update_map, true); !fst.is_done(); fst.next()) {
frame* current_frame = fst.current();
if (log) {
current_frame->print_value_on(&st, NULL);
current_frame->print_value_on(&st, nullptr);
}
current_frame->verify(fst.register_map());
}
@ -2502,7 +2502,7 @@ WB_END
WB_ENTRY(jstring, WB_GetLibcName(JNIEnv* env, jobject o))
ThreadToNativeFromVM ttn(thread);
jstring info_string = env->NewStringUTF(XSTR(LIBC));
CHECK_JNI_EXCEPTION_(env, NULL);
CHECK_JNI_EXCEPTION_(env, nullptr);
return info_string;
WB_END
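
WB_GetLibcName and the other entries above create a jstring and bail out if an exception is pending before handing it back. A generic analogue in plain JNI, without the CHECK_JNI_EXCEPTION_ macro (make_utf is a hypothetical helper):

```cpp
#include <jni.h>

static jstring make_utf(JNIEnv* env, const char* text) {
  jstring s = env->NewStringUTF(text);
  if (env->ExceptionCheck()) {
    return nullptr;                       // e.g. OutOfMemoryError stays pending
  }
  return s;
}
```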