mirror of https://github.com/openjdk/jdk.git
synced 2026-01-28 12:09:14 +00:00
8345687: Improve the implementation of SegmentFactories::allocateSegment
Reviewed-by: jvernee, mcimadamore
This commit is contained in:
parent f4ddac5e58
commit e1bcff3ada
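The user-visible contract is unchanged by this commit: memory returned by Arena::allocate is still zero-initialized; the zeroing simply moves from a separate fill((byte) 0) pass in ArenaImpl into the allocation path itself (the new init flag in the hunks below). A minimal sketch of that contract using only the public FFM API (class name and sizes here are illustrative):

import java.lang.foreign.Arena;
import java.lang.foreign.MemorySegment;
import java.lang.foreign.ValueLayout;

public class ZeroInitDemo {
    public static void main(String[] args) {
        try (Arena arena = Arena.ofConfined()) {
            // Arena::allocate returns zero-initialized native memory.
            MemorySegment segment = arena.allocate(100);
            for (long i = 0; i < segment.byteSize(); i++) {
                assert segment.get(ValueLayout.JAVA_BYTE, i) == 0;
            }
            System.out.println("allocated " + segment.byteSize() + " zeroed bytes");
        } // native memory is freed when the confined arena is closed
    }
}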
@@ -48,14 +48,11 @@ public final class ArenaImpl implements Arena {
     }
 
     public NativeMemorySegmentImpl allocateNoInit(long byteSize, long byteAlignment) {
         Utils.checkAllocationSizeAndAlign(byteSize, byteAlignment);
-        return SegmentFactories.allocateSegment(byteSize, byteAlignment, session, shouldReserveMemory);
+        return SegmentFactories.allocateNativeSegment(byteSize, byteAlignment, session, shouldReserveMemory, false);
     }
 
     @Override
     public NativeMemorySegmentImpl allocate(long byteSize, long byteAlignment) {
-        NativeMemorySegmentImpl segment = allocateNoInit(byteSize, byteAlignment);
-        segment.fill((byte)0);
-        return segment;
+        return SegmentFactories.allocateNativeSegment(byteSize, byteAlignment, session, shouldReserveMemory, true);
     }
 }
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2023, 2024, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2023, 2025, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -35,6 +35,7 @@ import jdk.internal.foreign.HeapMemorySegmentImpl.OfLong;
 import jdk.internal.foreign.HeapMemorySegmentImpl.OfShort;
 import jdk.internal.misc.Unsafe;
+import jdk.internal.misc.VM;
 import jdk.internal.vm.annotation.DontInline;
 import jdk.internal.vm.annotation.ForceInline;
 
 import java.lang.foreign.MemorySegment;
@@ -175,39 +176,63 @@ public class SegmentFactories {
         return new OfDouble(offset, base, length, readOnly, bufferScope);
     }
 
-    public static NativeMemorySegmentImpl allocateSegment(long byteSize, long byteAlignment, MemorySessionImpl sessionImpl,
-                                                          boolean shouldReserve) {
+    public static NativeMemorySegmentImpl allocateNativeSegment(long byteSize, long byteAlignment, MemorySessionImpl sessionImpl,
+                                                                boolean shouldReserve, boolean init) {
+        long address = SegmentFactories.allocateNativeInternal(byteSize, byteAlignment, sessionImpl, shouldReserve, init);
+        return new NativeMemorySegmentImpl(address, byteSize, false, sessionImpl);
+    }
+
+    private static long allocateNativeInternal(long byteSize, long byteAlignment, MemorySessionImpl sessionImpl,
+                                               boolean shouldReserve, boolean init) {
         ensureInitialized();
         Utils.checkAllocationSizeAndAlign(byteSize, byteAlignment);
         sessionImpl.checkValidState();
         if (VM.isDirectMemoryPageAligned()) {
             byteAlignment = Math.max(byteAlignment, AbstractMemorySegmentImpl.NIO_ACCESS.pageSize());
         }
-        long alignedSize = Math.max(1L, byteAlignment > MAX_MALLOC_ALIGN ?
-                byteSize + (byteAlignment - 1) :
-                byteSize);
+        // Align the allocation size up to a multiple of 8 so we can init the memory with longs
+        long alignedSize = init ? Utils.alignUp(byteSize, Long.BYTES) : byteSize;
 
-        if (shouldReserve) {
-            AbstractMemorySegmentImpl.NIO_ACCESS.reserveMemory(alignedSize, byteSize);
+        long allocationSize;
+        long allocationBase;
+        long result;
+        if (byteAlignment > MAX_MALLOC_ALIGN) {
+            allocationSize = alignedSize + byteAlignment - MAX_MALLOC_ALIGN;
+            if (shouldReserve) {
+                AbstractMemorySegmentImpl.NIO_ACCESS.reserveMemory(allocationSize, byteSize);
+            }
+
+            allocationBase = allocateMemoryWrapper(allocationSize);
+            result = Utils.alignUp(allocationBase, byteAlignment);
+        } else {
+            allocationSize = alignedSize;
+            if (shouldReserve) {
+                AbstractMemorySegmentImpl.NIO_ACCESS.reserveMemory(allocationSize, byteSize);
+            }
+
+            allocationBase = allocateMemoryWrapper(allocationSize);
+            result = allocationBase;
         }
 
-        long buf = allocateMemoryWrapper(alignedSize);
-        long alignedBuf = Utils.alignUp(buf, byteAlignment);
-        NativeMemorySegmentImpl segment = new NativeMemorySegmentImpl(buf, alignedSize,
-                false, sessionImpl);
+        if (init) {
+            initNativeMemory(result, alignedSize);
+        }
         sessionImpl.addOrCleanupIfFail(new MemorySessionImpl.ResourceList.ResourceCleanup() {
             @Override
             public void cleanup() {
-                UNSAFE.freeMemory(buf);
+                UNSAFE.freeMemory(allocationBase);
                 if (shouldReserve) {
-                    AbstractMemorySegmentImpl.NIO_ACCESS.unreserveMemory(alignedSize, byteSize);
+                    AbstractMemorySegmentImpl.NIO_ACCESS.unreserveMemory(allocationSize, byteSize);
                 }
             }
         });
-        if (alignedSize != byteSize) {
-            long delta = alignedBuf - buf;
-            segment = (NativeMemorySegmentImpl) segment.asSlice(delta, byteSize);
+        return result;
+    }
+
+    private static void initNativeMemory(long address, long byteSize) {
+        for (long i = 0; i < byteSize; i += Long.BYTES) {
+            UNSAFE.putLongUnaligned(null, address + i, 0);
         }
-        return segment;
     }
 
     private static long allocateMemoryWrapper(long size) {
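For reference, a small standalone sketch of the arithmetic the new path relies on (not JDK code: alignUp and the constant below are stand-ins for Utils.alignUp and MAX_MALLOC_ALIGN, and malloc is assumed to hand back 16-byte-aligned blocks). The requested size is rounded up to a multiple of 8 so initNativeMemory can zero with whole-long stores, and an over-aligned request is padded by byteAlignment - MAX_MALLOC_ALIGN bytes so a suitably aligned address always fits inside the malloc'd block.

public class AlignmentSketch {
    // Assumption for this sketch: malloc results are at least 16-byte aligned.
    static final long MAX_MALLOC_ALIGN = 16;

    // Round x up to the next multiple of a power-of-two alignment.
    static long alignUp(long x, long alignment) {
        return (x + alignment - 1) & -alignment;
    }

    public static void main(String[] args) {
        long byteSize = 100;
        long byteAlignment = 64;

        // Pad the size to 8 bytes so zeroing can be done with long stores.
        long alignedSize = alignUp(byteSize, Long.BYTES);                     // 104

        // Over-aligned case: pad the allocation so that an address aligned to
        // byteAlignment always exists within the malloc'd block.
        long allocationSize = alignedSize + byteAlignment - MAX_MALLOC_ALIGN; // 152

        long allocationBase = 0x7f00_0000_0010L;   // example 16-byte aligned malloc result
        long result = alignUp(allocationBase, byteAlignment);

        // The aligned slice of alignedSize bytes fits inside the allocation:
        // result - allocationBase is at most byteAlignment - MAX_MALLOC_ALIGN.
        assert result + alignedSize <= allocationBase + allocationSize;
        System.out.printf("base=%x result=%x%n", allocationBase, result);
    }
}

In the common case where byteAlignment <= MAX_MALLOC_ALIGN, no padding is needed and the malloc result is used directly, which is what the new else branch does.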
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 1996, 2024, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 1996, 2025, Oracle and/or its affiliates. All rights reserved.
  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
  *
  * This code is free software; you can redistribute it and/or modify it
@@ -151,12 +151,12 @@ public class VM {
     // aligned. The "-XX:+PageAlignDirectMemory" option can be used to force
     // buffers, allocated by ByteBuffer.allocateDirect, to be page aligned.
-    private static boolean pageAlignDirectMemory;
+    @Stable
+    private static Boolean pageAlignDirectMemory;
 
     // Returns {@code true} if the direct buffers should be page aligned. This
     // variable is initialized by saveAndRemoveProperties.
     public static boolean isDirectMemoryPageAligned() {
-        return pageAlignDirectMemory;
+        return pageAlignDirectMemory != null && pageAlignDirectMemory;
     }
 
     /**
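The isDirectMemoryPageAligned() change pairs a boxed Boolean with @Stable so that, once the property has been read, the JIT can treat the flag as a constant, while reads that happen before saveAndRemoveProperties runs see null and report false. A minimal standalone sketch of that pattern (illustrative names; @Stable is a JDK-internal annotation, so it appears here only as a comment):

final class PageAlignFlag {
    // In the JDK this field would carry @Stable: once it holds a non-null
    // value, the JIT may constant-fold reads of it.
    private static Boolean pageAligned;

    // Called once during startup with the raw system property value.
    static void init(String propertyValue) {
        pageAligned = "true".equals(propertyValue);
    }

    // Reads made before init() see null and fall back to false.
    static boolean isPageAligned() {
        return pageAligned != null && pageAligned;
    }
}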
@@ -261,9 +261,7 @@ public class VM {
         }
 
         // Check if direct buffers should be page aligned
-        s = props.get("sun.nio.PageAlignDirectMemory");
-        if ("true".equals(s))
-            pageAlignDirectMemory = true;
+        pageAlignDirectMemory = "true".equals(props.get("sun.nio.PageAlignDirectMemory"));
     }
 
     // Initialize any miscellaneous operating system settings that need to be
@@ -30,9 +30,7 @@ import org.openjdk.jmh.annotations.Measurement;
 import org.openjdk.jmh.annotations.Mode;
 import org.openjdk.jmh.annotations.OutputTimeUnit;
 import org.openjdk.jmh.annotations.Param;
-import org.openjdk.jmh.annotations.Setup;
 import org.openjdk.jmh.annotations.State;
-import org.openjdk.jmh.annotations.TearDown;
 import org.openjdk.jmh.annotations.Warmup;
 
 import java.lang.foreign.Arena;
@@ -40,7 +38,6 @@ import java.lang.foreign.FunctionDescriptor;
 import java.lang.foreign.Linker;
 import java.lang.foreign.MemorySegment;
 import java.lang.foreign.MemorySegment.Scope;
-import java.lang.foreign.SegmentAllocator;
 import java.lang.foreign.ValueLayout;
 import java.lang.invoke.MethodHandle;
 import java.util.concurrent.TimeUnit;
@@ -53,16 +50,9 @@ import java.util.concurrent.TimeUnit;
 @Fork(value = 3, jvmArgs = { "--enable-native-access=ALL-UNNAMED", "--add-opens=java.base/jdk.internal.misc=ALL-UNNAMED" })
 public class AllocTest extends CLayouts {
 
-    Arena arena = Arena.ofConfined();
-
-    @Param({"5", "20", "100", "500", "1000"})
+    @Param({"5", "20", "100", "500", "2000", "8000"})
     public int size;
 
-    @TearDown
-    public void tearDown() {
-        arena.close();
-    }
-
     @Benchmark
     public long alloc_confined() {
         try (Arena arena = Arena.ofConfined()) {
@@ -84,7 +74,7 @@ public class AllocTest extends CLayouts {
         }
     }
 
-    public static class CallocArena implements Arena {
+    private static class CallocArena implements Arena {
 
         static final MethodHandle CALLOC = Linker.nativeLinker()
                 .downcallHandle(
@@ -118,7 +108,7 @@ public class AllocTest extends CLayouts {
         }
     }
 
-    public static class UnsafeArena implements Arena {
+    private static class UnsafeArena implements Arena {
 
         final Arena arena = Arena.ofConfined();
 
@@ -134,9 +124,9 @@ public class AllocTest extends CLayouts {
 
         @Override
         public MemorySegment allocate(long byteSize, long byteAlignment) {
-            MemorySegment segment = MemorySegment.ofAddress(Utils.unsafe.allocateMemory(byteSize));
-            Utils.unsafe.setMemory(segment.address(), byteSize, (byte)0);
-            return segment.reinterpret(byteSize, arena, ms -> Utils.unsafe.freeMemory(segment.address()));
+            long address = Utils.unsafe.allocateMemory(byteSize);
+            Utils.unsafe.setMemory(address, byteSize, (byte)0);
+            return MemorySegment.ofAddress(address).reinterpret(byteSize, arena, ms -> Utils.unsafe.freeMemory(ms.address()));
         }
     }
 }
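For completeness, a sketch of how a JMH benchmark like AllocTest can be launched programmatically, assuming the compiled micro-benchmarks and the JMH runner are on the classpath (the include pattern is only illustrative; inside the JDK tree the micros are normally run through the build's test harness instead):

import org.openjdk.jmh.runner.Runner;
import org.openjdk.jmh.runner.RunnerException;
import org.openjdk.jmh.runner.options.Options;
import org.openjdk.jmh.runner.options.OptionsBuilder;

public class RunAllocTest {
    public static void main(String[] args) throws RunnerException {
        // Select benchmarks whose name matches "AllocTest"; the @Fork annotation
        // on the benchmark class already supplies the --enable-native-access and
        // --add-opens arguments the forked JVMs need.
        Options options = new OptionsBuilder()
                .include("AllocTest")
                .build();
        new Runner(options).run();
    }
}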