mirror of https://github.com/intel/compute-runtime.git (synced 2025-12-26 23:33:20 +08:00)
Remove Drm32Bit allocator.

- not used anymore.

Change-Id: Ibb7da1758feb67224ac0b172c72f45c2f1c229d9
committed by sys_ocldev
parent fbcc782a91
commit fca7b4e044
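For orientation before the diff: the deleted class implemented a legacy 32-bit allocation path that asked the kernel for memory below 4 GB via MAP_32BIT and fell back to explicit address hints when that failed. A minimal, self-contained sketch of that pattern follows; it is illustrative only (the helper name allocate32BitExample and the hint value are assumptions, not part of this commit), and the authoritative code is the Drm32BitAllocator shown in the diff below.

// Illustrative sketch only; MAP_32BIT is Linux/x86-64 specific.
#include <sys/mman.h>
#include <cstddef>
#include <cstdint>
#include <cstdio>

// First try MAP_32BIT, then retry with an address hint inside the 32-bit
// range, mirroring the fallback logic of the removed allocator.
static void *allocate32BitExample(size_t size, uintptr_t hint) {
    void *ptr = mmap(nullptr, size, PROT_READ | PROT_WRITE,
                     MAP_PRIVATE | MAP_ANONYMOUS | MAP_32BIT, -1, 0);
    if (ptr == MAP_FAILED) {
        // Retry with a hint; the kernel may still place the mapping elsewhere.
        ptr = mmap(reinterpret_cast<void *>(hint), size, PROT_READ | PROT_WRITE,
                   MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
    }
    return (ptr == MAP_FAILED) ? nullptr : ptr;
}

int main() {
    void *p = allocate32BitExample(4096, 0x10000000);
    std::printf("mapped at %p\n", p);
    if (p != nullptr) {
        munmap(p, 4096);
    }
    return 0;
}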
@@ -74,7 +74,6 @@ DECLARE_DEBUG_VARIABLE(bool, ForceCsrFlushing, false, "Forces flushing of comman
DECLARE_DEBUG_VARIABLE(bool, ForceCsrReprogramming, false, "Forces reprogramming of command stream receiver")
DECLARE_DEBUG_VARIABLE(bool, DisableStatelessToStatefulOptimization, false, "Disables stateless to stateful optimization for buffers")
DECLARE_DEBUG_VARIABLE(bool, DisableConcurrentBlockExecution, false, "disables concurrent block kernel execution")
DECLARE_DEBUG_VARIABLE(bool, UseNewHeapAllocator, true, "Custom 4GB heap allocator is used")
DECLARE_DEBUG_VARIABLE(bool, UseNoRingFlushesKmdMode, true, "Windows only, passes flag to KMD that informs KMD to not emit any ring buffer flushes.")
DECLARE_DEBUG_VARIABLE(bool, DisableZeroCopyForUseHostPtr, false, "When active all buffer allocations created with CL_MEM_USE_HOST_PTR flag will not share memory with CPU.")
DECLARE_DEBUG_VARIABLE(bool, DisableZeroCopyForBuffers, false, "When active all buffer allocations will not share memory with CPU.")
@@ -15,64 +15,13 @@
#include <memory>
#include <sys/mman.h>

using namespace OCLRT;

constexpr uintptr_t maxMmap32BitAddress = 0x80000000;
constexpr uintptr_t lowerRangeStart = 0x10000000;

class Allocator32bit::OsInternals {
  public:
    uintptr_t upperRangeAddress = maxMmap32BitAddress;
    uintptr_t lowerRangeAddress = lowerRangeStart;
    decltype(&mmap) mmapFunction = mmap;
    decltype(&munmap) munmapFunction = munmap;
    void *heapBasePtr = nullptr;
    size_t heapSize = 0;

    class Drm32BitAllocator {
      protected:
        Allocator32bit::OsInternals &outer;

      public:
        Drm32BitAllocator(Allocator32bit::OsInternals &outer) : outer(outer) {
        }

        void *allocate(size_t size) {
            auto ptr = outer.mmapFunction(nullptr, size, PROT_READ | PROT_WRITE, MAP_PRIVATE | MAP_ANONYMOUS | MAP_32BIT, -1, 0);

            // In case we failed, retry with address provided as a hint
            if (ptr == MAP_FAILED) {
                ptr = outer.mmapFunction((void *)outer.upperRangeAddress, size, PROT_READ | PROT_WRITE, MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
                if (((uintptr_t)ptr + alignUp(size, 4096)) >= max32BitAddress || ptr == MAP_FAILED) {
                    outer.munmapFunction(ptr, size);

                    // Try to use lower range
                    ptr = outer.mmapFunction((void *)outer.lowerRangeAddress, size, PROT_READ | PROT_WRITE, MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
                    if ((uintptr_t)ptr >= max32BitAddress) {
                        outer.munmapFunction(ptr, size);
                        return nullptr;
                    }

                    outer.lowerRangeAddress = (uintptr_t)ptr + alignUp(size, 4096);
                    return ptr;
                }

                outer.upperRangeAddress = (uintptr_t)ptr + alignUp(size, 4096);
            }
            return ptr;
        }

        int free(void *ptr, uint64_t size) {
            auto alignedSize = alignUp(size, 4096);
            auto offsetedPtr = (uintptr_t)ptrOffset(ptr, alignedSize);

            if (offsetedPtr == outer.upperRangeAddress) {
                outer.upperRangeAddress -= alignedSize;
            } else if (offsetedPtr == outer.lowerRangeAddress) {
                outer.lowerRangeAddress -= alignedSize;
            }
            return outer.munmapFunction(ptr, size);
        }
    };
    Drm32BitAllocator *drmAllocator = nullptr;
};

bool OCLRT::is32BitOsAllocatorAvailable = true;
@@ -85,65 +34,48 @@ OCLRT::Allocator32bit::Allocator32bit() : Allocator32bit(new OsInternals) {
}

OCLRT::Allocator32bit::Allocator32bit(Allocator32bit::OsInternals *osInternalsIn) : osInternals(osInternalsIn) {
    size_t sizeToMap = getSizeToMap();
    void *ptr = this->osInternals->mmapFunction(nullptr, sizeToMap, PROT_READ | PROT_WRITE, MAP_PRIVATE | MAP_ANONYMOUS | MAP_NORESERVE, -1, 0);

    if (DebugManager.flags.UseNewHeapAllocator.get()) {
        size_t sizeToMap = getSizeToMap();
        void *ptr = this->osInternals->mmapFunction(nullptr, sizeToMap, PROT_READ | PROT_WRITE, MAP_PRIVATE | MAP_ANONYMOUS | MAP_NORESERVE, -1, 0);
        if (ptr == MAP_FAILED) {
            sizeToMap -= sizeToMap / 4;
            ptr = this->osInternals->mmapFunction(nullptr, sizeToMap, PROT_READ | PROT_WRITE, MAP_PRIVATE | MAP_ANONYMOUS | MAP_NORESERVE, -1, 0);

            DebugManager.log(DebugManager.flags.PrintDebugMessages.get(), __FUNCTION__, " Allocator RETRY ptr == ", ptr);

        if (ptr == MAP_FAILED) {
            sizeToMap -= sizeToMap / 4;
            ptr = this->osInternals->mmapFunction(nullptr, sizeToMap, PROT_READ | PROT_WRITE, MAP_PRIVATE | MAP_ANONYMOUS | MAP_NORESERVE, -1, 0);

            DebugManager.log(DebugManager.flags.PrintDebugMessages.get(), __FUNCTION__, " Allocator RETRY ptr == ", ptr);

            if (ptr == MAP_FAILED) {
                ptr = nullptr;
                sizeToMap = 0;
            }
            ptr = nullptr;
            sizeToMap = 0;
        }

        DebugManager.log(DebugManager.flags.PrintDebugMessages.get(), __FUNCTION__, "Allocator ptr == ", ptr);

        osInternals->heapBasePtr = ptr;
        osInternals->heapSize = sizeToMap;
        base = reinterpret_cast<uint64_t>(ptr);
        size = sizeToMap;

        heapAllocator = std::unique_ptr<HeapAllocator>(new HeapAllocator(base, sizeToMap));
    } else {
        this->osInternals->drmAllocator = new Allocator32bit::OsInternals::Drm32BitAllocator(*this->osInternals);
    }

    DebugManager.log(DebugManager.flags.PrintDebugMessages.get(), __FUNCTION__, "Allocator ptr == ", ptr);

    osInternals->heapBasePtr = ptr;
    osInternals->heapSize = sizeToMap;
    base = reinterpret_cast<uint64_t>(ptr);
    size = sizeToMap;

    heapAllocator = std::unique_ptr<HeapAllocator>(new HeapAllocator(base, sizeToMap));
}

OCLRT::Allocator32bit::~Allocator32bit() {
    if (this->osInternals.get() != nullptr) {
        if (this->osInternals->heapBasePtr != nullptr)
            this->osInternals->munmapFunction(this->osInternals->heapBasePtr, this->osInternals->heapSize);

        if (this->osInternals->drmAllocator != nullptr)
            delete this->osInternals->drmAllocator;
    }
}

uint64_t OCLRT::Allocator32bit::allocate(size_t &size) {
    uint64_t ptr = 0llu;
    if (DebugManager.flags.UseNewHeapAllocator.get()) {
        ptr = this->heapAllocator->allocate(size);
    } else {
        ptr = reinterpret_cast<uint64_t>(this->osInternals->drmAllocator->allocate(size));
    }
    return ptr;
    return this->heapAllocator->allocate(size);
}

int Allocator32bit::free(uint64_t ptr, size_t size) {
    if (ptr == reinterpret_cast<uint64_t>(MAP_FAILED) || ptr == 0llu)
    if (ptr == reinterpret_cast<uint64_t>(MAP_FAILED))
        return 0;

    if (DebugManager.flags.UseNewHeapAllocator.get()) {
        this->heapAllocator->free(ptr, size);
    } else {
        return this->osInternals->drmAllocator->free(reinterpret_cast<void *>(ptr), size);
    }
    this->heapAllocator->free(ptr, size);

    return 0;
}
@@ -15,14 +15,8 @@ namespace OCLRT {

constexpr uintptr_t startOf32MmapRegion = 0x40000000;
static bool failMmap = false;
static bool fail32BitMmap = false;
static bool failUpperRange = false;
static bool failLowerRanger = false;
static size_t maxMmapLength = std::numeric_limits<size_t>::max();

static uintptr_t startUpperHeap = maxMmap32BitAddress;
static uintptr_t lowerRangeHeapStart = lowerRangeStart;

static uintptr_t offsetIn32BitRange = 0;
static uint32_t mmapCallCount = 0u;
static uint32_t unmapCallCount = 0u;

@@ -31,10 +25,8 @@ static uint32_t mmapFailCount = 0u;
void *MockMmap(void *addr, size_t length, int prot, int flags,
               int fd, off_t offset) noexcept {

    bool return32bitRange = true;
    bool returnUpperRange = false;
    bool returnLowerRange = false;
    mmapCallCount++;
    UNRECOVERABLE_IF(addr);

    if (failMmap || length > maxMmapLength) {
        return MAP_FAILED;

@@ -45,42 +37,10 @@ void *MockMmap(void *addr, size_t length, int prot, int flags,
        return MAP_FAILED;
    }

    if (addr) {
        return32bitRange = false;
        if ((uintptr_t)addr >= maxMmap32BitAddress) {
            if (failUpperRange) {
                return MAP_FAILED;
            }
            returnUpperRange = true;
        }
        if ((uintptr_t)addr >= lowerRangeStart) {
            if (failLowerRanger) {
                return MAP_FAILED;
            }
            returnLowerRange = true;
        }
    }
    uintptr_t ptrToReturn = startOf32MmapRegion + offsetIn32BitRange;
    offsetIn32BitRange += alignUp(length, MemoryConstants::pageSize);

    if (flags & MAP_32BIT) {
        if (fail32BitMmap) {
            return MAP_FAILED;
        }
        return32bitRange = true;
    }

    uintptr_t ptrToReturn = (uintptr_t)addr;
    if (return32bitRange) {
        ptrToReturn = startOf32MmapRegion + offsetIn32BitRange;
        offsetIn32BitRange += alignUp(length, MemoryConstants::pageSize);
    } else if (returnUpperRange) {
        ptrToReturn = (uintptr_t)addr;
    } else if (returnLowerRange) {
        ptrToReturn = (uintptr_t)addr;
    } else {
        ptrToReturn = (uintptr_t)MAP_FAILED;
    }

    return (void *)ptrToReturn;
    return reinterpret_cast<void *>(ptrToReturn);
}
int MockMunmap(void *addr, size_t length) noexcept {
    unmapCallCount++;

@@ -104,13 +64,8 @@ class MockAllocator32Bit : public Allocator32bit {
        resetState();
    }
    static void resetState() {
        fail32BitMmap = false;
        failUpperRange = false;
        failLowerRanger = false;
        failMmap = false;
        maxMmapLength = std::numeric_limits<size_t>::max();
        startUpperHeap = maxMmap32BitAddress;
        lowerRangeHeapStart = lowerRangeStart;
        offsetIn32BitRange = 0u;
        mmapCallCount = 0u;
        unmapCallCount = 0u;

@@ -123,4 +78,4 @@ class MockAllocator32Bit : public Allocator32bit {

    OsInternals *getOsInternals() const { return this->osInternals.get(); }
};
} // namespace OCLRT
@@ -857,15 +857,9 @@ TEST_F(DrmMemoryManagerTest, Given32bitAllocatorWhenAskedForBufferAllocationThen

TEST_F(DrmMemoryManagerTest, Given32bitAllocatorWhenAskedForBufferCreatedFromHostPtrThen32BitBufferIsReturned) {
    DebugManagerStateRestore dbgRestorer;
    if (DebugManager.flags.UseNewHeapAllocator.get()) {
        mock->ioctl_expected.gemUserptr = 1;
        mock->ioctl_expected.gemWait = 1;
        mock->ioctl_expected.gemClose = 1;
    } else {
        mock->ioctl_expected.gemUserptr = 2;
        mock->ioctl_expected.gemWait = 2;
        mock->ioctl_expected.gemClose = 2;
    }
    mock->ioctl_expected.gemUserptr = 1;
    mock->ioctl_expected.gemWait = 1;
    mock->ioctl_expected.gemClose = 1;

    DebugManager.flags.Force32bitAddressing.set(true);
    MockContext context;
@@ -907,11 +901,7 @@ TEST_F(DrmMemoryManagerTest, Given32bitAllocatorWhenAskedForBufferCreatedFromHos

    auto bufferObject = drmAllocation->getBO();

    if (DebugManager.flags.UseNewHeapAllocator.get()) {
        EXPECT_NE(0u, bufferObject->peekUnmapSize());
    } else {
        EXPECT_EQ(0u, bufferObject->peekUnmapSize());
    }
    EXPECT_NE(0u, bufferObject->peekUnmapSize());
    EXPECT_EQ(drmAllocation->getUnderlyingBuffer(), reinterpret_cast<void *>(offsetedPtr));

    // Gpu address should be different
@@ -977,10 +967,6 @@ TEST_F(DrmMemoryManagerTest, Given32bitAllocatorWhenAskedForBufferCreatedFrom64B

    EXPECT_NE(0u, bufferObject->peekUnmapSize());

    if (DebugManager.flags.UseNewHeapAllocator.get() == false) {
        EXPECT_NE(drmAllocation->getUnderlyingBuffer(), reinterpret_cast<void *>(offsetedPtr));
    }

    EXPECT_EQ(allocationPageOffset, ptrOffset);
    EXPECT_FALSE(bufferObject->peekIsAllocated());
@@ -1138,11 +1124,6 @@ TEST_F(DrmMemoryManagerTest, GivenSizeAbove2GBWhenUseHostPtrAndAllocHostPtrAreCr
    if (is32BitOsAllocatorAvailable && buffer) {
        auto bufferPtr = buffer->getGraphicsAllocation()->getGpuAddress();

        if (DebugManager.flags.UseNewHeapAllocator.get() == false) {
            uintptr_t maxMmap32BitAddress = 0x80000000;
            EXPECT_EQ((uintptr_t)bufferPtr, maxMmap32BitAddress);
        }

        EXPECT_TRUE(buffer->getGraphicsAllocation()->is32BitAllocation);
        auto baseAddress = buffer->getGraphicsAllocation()->gpuBaseAddress;
        EXPECT_LT((uintptr_t)(bufferPtr - baseAddress), max32BitAddress);
@@ -1185,11 +1166,6 @@ TEST_F(DrmMemoryManagerTest, GivenSizeAbove2GBWhenAllocHostPtrAndUseHostPtrAreCr
    if (is32BitOsAllocatorAvailable && buffer) {
        auto bufferPtr = buffer->getGraphicsAllocation()->getGpuAddress();

        if (DebugManager.flags.UseNewHeapAllocator.get() == false) {
            uintptr_t maxMmap32BitAddress = 0x80000000;
            EXPECT_EQ((uintptr_t)bufferPtr, maxMmap32BitAddress);
        }

        EXPECT_TRUE(buffer->getGraphicsAllocation()->is32BitAllocation);
        auto baseAddress = buffer->getGraphicsAllocation()->gpuBaseAddress;
        EXPECT_LT((uintptr_t)(bufferPtr - baseAddress), max32BitAddress);
@@ -2096,7 +2072,6 @@ TEST_F(DrmMemoryManagerTest, given32BitAllocatorWithHeapAllocatorWhenLargerFragm
    mock->ioctl_expected.gemClose = 1;

    DebugManagerStateRestore dbgFlagsKeeper;
    DebugManager.flags.UseNewHeapAllocator.set(true);
    memoryManager->setForce32BitAllocations(true);

    size_t allocationSize = 4 * MemoryConstants::pageSize;
@@ -2359,9 +2334,6 @@ TEST_F(DrmMemoryManagerWithExplicitExpectationsTest, givenDefaultDrmMemoryManage
}

TEST(Allocator32BitUsingHeapAllocator, given32BitAllocatorWhenMMapFailsThenNullptrIsReturned) {
    DebugManagerStateRestore restore;
    DebugManager.flags.UseNewHeapAllocator.set(true);

    MockAllocator32Bit::resetState();
    failMmap = true;
    MockAllocator32Bit::OsInternalsPublic *osInternals = MockAllocator32Bit::createOsInternals();
@@ -2375,9 +2347,6 @@ TEST(Allocator32BitUsingHeapAllocator, given32BitAllocatorWhenMMapFailsThenNullp
}

TEST(Allocator32BitUsingHeapAllocator, given32BitAllocatorWhenFirstMMapFailsThenSecondIsCalledWithSmallerSize) {
    DebugManagerStateRestore restore;
    DebugManager.flags.UseNewHeapAllocator.set(true);

    MockAllocator32Bit::resetState();
    maxMmapLength = getSizeToMap() - 1;
    MockAllocator32Bit::OsInternalsPublic *osInternals = MockAllocator32Bit::createOsInternals();
@@ -2393,178 +2362,10 @@ TEST(Allocator32BitUsingHeapAllocator, given32BitAllocatorWhenFirstMMapFailsThen
    EXPECT_NE(0u, osInternals->heapSize);
}

TEST(DrmAllocator32Bit, allocateReturnsPointer) {
    DebugManagerStateRestore restore;
    DebugManager.flags.UseNewHeapAllocator.set(false);

    MockAllocator32Bit mock32BitAllocator;
    size_t size = 100u;
    auto ptr = mock32BitAllocator.allocate(size);
    EXPECT_NE(0u, (uintptr_t)ptr);
    EXPECT_EQ(1u, mmapCallCount);
    mock32BitAllocator.free(ptr, size);
}

TEST(DrmAllocator32Bit, freeMapFailedPointer) {
    DebugManagerStateRestore restore;
    DebugManager.flags.UseNewHeapAllocator.set(false);

    MockAllocator32Bit mock32BitAllocator;
    size_t size = 100u;
    int result = mock32BitAllocator.free(reinterpret_cast<uint64_t>(MAP_FAILED), size);
    EXPECT_EQ(0, result);
}

TEST(DrmAllocator32Bit, freeNullPtrPointer) {
    DebugManagerStateRestore restore;
    DebugManager.flags.UseNewHeapAllocator.set(false);

    MockAllocator32Bit mock32BitAllocator;
    uint32_t size = 100u;
    int result = mock32BitAllocator.free(0llu, size);
    EXPECT_EQ(0, result);
}

TEST(DrmAllocator32Bit, freeLowerRangeAfterTwoMmapFails) {
    DebugManagerStateRestore restore;
    DebugManager.flags.UseNewHeapAllocator.set(false);

    MockAllocator32Bit mock32BitAllocator;
    mmapFailCount = 2;
    size_t size = 100u;
    auto ptr = mock32BitAllocator.allocate(size);
    EXPECT_EQ(3u, mmapCallCount);
    int result = mock32BitAllocator.free(ptr, size);
    EXPECT_EQ(0, result);
}

TEST(DrmAllocator32Bit, given32BitAllocatorWhenMMapFailsThenUpperHeapIsBrowsedForAllocations) {
    DebugManagerStateRestore restore;
    DebugManager.flags.UseNewHeapAllocator.set(false);

    MockAllocator32Bit mock32BitAllocator;
    fail32BitMmap = true;
    size_t size = 100u;
    auto ptr = mock32BitAllocator.allocate(size);
    EXPECT_EQ(maxMmap32BitAddress, (uintptr_t)ptr);
    EXPECT_EQ(2u, mmapCallCount);
}

TEST(DrmAllocator32Bit, given32BitAllocatorWith32AndUpperHeapsExhaustedThenPointerFromLowerHeapIsReturned) {
    DebugManagerStateRestore restore;
    DebugManager.flags.UseNewHeapAllocator.set(false);

    MockAllocator32Bit mock32BitAllocator;
    fail32BitMmap = true;
    failUpperRange = true;
    size_t size = 100u;
    auto ptr = mock32BitAllocator.allocate(size);
    EXPECT_EQ(lowerRangeStart, (uintptr_t)ptr);
    EXPECT_EQ(3u, mmapCallCount);
}

TEST(DrmAllocator32Bit, given32bitRegionExhaustedWhenTwoAllocationsAreCreatedThenSecondIsAfterFirst) {
    DebugManagerStateRestore restore;
    DebugManager.flags.UseNewHeapAllocator.set(false);

    MockAllocator32Bit mock32BitAllocator;
    fail32BitMmap = true;
    size_t size = 100u;
    auto ptr = (uintptr_t)mock32BitAllocator.allocate(size);
    EXPECT_EQ(maxMmap32BitAddress, ptr);

    auto alignedSize = alignUp(size, MemoryConstants::pageSize);
    auto ptr2 = (uintptr_t)mock32BitAllocator.allocate(size);
    EXPECT_EQ(maxMmap32BitAddress + alignedSize, ptr2);

    EXPECT_EQ(4u, mmapCallCount);
    mock32BitAllocator.free(ptr2, size);

    auto getInternals = mock32BitAllocator.getOsInternals();
    EXPECT_EQ(ptr2, getInternals->upperRangeAddress);

    mock32BitAllocator.free(ptr, size);
    EXPECT_EQ(ptr, getInternals->upperRangeAddress);

    EXPECT_EQ(2u, unmapCallCount);
}

TEST(DrmAllocator32Bit, given32bitRegionAndUpperRegionExhaustedWhenTwoAllocationsAreCreatedThenSecondIsAfterFirst) {
    DebugManagerStateRestore restore;
    DebugManager.flags.UseNewHeapAllocator.set(false);

    MockAllocator32Bit mock32BitAllocator;
    fail32BitMmap = true;
    failUpperRange = true;
    size_t size = 100u;
    auto ptr = (uintptr_t)mock32BitAllocator.allocate(size);
    EXPECT_EQ(lowerRangeStart, ptr);

    auto alignedSize = alignUp(size, MemoryConstants::pageSize);
    auto ptr2 = (uintptr_t)mock32BitAllocator.allocate(size);
    EXPECT_EQ(lowerRangeStart + alignedSize, ptr2);

    EXPECT_EQ(6u, mmapCallCount);
    mock32BitAllocator.free(ptr2, size);

    auto getInternals = mock32BitAllocator.getOsInternals();
    EXPECT_EQ(ptr2, getInternals->lowerRangeAddress);

    mock32BitAllocator.free(ptr, size);
    EXPECT_EQ(ptr, getInternals->lowerRangeAddress);

    EXPECT_EQ(4u, unmapCallCount);
}

TEST(DrmAllocator32Bit, given32bitAllocatorWithAllHeapsExhaustedWhenAskedForAllocationThenNullptrIsReturned) {
    DebugManagerStateRestore restore;
    DebugManager.flags.UseNewHeapAllocator.set(false);

    MockAllocator32Bit mock32BitAllocator;
    fail32BitMmap = true;
    failLowerRanger = true;
    failUpperRange = true;
    size_t size = 100u;

    auto ptr = mock32BitAllocator.allocate(size);

    EXPECT_EQ(0llu, ptr);

    EXPECT_EQ(3u, mmapCallCount);
    EXPECT_EQ(2u, unmapCallCount);
}

TEST(DrmAllocator32Bit, given32bitAllocatorWithUpperHeapCloseToFullWhenAskedForAllocationThenAllocationFromLowerHeapIsReturned) {
    DebugManagerStateRestore restore;
    DebugManager.flags.UseNewHeapAllocator.set(false);

    MockAllocator32Bit mock32BitAllocator;
    fail32BitMmap = true;
    size_t size = 3 * 1024 * 1024 * 1029u;

    auto ptr = mock32BitAllocator.allocate(size);

    EXPECT_EQ(lowerRangeHeapStart, (uintptr_t)ptr);
    EXPECT_EQ(3u, mmapCallCount);
    EXPECT_EQ(1u, unmapCallCount);
}

TEST(DrmAllocator32Bit, givenMapFailedAsInputToFreeFunctionWhenItIsCalledThenUnmapIsNotCalled) {
    DebugManagerStateRestore restore;
    DebugManager.flags.UseNewHeapAllocator.set(false);

    MockAllocator32Bit mock32BitAllocator;
    mock32BitAllocator.free(reinterpret_cast<uint64_t>(MAP_FAILED), 100u);
    EXPECT_EQ(0u, unmapCallCount);
}

TEST(DrmAllocator32Bit, givenNullptrAsInputToFreeFunctionWhenItIsCalledThenUnmapIsNotCalled) {
    DebugManagerStateRestore restore;
    DebugManager.flags.UseNewHeapAllocator.set(false);

    MockAllocator32Bit mock32BitAllocator;
    mock32BitAllocator.free(0llu, 100u);
    EXPECT_EQ(0u, unmapCallCount);
}

TEST(Allocator32BitUsingHeapAllocator, given32bitAllocatorWhenFreeIsCalledWithMapFailedThenZeroIsReturned) {
    MockAllocator32Bit::OsInternalsPublic *osInternals = MockAllocator32Bit::createOsInternals();
    MockAllocator32Bit mock32BitAllocator{osInternals};
    EXPECT_EQ(0, mock32BitAllocator.free(castToUint64(MAP_FAILED), 4096u));
}
#include <chrono>
@@ -18,7 +18,6 @@ ForceSLML3Config = 0
SetCommandStreamReceiver = 0
ForceOCLVersion = 0
Force32bitAddressing = 0
UseNewHeapAllocator = 1
EnableVaLibCalls = 1
EnableNV12 = 1
EnablePackedYuv = 1