performance: redesign usm alloc reuse mechanism
Dedicated pools for different allocation size ranges. Additional reused
allocations will create their own pools. Do not reuse allocations larger
than 256MB.

Related-To: NEO-6893, NEO-12299, NEO-12349

Signed-off-by: Dominik Dabek <dominik.dabek@intel.com>
commit b2fc7345cf (parent dfbad8029b)
@@ -269,6 +269,15 @@ ze_result_t ContextImp::allocDeviceMem(ze_device_handle_t hDevice,
         unifiedMemoryProperties.allocationFlags.flags.resource48Bit = productHelper.is48bResourceNeededForRayTracing();
     }
 
+    if (false == lookupTable.exportMemory &&
+        neoDevice->getUsmMemAllocPoolsManager()) {
+        neoDevice->getUsmMemAllocPoolsManager()->ensureInitialized(this->driverHandle->svmAllocsManager);
+        if (auto usmPtrFromPool = neoDevice->getUsmMemAllocPoolsManager()->createUnifiedMemoryAllocation(size, unifiedMemoryProperties)) {
+            *ptr = usmPtrFromPool;
+            return ZE_RESULT_SUCCESS;
+        }
+    }
+
     void *usmPtr =
         this->driverHandle->svmAllocsManager->createUnifiedMemoryAllocation(size, unifiedMemoryProperties);
     if (usmPtr == nullptr) {
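A sketch, not part of this patch: what the hunk above means for an application, assuming a valid Level Zero context and device handle. With the new device USM pooling path enabled, a small allocation request can be serviced from a preallocated pool; the application-side API calls do not change.

```cpp
#include <level_zero/ze_api.h>

// Illustrative caller; hContext and hDevice are assumed to be valid handles created elsewhere.
void exampleSmallDeviceAlloc(ze_context_handle_t hContext, ze_device_handle_t hDevice) {
    ze_device_mem_alloc_desc_t deviceDesc = {ZE_STRUCTURE_TYPE_DEVICE_MEM_ALLOC_DESC};
    void *ptr = nullptr;
    // A 64 KB request falls into one of the preallocated size-range pools,
    // so it may be served without creating a fresh driver allocation.
    ze_result_t res = zeMemAllocDevice(hContext, &deviceDesc, 64 * 1024, 0, hDevice, &ptr);
    if (res == ZE_RESULT_SUCCESS) {
        // ... use ptr in kernels or copies ...
        zeMemFree(hContext, ptr); // returns the chunk to the pool
    }
}
```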
@@ -433,10 +442,22 @@ ze_result_t ContextImp::freeMem(const void *ptr, bool blocking) {
     for (auto &pairDevice : this->devices) {
         this->freePeerAllocations(ptr, blocking, Device::fromHandle(pairDevice.second));
     }
-    if (this->driverHandle->usmHostMemAllocPool.freeSVMAlloc(ptr, blocking)) {
-        return ZE_RESULT_SUCCESS;
-    }
+    if (InternalMemoryType::hostUnifiedMemory == allocation->memoryType) {
+        if (this->driverHandle->usmHostMemAllocPool.freeSVMAlloc(ptr, blocking)) {
+            return ZE_RESULT_SUCCESS;
+        }
+    } else if (InternalMemoryType::deviceUnifiedMemory == allocation->memoryType) {
+        if (auto deviceUsmPoolsManager = allocation->device->getUsmMemAllocPoolsManager()) {
+            DEBUG_BREAK_IF(false == deviceUsmPoolsManager->isInitialized());
+            if (deviceUsmPoolsManager->freeSVMAlloc(ptr, blocking)) {
+                return ZE_RESULT_SUCCESS;
+            }
+            if (deviceUsmPoolsManager->recycleSVMAlloc(const_cast<void *>(ptr),
+                                                       blocking)) {
+                return ZE_RESULT_SUCCESS;
+            }
+        }
+    }
     this->driverHandle->svmAllocsManager->freeSVMAlloc(const_cast<void *>(ptr), blocking);
 
     return ZE_RESULT_SUCCESS;
@@ -603,6 +624,17 @@ void ContextImp::setIPCHandleData(NEO::GraphicsAllocation *graphicsAllocation, u
 
     if (this->driverHandle->usmHostMemAllocPool.isInPool(addrToPtr(ptrAddress))) {
         ipcData.poolOffset = this->driverHandle->usmHostMemAllocPool.getOffsetInPool(addrToPtr(ptrAddress));
+    } else {
+        for (auto const &devicePair : this->getDevices()) {
+            auto device = Device::fromHandle(devicePair.second);
+            auto neoDevice = device->getNEODevice();
+            if (auto deviceUsmMemAllocPoolsManager = neoDevice->getUsmMemAllocPoolsManager()) {
+                if (auto poolOffset = deviceUsmMemAllocPoolsManager->getOffsetInPool(addrToPtr(ptrAddress))) {
+                    ipcData.poolOffset = poolOffset;
+                    break;
+                }
+            }
+        }
     }
 
     auto lock = this->driverHandle->lockIPCHandleMap();
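A sketch, not part of this patch, of how the `poolOffset` recorded above is meant to be consumed, following the expectations in the new unit test later in this commit. `importedBase` is an illustrative name for whatever `openIpcMemHandle` mapped for the pool's base allocation.

```cpp
// Importer side (illustrative): the IPC handle identifies the pool's base
// allocation, and the exported pointer is recovered by adding the pool offset.
void *importedPtr = ptrOffset(importedBase, ipcData.poolOffset);
```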
@@ -404,7 +404,7 @@ void DriverHandleImp::initHostUsmAllocPool() {
     if (usmHostAllocPoolingEnabled) {
         NEO::SVMAllocsManager::UnifiedMemoryProperties memoryProperties(InternalMemoryType::hostUnifiedMemory, MemoryConstants::pageSize2M,
                                                                         rootDeviceIndices, deviceBitfields);
-        usmHostMemAllocPool.initialize(svmAllocsManager, memoryProperties, poolSize);
+        usmHostMemAllocPool.initialize(svmAllocsManager, memoryProperties, poolSize, 0u, 1 * MemoryConstants::megaByte);
     }
 }
 
@@ -47,7 +47,7 @@ bool ApiSpecificConfig::isHostAllocationCacheEnabled() {
 }
 
 bool ApiSpecificConfig::isDeviceUsmPoolingEnabled() {
-    return false;
+    return true;
 }
 
 bool ApiSpecificConfig::isHostUsmPoolingEnabled() {
@@ -46,7 +46,7 @@ TEST(ApiSpecificConfigL0Tests, WhenCheckingIfHostDeviceAllocationCacheIsEnabledT
 
 TEST(ApiSpecificConfigL0Tests, WhenCheckingIfUsmAllocPoolingIsEnabledThenReturnFalse) {
     EXPECT_FALSE(ApiSpecificConfig::isHostUsmPoolingEnabled());
-    EXPECT_FALSE(ApiSpecificConfig::isDeviceUsmPoolingEnabled());
+    EXPECT_TRUE(ApiSpecificConfig::isDeviceUsmPoolingEnabled());
 }
 
 TEST(ApiSpecificConfigL0Tests, GivenDebugFlagCombinationsGetCorrectSharedAllocPrefetchEnabled) {
@@ -10,9 +10,11 @@
 #include "shared/test/common/mocks/mock_device.h"
 #include "shared/test/common/mocks/mock_driver_model.h"
 #include "shared/test/common/mocks/mock_memory_manager.h"
+#include "shared/test/common/mocks/mock_product_helper.h"
 #include "shared/test/common/mocks/mock_usm_memory_pool.h"
 #include "shared/test/common/mocks/ult_device_factory.h"
 #include "shared/test/common/test_macros/hw_test.h"
+#include "shared/test/common/test_macros/test_checks_shared.h"
 
 #include "level_zero/core/source/context/context_imp.h"
 #include "level_zero/core/source/device/device_imp.h"
@@ -21,22 +23,32 @@
 #include "level_zero/core/test/unit_tests/mocks/mock_driver_handle.h"
 namespace L0 {
 namespace ult {
-template <int hostUsmPoolFlag = -1, int deviceUsmPoolFlag = -1>
+template <int hostUsmPoolFlag = -1, int deviceUsmPoolFlag = -1, int poolingVersionFlag = -1>
 struct AllocUsmPoolMemoryTest : public ::testing::Test {
     void SetUp() override {
+        REQUIRE_SVM_OR_SKIP(NEO::defaultHwInfo);
         NEO::debugManager.flags.EnableHostUsmAllocationPool.set(hostUsmPoolFlag);
         NEO::debugManager.flags.EnableDeviceUsmAllocationPool.set(deviceUsmPoolFlag);
+        NEO::debugManager.flags.ExperimentalUSMAllocationReuseVersion.set(poolingVersionFlag);
 
         executionEnvironment = new NEO::ExecutionEnvironment();
         executionEnvironment->prepareRootDeviceEnvironments(numRootDevices);
         for (auto i = 0u; i < executionEnvironment->rootDeviceEnvironments.size(); i++) {
+            mockProductHelpers.push_back(new MockProductHelper);
+            executionEnvironment->rootDeviceEnvironments[i]->productHelper.reset(mockProductHelpers[i]);
             executionEnvironment->rootDeviceEnvironments[i]->setHwInfoAndInitHelpers(NEO::defaultHwInfo.get());
             executionEnvironment->rootDeviceEnvironments[i]->initGmm();
+            if (1 == deviceUsmPoolFlag) {
+                mockProductHelpers[i]->isUsmPoolAllocatorSupportedResult = true;
+            }
         }
 
         for (auto i = 0u; i < executionEnvironment->rootDeviceEnvironments.size(); i++) {
-            devices.push_back(std::unique_ptr<NEO::MockDevice>(NEO::MockDevice::createWithExecutionEnvironment<NEO::MockDevice>(NEO::defaultHwInfo.get(),
-                                                                                                                                executionEnvironment, i)));
+            auto device = std::unique_ptr<NEO::MockDevice>(NEO::MockDevice::createWithExecutionEnvironment<NEO::MockDevice>(NEO::defaultHwInfo.get(),
+                                                                                                                             executionEnvironment, i));
+            device->deviceInfo.localMemSize = 4 * MemoryConstants::gigaByte;
+            device->deviceInfo.globalMemSize = 4 * MemoryConstants::gigaByte;
+            devices.push_back(std::move(device));
         }
 
         driverHandle = std::make_unique<Mock<L0::DriverHandleImp>>();
@@ -58,7 +70,9 @@ struct AllocUsmPoolMemoryTest : public ::testing::Test {
     const uint32_t numRootDevices = 2u;
     L0::ContextImp *context = nullptr;
     std::vector<std::unique_ptr<NEO::Device>> devices;
+    std::vector<MockProductHelper *> mockProductHelpers;
     NEO::ExecutionEnvironment *executionEnvironment;
+    constexpr static auto poolAllocationThreshold = 1 * MemoryConstants::megaByte;
 };
 
 using AllocUsmHostDefaultMemoryTest = AllocUsmPoolMemoryTest<-1, -1>;
@@ -107,7 +121,7 @@ TEST_F(AllocUsmHostEnabledMemoryTest, givenDriverHandleWhenCallingAllocHostMemWi
     EXPECT_EQ(ZE_RESULT_SUCCESS, result);
 
     void *ptrThreshold = nullptr;
-    result = context->allocHostMem(&hostDesc, UsmMemAllocPool::allocationThreshold, 0u, &ptrThreshold);
+    result = context->allocHostMem(&hostDesc, poolAllocationThreshold, 0u, &ptrThreshold);
     EXPECT_EQ(ZE_RESULT_SUCCESS, result);
     EXPECT_NE(nullptr, ptrThreshold);
     EXPECT_TRUE(driverHandle->usmHostMemAllocPool.isInPool(ptrThreshold));
@@ -116,7 +130,7 @@ TEST_F(AllocUsmHostEnabledMemoryTest, givenDriverHandleWhenCallingAllocHostMemWi
     EXPECT_EQ(ZE_RESULT_SUCCESS, result);
 
     void *ptrOverThreshold = nullptr;
-    result = context->allocHostMem(&hostDesc, UsmMemAllocPool::allocationThreshold + 1u, 0u, &ptrOverThreshold);
+    result = context->allocHostMem(&hostDesc, poolAllocationThreshold + 1u, 0u, &ptrOverThreshold);
     EXPECT_EQ(ZE_RESULT_SUCCESS, result);
     EXPECT_NE(nullptr, ptrOverThreshold);
     EXPECT_FALSE(driverHandle->usmHostMemAllocPool.isInPool(ptrOverThreshold));
@@ -129,7 +143,7 @@ TEST_F(AllocUsmHostEnabledMemoryTest, givenDriverHandleWhenCallingAllocHostMemWi
     externalMemoryDesc.stype = ZE_STRUCTURE_TYPE_EXTERNAL_MEMORY_EXPORT_DESC;
     externalMemoryDesc.flags = ZE_EXTERNAL_MEMORY_TYPE_FLAG_DMA_BUF;
     hostDesc.pNext = &externalMemoryDesc;
-    result = context->allocHostMem(&hostDesc, UsmMemAllocPool::allocationThreshold, 0u, &ptrExportMemory);
+    result = context->allocHostMem(&hostDesc, poolAllocationThreshold, 0u, &ptrExportMemory);
     EXPECT_EQ(ZE_RESULT_SUCCESS, result);
     EXPECT_NE(nullptr, ptrExportMemory);
     EXPECT_FALSE(driverHandle->usmHostMemAllocPool.isInPool(ptrExportMemory));
@@ -173,5 +187,109 @@ TEST_F(AllocUsmHostEnabledMemoryTest, givenDrmDriverModelWhenOpeningIpcHandleFro
     EXPECT_EQ(ZE_RESULT_SUCCESS, result);
 }
 
+using AllocUsmDeviceEnabledMemoryNewVersionTest = AllocUsmPoolMemoryTest<-1, 1, 2>;
+
+TEST_F(AllocUsmDeviceEnabledMemoryNewVersionTest, givenContextWhenAllocatingAndFreeingDeviceUsmThenPoolingIsUsed) {
+    executionEnvironment->rootDeviceEnvironments[0]->osInterface.reset(new NEO::OSInterface());
+    executionEnvironment->rootDeviceEnvironments[0]->osInterface->setDriverModel(std::make_unique<NEO::MockDriverModelDRM>());
+    auto usmMemAllocPoolsManager = driverHandle->devices[0]->getNEODevice()->getUsmMemAllocPoolsManager();
+    ASSERT_NE(nullptr, usmMemAllocPoolsManager);
+    auto deviceHandle = driverHandle->devices[0]->toHandle();
+    ze_device_mem_alloc_desc_t deviceAllocDesc{};
+    void *allocation = nullptr;
+    ze_result_t result = context->allocDeviceMem(deviceHandle, &deviceAllocDesc, 1u, 0u, &allocation);
+    EXPECT_EQ(ZE_RESULT_SUCCESS, result);
+    EXPECT_NE(nullptr, allocation);
+    EXPECT_TRUE(usmMemAllocPoolsManager->isInitialized());
+    EXPECT_EQ(allocation, usmMemAllocPoolsManager->getPooledAllocationBasePtr(allocation));
+    EXPECT_EQ(1u, usmMemAllocPoolsManager->getPooledAllocationSize(allocation));
+    ze_ipc_mem_handle_t ipcHandle{};
+    result = context->getIpcMemHandle(allocation, &ipcHandle);
+    EXPECT_EQ(ZE_RESULT_SUCCESS, result);
+    IpcMemoryData &ipcData = *reinterpret_cast<IpcMemoryData *>(ipcHandle.data);
+    auto pooledAllocationOffset = usmMemAllocPoolsManager->getOffsetInPool(allocation);
+    EXPECT_EQ(pooledAllocationOffset, ipcData.poolOffset);
+
+    ze_ipc_memory_flags_t ipcFlags{};
+    void *ipcPointer = nullptr;
+    result = context->openIpcMemHandle(driverHandle->devices[0]->toHandle(), ipcHandle, ipcFlags, &ipcPointer);
+    EXPECT_EQ(ZE_RESULT_SUCCESS, result);
+    EXPECT_EQ(ptrOffset(addrToPtr(0x1u), pooledAllocationOffset), ipcPointer);
+
+    result = context->closeIpcMemHandle(addrToPtr(0x1u));
+    EXPECT_EQ(ZE_RESULT_SUCCESS, result);
+    result = context->freeMem(allocation);
+    EXPECT_EQ(ZE_RESULT_SUCCESS, result);
+
+    void *allocation2MB = nullptr;
+    result = context->allocDeviceMem(deviceHandle, &deviceAllocDesc, 2 * MemoryConstants::megaByte, 0u, &allocation2MB);
+    EXPECT_EQ(ZE_RESULT_SUCCESS, result);
+    EXPECT_NE(nullptr, allocation2MB);
+    EXPECT_EQ(allocation2MB, usmMemAllocPoolsManager->getPooledAllocationBasePtr(allocation2MB));
+    EXPECT_EQ(2 * MemoryConstants::megaByte, usmMemAllocPoolsManager->getPooledAllocationSize(allocation2MB));
+
+    void *allocation4MB = nullptr;
+    result = context->allocDeviceMem(deviceHandle, &deviceAllocDesc, 4 * MemoryConstants::megaByte, 0u, &allocation4MB);
+    EXPECT_EQ(ZE_RESULT_SUCCESS, result);
+    EXPECT_NE(nullptr, allocation4MB);
+    EXPECT_EQ(nullptr, usmMemAllocPoolsManager->getPooledAllocationBasePtr(allocation4MB));
+    EXPECT_EQ(0u, usmMemAllocPoolsManager->getPooledAllocationSize(allocation4MB));
+
+    result = context->freeMem(allocation4MB);
+    EXPECT_EQ(ZE_RESULT_SUCCESS, result);
+    void *allocation4MBRecycled = nullptr;
+    result = context->allocDeviceMem(deviceHandle, &deviceAllocDesc, 4 * MemoryConstants::megaByte, 0u, &allocation4MBRecycled);
+    EXPECT_EQ(ZE_RESULT_SUCCESS, result);
+    EXPECT_NE(nullptr, allocation4MBRecycled);
+    EXPECT_EQ(allocation4MBRecycled, usmMemAllocPoolsManager->getPooledAllocationBasePtr(allocation4MBRecycled));
+    EXPECT_EQ(4 * MemoryConstants::megaByte, usmMemAllocPoolsManager->getPooledAllocationSize(allocation4MBRecycled));
+    EXPECT_EQ(allocation4MBRecycled, allocation4MB);
+
+    result = context->freeMem(allocation4MBRecycled);
+    EXPECT_EQ(ZE_RESULT_SUCCESS, result);
+    void *allocation3MBRecycled = nullptr;
+    result = context->allocDeviceMem(deviceHandle, &deviceAllocDesc, 3 * MemoryConstants::megaByte, 0u, &allocation3MBRecycled);
+    EXPECT_EQ(ZE_RESULT_SUCCESS, result);
+    EXPECT_NE(nullptr, allocation3MBRecycled);
+    EXPECT_EQ(allocation3MBRecycled, usmMemAllocPoolsManager->getPooledAllocationBasePtr(allocation3MBRecycled));
+    EXPECT_EQ(3 * MemoryConstants::megaByte, usmMemAllocPoolsManager->getPooledAllocationSize(allocation3MBRecycled));
+    auto address4MB = castToUint64(allocation4MB);
+    auto address3MB = castToUint64(allocation3MBRecycled);
+    EXPECT_GE(address3MB, address4MB);
+    EXPECT_LT(address3MB, address4MB + MemoryConstants::megaByte);
+
+    void *allocation2MB1B = nullptr;
+    result = context->allocDeviceMem(deviceHandle, &deviceAllocDesc, 2 * MemoryConstants::megaByte + 1, 0u, &allocation2MB1B);
+    EXPECT_EQ(ZE_RESULT_SUCCESS, result);
+    EXPECT_NE(nullptr, allocation2MB1B);
+    auto mockMemoryManager = reinterpret_cast<MockMemoryManager *>(driverHandle->getMemoryManager());
+    mockMemoryManager->localMemAllocsSize[mockRootDeviceIndex] = driverHandle->getMemoryManager()->getLocalMemorySize(mockRootDeviceIndex, static_cast<uint32_t>(driverHandle->devices[0]->getNEODevice()->getDeviceBitfield().to_ulong()));
+    result = context->freeMem(allocation2MB1B); // should not be recycled, because too much device memory is used
+    EXPECT_EQ(ZE_RESULT_SUCCESS, result);
+    void *allocation2MB1BNotRecycled = nullptr;
+    result = context->allocDeviceMem(deviceHandle, &deviceAllocDesc, 2 * MemoryConstants::megaByte + 1, 0u, &allocation2MB1BNotRecycled);
+    EXPECT_EQ(ZE_RESULT_SUCCESS, result);
+    EXPECT_NE(nullptr, allocation2MB1BNotRecycled);
+    EXPECT_EQ(nullptr, usmMemAllocPoolsManager->getPooledAllocationBasePtr(allocation2MB1BNotRecycled));
+    EXPECT_EQ(0u, usmMemAllocPoolsManager->getPooledAllocationSize(allocation2MB1BNotRecycled));
+
+    result = context->getIpcMemHandle(allocation2MB1BNotRecycled, &ipcHandle);
+    EXPECT_EQ(ZE_RESULT_SUCCESS, result);
+    ipcData = *reinterpret_cast<IpcMemoryData *>(ipcHandle.data);
+    pooledAllocationOffset = usmMemAllocPoolsManager->getOffsetInPool(allocation2MB1BNotRecycled);
+    EXPECT_EQ(0u, pooledAllocationOffset);
+    EXPECT_EQ(0u, ipcData.poolOffset);
+
+    ipcPointer = nullptr;
+    result = context->openIpcMemHandle(driverHandle->devices[0]->toHandle(), ipcHandle, ipcFlags, &ipcPointer);
+    EXPECT_EQ(ZE_RESULT_SUCCESS, result);
+    EXPECT_EQ(addrToPtr(0x1u), ipcPointer);
+    result = context->closeIpcMemHandle(addrToPtr(0x1u));
+    EXPECT_EQ(ZE_RESULT_SUCCESS, result);
+
+    result = context->freeMem(allocation2MB1BNotRecycled);
+    EXPECT_EQ(ZE_RESULT_SUCCESS, result);
+}
+
 } // namespace ult
 } // namespace L0
@@ -539,7 +539,7 @@ void Context::initializeUsmAllocationPools() {
         SVMAllocsManager::UnifiedMemoryProperties memoryProperties(InternalMemoryType::deviceUnifiedMemory, MemoryConstants::pageSize2M,
                                                                    getRootDeviceIndices(), subDeviceBitfields);
         memoryProperties.device = &neoDevice;
-        usmDeviceMemAllocPool.initialize(svmMemoryManager, memoryProperties, poolSize);
+        usmDeviceMemAllocPool.initialize(svmMemoryManager, memoryProperties, poolSize, 0u, 1 * MemoryConstants::megaByte);
     }
 
     enabled = ApiSpecificConfig::isHostUsmPoolingEnabled() && productHelper.isUsmPoolAllocatorSupported();
@@ -554,7 +554,7 @@ void Context::initializeUsmAllocationPools() {
         subDeviceBitfields[neoDevice.getRootDeviceIndex()] = neoDevice.getDeviceBitfield();
         SVMAllocsManager::UnifiedMemoryProperties memoryProperties(InternalMemoryType::hostUnifiedMemory, MemoryConstants::pageSize2M,
                                                                    getRootDeviceIndices(), subDeviceBitfields);
-        usmHostMemAllocPool.initialize(svmMemoryManager, memoryProperties, poolSize);
+        usmHostMemAllocPool.initialize(svmMemoryManager, memoryProperties, poolSize, 0u, 1 * MemoryConstants::megaByte);
     }
     this->usmPoolInitialized = true;
 }
@@ -39,6 +39,7 @@ struct ContextUsmPoolFlagValuesTest : public ::testing::Test {
     DebugManagerStateRestore restorer;
     MockUsmMemAllocPool *mockDeviceUsmMemAllocPool;
     MockUsmMemAllocPool *mockHostUsmMemAllocPool;
+    constexpr static auto poolAllocationThreshold = 1 * MemoryConstants::megaByte;
 };
 
 using ContextUsmPoolDefaultFlagsTest = ContextUsmPoolFlagValuesTest<-1, -1>;
@@ -71,12 +72,12 @@ HWTEST2_F(ContextUsmPoolDefaultFlagsTest, givenDefaultDebugFlagsWhenCreatingCont
 using ContextUsmPoolEnabledFlagsTest = ContextUsmPoolFlagValuesTest<1, 3>;
 TEST_F(ContextUsmPoolEnabledFlagsTest, givenEnabledDebugFlagsWhenCreatingAllocaitonsThenPoolsAreInitialized) {
     cl_int retVal = CL_SUCCESS;
-    void *pooledDeviceAlloc = clDeviceMemAllocINTEL(mockContext.get(), static_cast<cl_device_id>(mockContext->getDevice(0)), nullptr, UsmMemAllocPool::allocationThreshold, 0, &retVal);
+    void *pooledDeviceAlloc = clDeviceMemAllocINTEL(mockContext.get(), static_cast<cl_device_id>(mockContext->getDevice(0)), nullptr, poolAllocationThreshold, 0, &retVal);
     EXPECT_EQ(CL_SUCCESS, retVal);
     EXPECT_NE(nullptr, pooledDeviceAlloc);
     clMemFreeINTEL(mockContext.get(), pooledDeviceAlloc);
 
-    void *pooledHostAlloc = clHostMemAllocINTEL(mockContext.get(), nullptr, UsmMemAllocPool::allocationThreshold, 0, &retVal);
+    void *pooledHostAlloc = clHostMemAllocINTEL(mockContext.get(), nullptr, poolAllocationThreshold, 0, &retVal);
     EXPECT_EQ(CL_SUCCESS, retVal);
     EXPECT_NE(nullptr, pooledHostAlloc);
     clMemFreeINTEL(mockContext.get(), pooledHostAlloc);
@@ -98,12 +99,12 @@ TEST_F(ContextUsmPoolEnabledFlagsTestDefault, givenDefaultDebugSettingsThenPoolI
     EXPECT_FALSE(mockHostUsmMemAllocPool->isInitialized());
 
     cl_int retVal = CL_SUCCESS;
-    void *pooledDeviceAlloc = clDeviceMemAllocINTEL(mockContext.get(), static_cast<cl_device_id>(mockContext->getDevice(0)), nullptr, UsmMemAllocPool::allocationThreshold, 0, &retVal);
+    void *pooledDeviceAlloc = clDeviceMemAllocINTEL(mockContext.get(), static_cast<cl_device_id>(mockContext->getDevice(0)), nullptr, poolAllocationThreshold, 0, &retVal);
     EXPECT_EQ(CL_SUCCESS, retVal);
     EXPECT_NE(nullptr, pooledDeviceAlloc);
     clMemFreeINTEL(mockContext.get(), pooledDeviceAlloc);
 
-    void *pooledHostAlloc = clHostMemAllocINTEL(mockContext.get(), nullptr, UsmMemAllocPool::allocationThreshold, 0, &retVal);
+    void *pooledHostAlloc = clHostMemAllocINTEL(mockContext.get(), nullptr, poolAllocationThreshold, 0, &retVal);
     EXPECT_EQ(CL_SUCCESS, retVal);
     EXPECT_NE(nullptr, pooledHostAlloc);
     clMemFreeINTEL(mockContext.get(), pooledHostAlloc);
@@ -556,6 +556,7 @@ DECLARE_DEBUG_VARIABLE(int32_t, ExperimentalSetWalkerPartitionType, -1, "Experim
 DECLARE_DEBUG_VARIABLE(int32_t, ExperimentalEnableCustomLocalMemoryAlignment, 0, "Align local memory allocations to a given value. Works only with allocations at least as big as the value. 0: no effect, 2097152: 2 megabytes, 1073741824: 1 gigabyte")
 DECLARE_DEBUG_VARIABLE(int32_t, ExperimentalEnableDeviceAllocationCache, -1, "Experimentally enable device usm allocation cache. Use X% of device memory.")
 DECLARE_DEBUG_VARIABLE(int32_t, ExperimentalEnableHostAllocationCache, -1, "Experimentally enable host usm allocation cache. Use X% of shared system memory.")
+DECLARE_DEBUG_VARIABLE(int32_t, ExperimentalUSMAllocationReuseVersion, -1, "Version of mechanism to use for usm allocation reuse.")
 DECLARE_DEBUG_VARIABLE(int32_t, ExperimentalH2DCpuCopyThreshold, -1, "Override default threshold (in bytes) for H2D CPU copy.")
 DECLARE_DEBUG_VARIABLE(int32_t, ExperimentalD2HCpuCopyThreshold, -1, "Override default threshold (in bytes) for D2H CPU copy.")
 DECLARE_DEBUG_VARIABLE(int32_t, ExperimentalCopyThroughLock, -1, "Experimentally copy memory through locked ptr. -1: default 0: disable 1: enable ")
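The new flag is read in `Device::initializeCommonResources()` later in this commit; value 2 selects the redesigned pool-manager path. In unit tests it is driven programmatically, as the fixture added in this commit does; a one-line sketch of that usage:

```cpp
// Opt into USM allocation reuse mechanism version 2 (as done by the test fixture).
NEO::debugManager.flags.ExperimentalUSMAllocationReuseVersion.set(2);
```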
@@ -23,6 +23,7 @@
 #include "shared/source/helpers/ray_tracing_helper.h"
 #include "shared/source/memory_manager/allocation_properties.h"
 #include "shared/source/memory_manager/memory_manager.h"
+#include "shared/source/memory_manager/unified_memory_pooling.h"
 #include "shared/source/os_interface/driver_info.h"
 #include "shared/source/os_interface/os_context.h"
 #include "shared/source/os_interface/os_interface.h"
@@ -66,6 +67,9 @@ Device::~Device() {
 
     syncBufferHandler.reset();
     isaPoolAllocator.releasePools();
+    if (deviceUsmMemAllocPoolsManager) {
+        deviceUsmMemAllocPoolsManager->cleanup();
+    }
     secondaryCsrs.clear();
     executionEnvironment->memoryManager->releaseSecondaryOsContexts(this->getRootDeviceIndex());
     commandStreamReceivers.clear();
@@ -278,6 +282,17 @@ void Device::initializeCommonResources() {
                                               false,
                                               getDeviceBitfield()});
     }
+
+    if (ApiSpecificConfig::isDeviceUsmPoolingEnabled() &&
+        getProductHelper().isUsmPoolAllocatorSupported() &&
+        NEO::debugManager.flags.ExperimentalUSMAllocationReuseVersion.get() == 2) {
+
+        RootDeviceIndicesContainer rootDeviceIndices;
+        rootDeviceIndices.pushUnique(getRootDeviceIndex());
+        std::map<uint32_t, DeviceBitfield> deviceBitfields;
+        deviceBitfields.emplace(getRootDeviceIndex(), getDeviceBitfield());
+        deviceUsmMemAllocPoolsManager.reset(new UsmMemAllocPoolsManager(getMemoryManager(), rootDeviceIndices, deviceBitfields, this, InternalMemoryType::deviceUnifiedMemory));
+    }
 }
 
 bool Device::initDeviceFully() {
@@ -38,6 +38,7 @@ class GfxCoreHelper;
 class ProductHelper;
 class CompilerProductHelper;
 class ReleaseHelper;
+class UsmMemAllocPoolsManager;
 
 struct SelectorCopyEngine : NonCopyableOrMovableClass {
     std::atomic<bool> isMainUsed = false;
@@ -194,6 +195,9 @@ class Device : public ReferenceTrackedObject<Device> {
     ISAPoolAllocator &getIsaPoolAllocator() {
         return isaPoolAllocator;
     }
+    UsmMemAllocPoolsManager *getUsmMemAllocPoolsManager() {
+        return deviceUsmMemAllocPoolsManager.get();
+    }
     MOCKABLE_VIRTUAL void stopDirectSubmissionAndWaitForCompletion();
     bool isAnyDirectSubmissionEnabled();
     bool isStateSipRequired() const {
@@ -290,6 +294,7 @@ class Device : public ReferenceTrackedObject<Device> {
     std::vector<RTDispatchGlobalsInfo *> rtDispatchGlobalsInfos;
 
     ISAPoolAllocator isaPoolAllocator;
+    std::unique_ptr<UsmMemAllocPoolsManager> deviceUsmMemAllocPoolsManager;
 
     struct {
         bool isValid = false;
@@ -8,32 +8,48 @@
 #include "shared/source/memory_manager/unified_memory_pooling.h"
 
 #include "shared/source/debug_settings/debug_settings_manager.h"
+#include "shared/source/device/device.h"
+#include "shared/source/helpers/hw_info.h"
 #include "shared/source/helpers/ptr_math.h"
+#include "shared/source/memory_manager/memory_manager.h"
 #include "shared/source/memory_manager/unified_memory_manager.h"
 #include "shared/source/utilities/heap_allocator.h"
 
 namespace NEO {
 
-bool UsmMemAllocPool::initialize(SVMAllocsManager *svmMemoryManager, const UnifiedMemoryProperties &memoryProperties, size_t poolSize) {
-    this->pool = svmMemoryManager->createUnifiedMemoryAllocation(poolSize, memoryProperties);
-    if (nullptr == this->pool) {
+bool UsmMemAllocPool::initialize(SVMAllocsManager *svmMemoryManager, const UnifiedMemoryProperties &memoryProperties, size_t poolSize, size_t minServicedSize, size_t maxServicedSize) {
+    auto poolAllocation = svmMemoryManager->createUnifiedMemoryAllocation(poolSize, memoryProperties);
+    if (nullptr == poolAllocation) {
         return false;
     }
+    auto svmData = svmMemoryManager->getSVMAlloc(poolAllocation);
+    return initialize(svmMemoryManager, poolAllocation, svmData, minServicedSize, maxServicedSize);
+}
+
+bool UsmMemAllocPool::initialize(SVMAllocsManager *svmMemoryManager, void *ptr, SvmAllocationData *svmData, size_t minServicedSize, size_t maxServicedSize) {
+    DEBUG_BREAK_IF(nullptr == ptr);
+    this->pool = ptr;
     this->svmMemoryManager = svmMemoryManager;
-    this->poolEnd = ptrOffset(this->pool, poolSize);
+    this->poolEnd = ptrOffset(this->pool, svmData->size);
     this->chunkAllocator.reset(new HeapAllocator(castToUint64(this->pool),
-                                                 poolSize,
+                                                 svmData->size,
                                                  chunkAlignment,
-                                                 allocationThreshold / 2));
-    this->poolSize = poolSize;
-    this->poolMemoryType = memoryProperties.memoryType;
+                                                 maxServicedSize / 2));
+    this->poolSize = svmData->size;
+    this->poolMemoryType = svmData->memoryType;
+    this->minServicedSize = minServicedSize;
+    this->maxServicedSize = maxServicedSize;
     return true;
 }
 
-bool UsmMemAllocPool::isInitialized() {
+bool UsmMemAllocPool::isInitialized() const {
     return this->pool;
 }
 
+size_t UsmMemAllocPool::getPoolSize() const {
+    return this->poolSize;
+}
+
 void UsmMemAllocPool::cleanup() {
     if (isInitialized()) {
         this->svmMemoryManager->freeSVMAlloc(this->pool, true);
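The point of the second `initialize` overload above is that a pool no longer has to own a freshly created backing allocation: an already existing USM allocation (for example one being recycled at free time) can be wrapped into a pool whose size and memory type come from its `SvmAllocationData`. A hedged sketch of that call sequence, with illustrative variable names, assuming the pool class is used directly:

```cpp
// Sketch only: wrap an existing USM allocation into a new pool so later
// requests of a matching size range can reuse it (mirrors recycleSVMAlloc).
auto svmData = svmAllocsManager->getSVMAlloc(existingUsmPtr); // carries size + memory type
auto pool = std::make_unique<NEO::UsmMemAllocPool>();
pool->initialize(svmAllocsManager, existingUsmPtr, svmData,
                 minServicedSize,   // lower bound of the target size bucket
                 svmData->size);    // serve requests up to the allocation's own size
```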
@@ -49,14 +65,32 @@ bool UsmMemAllocPool::alignmentIsAllowed(size_t alignment) {
     return alignment % chunkAlignment == 0;
 }
 
-bool UsmMemAllocPool::canBePooled(size_t size, const UnifiedMemoryProperties &memoryProperties) {
-    return size <= allocationThreshold &&
-           alignmentIsAllowed(memoryProperties.alignment) &&
-           memoryProperties.memoryType == this->poolMemoryType &&
-           memoryProperties.allocationFlags.allFlags == 0u &&
+bool UsmMemAllocPool::sizeIsAllowed(size_t size) {
+    return size >= minServicedSize && size <= maxServicedSize;
+}
+
+bool UsmMemAllocPool::flagsAreAllowed(const UnifiedMemoryProperties &memoryProperties) {
+    return memoryProperties.allocationFlags.allFlags == 0u &&
           memoryProperties.allocationFlags.allAllocFlags == 0u;
 }
 
+double UsmMemAllocPool::getPercentOfFreeMemoryForRecycling(InternalMemoryType memoryType) {
+    if (InternalMemoryType::deviceUnifiedMemory == memoryType) {
+        return 0.08;
+    }
+    if (InternalMemoryType::hostUnifiedMemory == memoryType) {
+        return 0.02;
+    }
+    return 0.0;
+}
+
+bool UsmMemAllocPool::canBePooled(size_t size, const UnifiedMemoryProperties &memoryProperties) {
+    return sizeIsAllowed(size) &&
+           alignmentIsAllowed(memoryProperties.alignment) &&
+           flagsAreAllowed(memoryProperties) &&
+           memoryProperties.memoryType == this->poolMemoryType;
+}
+
 void *UsmMemAllocPool::createUnifiedMemoryAllocation(size_t requestedSize, const UnifiedMemoryProperties &memoryProperties) {
     void *pooledPtr = nullptr;
     if (isInitialized()) {
@@ -78,10 +112,14 @@ void *UsmMemAllocPool::createUnifiedMemoryAllocation(size_t requestedSize, const
     return pooledPtr;
 }
 
-bool UsmMemAllocPool::isInPool(const void *ptr) {
+bool UsmMemAllocPool::isInPool(const void *ptr) const {
     return ptr >= this->pool && ptr < this->poolEnd;
 }
 
+bool UsmMemAllocPool::isEmpty() {
+    return 0u == this->allocations.getNumAllocs();
+}
+
 bool UsmMemAllocPool::freeSVMAlloc(const void *ptr, bool blocking) {
     if (isInitialized() && isInPool(ptr)) {
         std::unique_lock<std::mutex> lock(mtx);
@@ -117,11 +155,176 @@ void *UsmMemAllocPool::getPooledAllocationBasePtr(const void *ptr) {
     return nullptr;
 }
 
-size_t UsmMemAllocPool::getOffsetInPool(const void *ptr) {
+size_t UsmMemAllocPool::getOffsetInPool(const void *ptr) const {
     if (isInitialized() && isInPool(ptr)) {
         return ptrDiff(ptr, this->pool);
     }
     return 0u;
 }
 
+bool UsmMemAllocPoolsManager::PoolInfo::isPreallocated() const {
+    return 0u != preallocateSize;
+}
+
+bool UsmMemAllocPoolsManager::ensureInitialized(SVMAllocsManager *svmMemoryManager) {
+    DEBUG_BREAK_IF(poolMemoryType != InternalMemoryType::deviceUnifiedMemory &&
+                   poolMemoryType != InternalMemoryType::hostUnifiedMemory);
+    if (isInitialized()) {
+        return true;
+    }
+    std::unique_lock<std::mutex> lock(mtx);
+    if (isInitialized()) {
+        return true;
+    }
+    bool allPoolAllocationsSucceeded = true;
+    this->totalSize = 0u;
+    SVMAllocsManager::UnifiedMemoryProperties poolsMemoryProperties(poolMemoryType, MemoryConstants::pageSize2M, rootDeviceIndices, deviceBitFields);
+    poolsMemoryProperties.device = device;
+    for (const auto &poolInfo : this->poolInfos) {
+        this->pools[poolInfo] = std::vector<std::unique_ptr<UsmMemAllocPool>>();
+        if (poolInfo.isPreallocated()) {
+            auto pool = std::make_unique<UsmMemAllocPool>();
+            allPoolAllocationsSucceeded &= pool->initialize(svmMemoryManager, poolsMemoryProperties, poolInfo.preallocateSize, poolInfo.minServicedSize, poolInfo.maxServicedSize);
+            this->pools[poolInfo].push_back(std::move(pool));
+            this->totalSize += poolInfo.preallocateSize;
+        }
+    }
+    if (false == allPoolAllocationsSucceeded) {
+        cleanup();
+        return false;
+    }
+    this->svmMemoryManager = svmMemoryManager;
+    return true;
+}
+
+bool UsmMemAllocPoolsManager::isInitialized() const {
+    return nullptr != this->svmMemoryManager;
+}
+
+void UsmMemAllocPoolsManager::trim() {
+    std::unique_lock<std::mutex> lock(mtx);
+    for (const auto &poolInfo : this->poolInfos) {
+        if (false == poolInfo.isPreallocated()) {
+            trim(this->pools[poolInfo]);
+        }
+    }
+}
+
+void UsmMemAllocPoolsManager::trim(std::vector<std::unique_ptr<UsmMemAllocPool>> &poolVector) {
+    auto poolIterator = poolVector.begin();
+    while (poolIterator != poolVector.end()) {
+        if ((*poolIterator)->isEmpty()) {
+            this->totalSize -= (*poolIterator)->getPoolSize();
+            (*poolIterator)->cleanup();
+            poolIterator = poolVector.erase(poolIterator);
+        } else {
+            ++poolIterator;
+        }
+    }
+}
+
+void UsmMemAllocPoolsManager::cleanup() {
+    for (const auto &poolInfo : this->poolInfos) {
+        for (const auto &pool : this->pools[poolInfo]) {
+            pool->cleanup();
+        }
+    }
+    this->svmMemoryManager = nullptr;
+}
+
+void *UsmMemAllocPoolsManager::createUnifiedMemoryAllocation(size_t size, const UnifiedMemoryProperties &memoryProperties) {
+    DEBUG_BREAK_IF(false == isInitialized());
+    if (!canBePooled(size, memoryProperties)) {
+        return nullptr;
+    }
+    std::unique_lock<std::mutex> lock(mtx);
+    for (const auto &poolInfo : this->poolInfos) {
+        if (size <= poolInfo.maxServicedSize) {
+            for (auto &pool : this->pools[poolInfo]) {
+                if (void *ptr = pool->createUnifiedMemoryAllocation(size, memoryProperties)) {
+                    return ptr;
+                }
+            }
+            break;
+        }
+    }
+    return nullptr;
+}
+
+bool UsmMemAllocPoolsManager::freeSVMAlloc(const void *ptr, bool blocking) {
+    if (UsmMemAllocPool *pool = this->getPoolContainingAlloc(ptr)) {
+        return pool->freeSVMAlloc(ptr, blocking);
+    }
+    return false;
+}
+
+size_t UsmMemAllocPoolsManager::getPooledAllocationSize(const void *ptr) {
+    if (UsmMemAllocPool *pool = this->getPoolContainingAlloc(ptr)) {
+        return pool->getPooledAllocationSize(ptr);
+    }
+    return 0u;
+}
+
+void *UsmMemAllocPoolsManager::getPooledAllocationBasePtr(const void *ptr) {
+    if (UsmMemAllocPool *pool = this->getPoolContainingAlloc(ptr)) {
+        return pool->getPooledAllocationBasePtr(ptr);
+    }
+    return nullptr;
+}
+
+size_t UsmMemAllocPoolsManager::getOffsetInPool(const void *ptr) {
+    if (UsmMemAllocPool *pool = this->getPoolContainingAlloc(ptr)) {
+        return pool->getOffsetInPool(ptr);
+    }
+    return 0u;
+}
+
+uint64_t UsmMemAllocPoolsManager::getFreeMemory() {
+    const auto isIntegrated = device->getHardwareInfo().capabilityTable.isIntegratedDevice;
+    const uint64_t deviceMemory = isIntegrated ? device->getDeviceInfo().globalMemSize : device->getDeviceInfo().localMemSize;
+    const uint64_t usedMemory = memoryManager->getUsedLocalMemorySize(device->getRootDeviceIndex());
+    DEBUG_BREAK_IF(usedMemory > deviceMemory);
+    const uint64_t freeMemory = deviceMemory - usedMemory;
+    return freeMemory;
+}
+
+bool UsmMemAllocPoolsManager::recycleSVMAlloc(void *ptr, bool blocking) {
+    if (false == isInitialized()) {
+        return false;
+    }
+    auto svmData = this->svmMemoryManager->getSVMAlloc(ptr);
+    DEBUG_BREAK_IF(svmData->memoryType != this->poolMemoryType);
+    if (svmData->size > maxPoolableSize || belongsInPreallocatedPool(svmData->size)) {
+        return false;
+    }
+    if (this->totalSize + svmData->size > getFreeMemory() * UsmMemAllocPool::getPercentOfFreeMemoryForRecycling(svmData->memoryType)) {
+        return false;
+    }
+    std::unique_lock<std::mutex> lock(mtx);
+    for (auto poolInfoIndex = firstNonPreallocatedIndex; poolInfoIndex < this->poolInfos.size(); ++poolInfoIndex) {
+        const auto &poolInfo = this->poolInfos[poolInfoIndex];
+        if (svmData->size <= poolInfo.maxServicedSize) {
+            auto pool = std::make_unique<UsmMemAllocPool>();
+            pool->initialize(this->svmMemoryManager, ptr, svmData, poolInfo.minServicedSize, svmData->size);
+            this->pools[poolInfo].push_back(std::move(pool));
+            this->totalSize += svmData->size;
+            return true;
+        }
+    }
+    DEBUG_BREAK_IF(true);
+    return false;
+}
+
+UsmMemAllocPool *UsmMemAllocPoolsManager::getPoolContainingAlloc(const void *ptr) {
+    std::unique_lock<std::mutex> lock(mtx);
+    for (const auto &poolInfo : this->poolInfos) {
+        for (auto &pool : this->pools[poolInfo]) {
+            if (pool->isInPool(ptr)) {
+                return pool.get();
+            }
+        }
+    }
+    return nullptr;
+}
+
 } // namespace NEO
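`recycleSVMAlloc` above is the gatekeeper for turning a freed allocation into a new pool. A condensed sketch of the admission rule it implements, not part of the patch; the constants are the ones added in this commit, and 8% applies to device USM (2% would apply to host USM):

```cpp
// Sketch of the recycling decision, mirroring UsmMemAllocPoolsManager::recycleSVMAlloc
// for device USM; all names besides MemoryConstants are illustrative.
bool shouldRecycle(size_t allocSize, uint64_t pooledTotal, uint64_t freeDeviceMemory) {
    constexpr size_t maxPoolableSize = 256 * MemoryConstants::megaByte;
    constexpr size_t largestPreallocatedBucket = 2 * MemoryConstants::megaByte;
    if (allocSize > maxPoolableSize) {
        return false; // never reuse allocations larger than 256 MB
    }
    if (allocSize <= largestPreallocatedBucket) {
        return false; // small sizes are served by the preallocated pools instead
    }
    // keep the total recycled footprint below a fraction of currently free memory
    return pooledTotal + allocSize <= freeDeviceMemory * 0.08;
}
```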
@@ -11,6 +11,9 @@
 #include "shared/source/utilities/heap_allocator.h"
 #include "shared/source/utilities/sorted_vector.h"
 
+#include <array>
+#include <map>
+
 namespace NEO {
 class UsmMemAllocPool {
   public:
@@ -23,21 +26,25 @@ class UsmMemAllocPool {
     using AllocationsInfoStorage = BaseSortedPointerWithValueVector<AllocationInfo>;
 
     UsmMemAllocPool() = default;
-    bool initialize(SVMAllocsManager *svmMemoryManager, const UnifiedMemoryProperties &memoryProperties, size_t poolSize);
-    bool isInitialized();
+    bool initialize(SVMAllocsManager *svmMemoryManager, const UnifiedMemoryProperties &memoryProperties, size_t poolSize, size_t minServicedSize, size_t maxServicedSize);
+    bool initialize(SVMAllocsManager *svmMemoryManager, void *ptr, SvmAllocationData *svmData, size_t minServicedSize, size_t maxServicedSize);
+    bool isInitialized() const;
+    size_t getPoolSize() const;
     void cleanup();
-    bool alignmentIsAllowed(size_t alignment);
+    static bool alignmentIsAllowed(size_t alignment);
+    static bool flagsAreAllowed(const UnifiedMemoryProperties &memoryProperties);
+    static double getPercentOfFreeMemoryForRecycling(InternalMemoryType memoryType);
+    bool sizeIsAllowed(size_t size);
     bool canBePooled(size_t size, const UnifiedMemoryProperties &memoryProperties);
     void *createUnifiedMemoryAllocation(size_t size, const UnifiedMemoryProperties &memoryProperties);
-    bool isInPool(const void *ptr);
+    bool isInPool(const void *ptr) const;
+    bool isEmpty();
     bool freeSVMAlloc(const void *ptr, bool blocking);
     size_t getPooledAllocationSize(const void *ptr);
     void *getPooledAllocationBasePtr(const void *ptr);
-    size_t getOffsetInPool(const void *ptr);
+    size_t getOffsetInPool(const void *ptr) const;
 
-    static constexpr auto allocationThreshold = 1 * MemoryConstants::megaByte;
     static constexpr auto chunkAlignment = 512u;
-    static constexpr auto startingOffset = 2 * allocationThreshold;
 
   protected:
     size_t poolSize{};
@@ -48,6 +55,76 @@ class UsmMemAllocPool {
     AllocationsInfoStorage allocations;
     std::mutex mtx;
     InternalMemoryType poolMemoryType;
+    size_t minServicedSize;
+    size_t maxServicedSize;
+};
+
+class UsmMemAllocPoolsManager {
+  public:
+    struct PoolInfo {
+        size_t minServicedSize;
+        size_t maxServicedSize;
+        size_t preallocateSize;
+        bool isPreallocated() const;
+        bool operator<(const PoolInfo &rhs) const {
+            return this->minServicedSize < rhs.minServicedSize;
+        }
+    };
+    // clang-format off
+    const std::array<const PoolInfo, 6> poolInfos = {
+        PoolInfo{        0,   4 * KB,  2 * MB},
+        PoolInfo{ 4 * KB+1,  64 * KB,  2 * MB},
+        PoolInfo{64 * KB+1,   2 * MB, 16 * MB},
+        PoolInfo{ 2 * MB+1,  16 * MB,       0},
+        PoolInfo{16 * MB+1,  64 * MB,       0},
+        PoolInfo{64 * MB+1, 256 * MB,       0}};
+    // clang-format on
+    const size_t firstNonPreallocatedIndex = 3u;
+
+    using UnifiedMemoryProperties = SVMAllocsManager::UnifiedMemoryProperties;
+    static constexpr uint64_t KB = MemoryConstants::kiloByte; // NOLINT(readability-identifier-naming)
+    static constexpr uint64_t MB = MemoryConstants::megaByte; // NOLINT(readability-identifier-naming)
+    static constexpr uint64_t maxPoolableSize = 256 * MB;
+    UsmMemAllocPoolsManager(MemoryManager *memoryManager,
+                            RootDeviceIndicesContainer rootDeviceIndices,
+                            std::map<uint32_t, NEO::DeviceBitfield> deviceBitFields,
+                            Device *device,
+                            InternalMemoryType poolMemoryType) : memoryManager(memoryManager), rootDeviceIndices(rootDeviceIndices), deviceBitFields(deviceBitFields), device(device), poolMemoryType(poolMemoryType){};
+    MOCKABLE_VIRTUAL ~UsmMemAllocPoolsManager() = default;
+    bool ensureInitialized(SVMAllocsManager *svmMemoryManager);
+    bool isInitialized() const;
+    void trim();
+    void trim(std::vector<std::unique_ptr<UsmMemAllocPool>> &poolVector);
+    void cleanup();
+    void *createUnifiedMemoryAllocation(size_t size, const UnifiedMemoryProperties &memoryProperties);
+    bool freeSVMAlloc(const void *ptr, bool blocking);
+    MOCKABLE_VIRTUAL uint64_t getFreeMemory();
+    bool recycleSVMAlloc(void *ptr, bool blocking);
+    size_t getPooledAllocationSize(const void *ptr);
+    void *getPooledAllocationBasePtr(const void *ptr);
+    size_t getOffsetInPool(const void *ptr);
+
+  protected:
+    static bool canBePooled(size_t size, const UnifiedMemoryProperties &memoryProperties) {
+        return size <= maxPoolableSize &&
+               UsmMemAllocPool::alignmentIsAllowed(memoryProperties.alignment) &&
+               UsmMemAllocPool::flagsAreAllowed(memoryProperties);
+    }
+    bool belongsInPreallocatedPool(size_t size) {
+        return size <= poolInfos[firstNonPreallocatedIndex - 1].maxServicedSize;
+    }
+
+    UsmMemAllocPool *getPoolContainingAlloc(const void *ptr);
+
+    SVMAllocsManager *svmMemoryManager{};
+    MemoryManager *memoryManager;
+    RootDeviceIndicesContainer rootDeviceIndices;
+    std::map<uint32_t, NEO::DeviceBitfield> deviceBitFields;
+    Device *device;
+    InternalMemoryType poolMemoryType;
+    size_t totalSize;
+    std::mutex mtx;
+    std::map<PoolInfo, std::vector<std::unique_ptr<UsmMemAllocPool>>> pools;
 };
 
 } // namespace NEO
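The `poolInfos` table above drives bucket selection: a request is routed to the first bucket whose upper bound is not smaller than the request, with buckets 0..2 preallocated at startup and buckets 3..5 populated only by recycled allocations. A standalone sketch of that routing, not part of the patch, with the bucket boundaries copied from the table:

```cpp
#include <cstddef>
#include <optional>

// Sketch of the size-range routing implied by poolInfos; sizes in bytes.
std::optional<int> selectBucket(size_t size) {
    constexpr size_t KB = 1024, MB = 1024 * KB;
    constexpr size_t upperBounds[] = {4 * KB, 64 * KB, 2 * MB, 16 * MB, 64 * MB, 256 * MB};
    for (int i = 0; i < 6; ++i) {
        if (size <= upperBounds[i]) {
            return i; // buckets 0..2 are preallocated, 3..5 are filled by recycling
        }
    }
    return std::nullopt; // larger than 256 MB: never pooled
}
```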
@@ -172,6 +172,9 @@ GraphicsAllocation *MockMemoryManager::allocateGraphicsMemoryInDevicePool(const
 }
 
 GraphicsAllocation *MockMemoryManager::allocateGraphicsMemoryWithAlignment(const AllocationData &allocationData) {
+    if (returnMockGAFromHostPool) {
+        return mockGa;
+    }
     if (failInAllocateWithSizeAndAlignment) {
         return nullptr;
     }
@@ -58,6 +58,7 @@ class MockMemoryManager : public MemoryManagerCreate<OsAgnosticMemoryManager> {
     using MemoryManager::isAllocationTypeToCapture;
     using MemoryManager::isCopyRequired;
     using MemoryManager::latestContextId;
+    using MemoryManager::localMemAllocsSize;
     using MemoryManager::localMemorySupported;
     using MemoryManager::reservedMemory;
     using MemoryManager::secondaryEngines;
@@ -279,6 +280,7 @@ class MockMemoryManager : public MemoryManagerCreate<OsAgnosticMemoryManager> {
     bool callBaseAllocateGraphicsMemoryForNonSvmHostPtr = true;
     bool failMapPhysicalToVirtualMemory = false;
     bool returnMockGAFromDevicePool = false;
+    bool returnMockGAFromHostPool = false;
     std::unique_ptr<MockExecutionEnvironment> mockExecutionEnvironment;
     DeviceBitfield recentlyPassedDeviceBitfield{};
     std::unique_ptr<MultiGraphicsAllocation> waitAllocations = nullptr;
@@ -23,5 +23,6 @@ struct MockProductHelper : ProductHelperHw<IGFX_UNKNOWN> {
     ADDMETHOD_CONST_NOBASE(isBlitCopyRequiredForLocalMemory, bool, true, (const RootDeviceEnvironment &rootDeviceEnvironment, const GraphicsAllocation &allocation));
     ADDMETHOD_CONST_NOBASE(isDeviceUsmAllocationReuseSupported, bool, false, ());
     ADDMETHOD_CONST_NOBASE(isHostUsmAllocationReuseSupported, bool, false, ());
+    ADDMETHOD_CONST_NOBASE(isUsmPoolAllocatorSupported, bool, false, ());
 };
 } // namespace NEO
@@ -12,9 +12,30 @@ namespace NEO {
class MockUsmMemAllocPool : public UsmMemAllocPool {
  public:
    using UsmMemAllocPool::allocations;
    using UsmMemAllocPool::maxServicedSize;
    using UsmMemAllocPool::minServicedSize;
    using UsmMemAllocPool::pool;
    using UsmMemAllocPool::poolEnd;
    using UsmMemAllocPool::poolMemoryType;
    using UsmMemAllocPool::poolSize;
};

class MockUsmMemAllocPoolsManager : public UsmMemAllocPoolsManager {
  public:
    using UsmMemAllocPoolsManager::canBePooled;
    using UsmMemAllocPoolsManager::device;
    using UsmMemAllocPoolsManager::getPoolContainingAlloc;
    using UsmMemAllocPoolsManager::memoryManager;
    using UsmMemAllocPoolsManager::pools;
    using UsmMemAllocPoolsManager::totalSize;
    using UsmMemAllocPoolsManager::UsmMemAllocPoolsManager;

    uint64_t getFreeMemory() override {
        if (callBaseGetFreeMemory) {
            return UsmMemAllocPoolsManager::getFreeMemory();
        }
        return mockFreeMemory;
    }

    uint64_t mockFreeMemory = 0u;
    bool callBaseGetFreeMemory = false;
};
} // namespace NEO
@@ -625,4 +625,5 @@ DeferStateInitSubmissionToFirstRegularUsage = -1
WaitForPagingFenceInController = -1
DirectSubmissionPrintSemaphoreUsage = -1
ForceNonCoherentModeForTimestamps = 0
ExperimentalUSMAllocationReuseVersion = -1
# Please don't edit below this line
@@ -19,6 +19,7 @@ ApiSpecificConfig::ApiType apiTypeForUlts = ApiSpecificConfig::OCL;
bool globalStatelessL0 = false;
bool globalStatelessOcl = false;
bool isStatelessCompressionSupportedForUlts = true;
bool isDeviceUsmPoolingEnabledForUlts = true;

StackVec<const char *, 4> validUltL0Prefixes = {"NEO_L0_", "NEO_", ""};
StackVec<NEO::DebugVarPrefix, 4> validUltL0PrefixTypes = {DebugVarPrefix::neoL0, DebugVarPrefix::neo, DebugVarPrefix::none};

@@ -59,7 +60,7 @@ bool ApiSpecificConfig::isHostAllocationCacheEnabled() {
}

bool ApiSpecificConfig::isDeviceUsmPoolingEnabled() {
    return isDeviceUsmPoolingEnabledForUlts;
}

bool ApiSpecificConfig::isHostUsmPoolingEnabled() {
@@ -7,10 +7,12 @@

#include "shared/source/device/device.h"
#include "shared/source/gmm_helper/gmm.h"
#include "shared/source/helpers/api_specific_config.h"
#include "shared/source/helpers/array_count.h"
#include "shared/source/helpers/gfx_core_helper.h"
#include "shared/source/memory_manager/allocations_list.h"
#include "shared/source/memory_manager/gfx_partition.h"
#include "shared/source/memory_manager/unified_memory_pooling.h"
#include "shared/source/os_interface/device_factory.h"
#include "shared/source/os_interface/driver_info.h"
#include "shared/source/os_interface/os_context.h"

@@ -19,6 +21,7 @@
#include "shared/test/common/fixtures/device_fixture.h"
#include "shared/test/common/helpers/debug_manager_state_restore.h"
#include "shared/test/common/helpers/default_hw_info.h"
#include "shared/test/common/helpers/raii_product_helper.h"
#include "shared/test/common/helpers/ult_hw_config.h"
#include "shared/test/common/helpers/variable_backup.h"
#include "shared/test/common/mocks/mock_allocation_properties.h"

@@ -33,8 +36,11 @@
#include "shared/test/common/mocks/ult_device_factory.h"
#include "shared/test/common/test_macros/hw_test.h"
#include "shared/test/common/test_macros/test.h"

using namespace NEO;
extern ApiSpecificConfig::ApiType apiTypeForUlts;
namespace NEO {
extern bool isDeviceUsmPoolingEnabledForUlts;
}

TEST(DeviceBlitterTest, whenBlitterOperationsSupportIsDisabledThenNoInternalCopyEngineIsReturned) {
    VariableBackup<HardwareInfo> backupHwInfo(defaultHwInfo.get());
@@ -1803,6 +1809,60 @@ TEST_F(DeviceTests, givenDebuggerRequestedByUserWhenDeviceWithSubDevicesCreatedT
    EXPECT_NE(nullptr, deviceFactory.rootDevices[0]->getL0Debugger());
}

TEST_F(DeviceTests, givenNewUsmPoolingEnabledWhenDeviceInitializedThenUsmMemAllocPoolsManagerIsCreatedButNotInitialized) {
    VariableBackup<bool> backupIsDeviceUsmPoolingEnabledForUlts(&isDeviceUsmPoolingEnabledForUlts);
    isDeviceUsmPoolingEnabledForUlts = true;
    {
        DebugManagerStateRestore restorer;
        debugManager.flags.ExperimentalUSMAllocationReuseVersion.set(2);
        auto executionEnvironment = MockDevice::prepareExecutionEnvironment(defaultHwInfo.get(), 0u);
        auto mockProductHelper = new MockProductHelper;
        executionEnvironment->rootDeviceEnvironments[0]->productHelper.reset(mockProductHelper);
        mockProductHelper->isUsmPoolAllocatorSupportedResult = true;
        UltDeviceFactory deviceFactory{1, 1, *executionEnvironment};
        auto device = deviceFactory.rootDevices[0];
        auto usmMemAllocPoolsManager = device->getUsmMemAllocPoolsManager();
        ASSERT_NE(nullptr, usmMemAllocPoolsManager);
        EXPECT_FALSE(usmMemAllocPoolsManager->isInitialized());
    }
    {
        DebugManagerStateRestore restorer;
        debugManager.flags.ExperimentalUSMAllocationReuseVersion.set(-1);
        auto executionEnvironment = MockDevice::prepareExecutionEnvironment(defaultHwInfo.get(), 0u);
        auto mockProductHelper = new MockProductHelper;
        executionEnvironment->rootDeviceEnvironments[0]->productHelper.reset(mockProductHelper);
        mockProductHelper->isUsmPoolAllocatorSupportedResult = true;
        UltDeviceFactory deviceFactory{1, 1, *executionEnvironment};
        auto device = deviceFactory.rootDevices[0];
        auto usmMemAllocPoolsManager = device->getUsmMemAllocPoolsManager();
        EXPECT_EQ(nullptr, usmMemAllocPoolsManager);
    }
    {
        DebugManagerStateRestore restorer;
        debugManager.flags.ExperimentalUSMAllocationReuseVersion.set(2);
        auto executionEnvironment = MockDevice::prepareExecutionEnvironment(defaultHwInfo.get(), 0u);
        auto mockProductHelper = new MockProductHelper;
        executionEnvironment->rootDeviceEnvironments[0]->productHelper.reset(mockProductHelper);
        mockProductHelper->isUsmPoolAllocatorSupportedResult = false;
        UltDeviceFactory deviceFactory{1, 1, *executionEnvironment};
        auto device = deviceFactory.rootDevices[0];
        auto usmMemAllocPoolsManager = device->getUsmMemAllocPoolsManager();
        EXPECT_EQ(nullptr, usmMemAllocPoolsManager);
    }
    isDeviceUsmPoolingEnabledForUlts = false;
    {
        DebugManagerStateRestore restorer;
        debugManager.flags.ExperimentalUSMAllocationReuseVersion.set(2);
        auto executionEnvironment = MockDevice::prepareExecutionEnvironment(defaultHwInfo.get(), 0u);
        auto mockProductHelper = new MockProductHelper;
        executionEnvironment->rootDeviceEnvironments[0]->productHelper.reset(mockProductHelper);
        mockProductHelper->isUsmPoolAllocatorSupportedResult = true;
        UltDeviceFactory deviceFactory{1, 1, *executionEnvironment};
        auto device = deviceFactory.rootDevices[0];
        auto usmMemAllocPoolsManager = device->getUsmMemAllocPoolsManager();
        EXPECT_EQ(nullptr, usmMemAllocPoolsManager);
    }
}
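Read together, the four scenarios in the test above pin down when the device-side pools manager exists at all. A compact restatement of that gate, offered as an assumption about the production code rather than a quote of it (shouldCreateUsmMemAllocPoolsManager is an illustrative helper, not a driver function):

// A sketch of the creation gate the scenarios above imply: the pools manager
// is created only when the experimental flag selects the redesigned reuse
// path (2), the product helper reports USM pool allocator support, and device
// USM pooling is enabled for the API.
inline bool shouldCreateUsmMemAllocPoolsManager(int experimentalUsmAllocationReuseVersion,
                                                bool isUsmPoolAllocatorSupported,
                                                bool isDeviceUsmPoolingEnabled) {
    return experimentalUsmAllocationReuseVersion == 2 &&
           isUsmPoolAllocatorSupported &&
           isDeviceUsmPoolingEnabled;
}

Even when all three conditions hold, the manager is only constructed, not initialized; initialization is deferred to the first ensureInitialized call, as the allocDeviceMem path shows earlier in this commit.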
TEST(DeviceWithoutAILTest, givenNoAILWhenCreateDeviceThenDeviceIsCreated) {
    DebugManagerStateRestore dbgRestorer;
    debugManager.flags.EnableAIL.set(false);
@@ -5,6 +5,7 @@
 *
 */

#include "shared/source/helpers/aligned_memory.h"
#include "shared/source/memory_manager/unified_memory_pooling.h"
#include "shared/test/common/helpers/debug_manager_state_restore.h"
#include "shared/test/common/mocks/mock_device.h"

@@ -13,12 +14,35 @@
#include "shared/test/common/mocks/mock_usm_memory_pool.h"
#include "shared/test/common/mocks/ult_device_factory.h"
#include "shared/test/common/test_macros/test.h"
#include "shared/test/common/test_macros/test_checks_shared.h"

#include "gtest/gtest.h"

#include <array>
using namespace NEO;

using UnifiedMemoryPoolingStaticTest = ::testing::Test;
TEST_F(UnifiedMemoryPoolingStaticTest, givenUsmAllocPoolWhenCallingStaticMethodsThenReturnCorrectValues) {
    EXPECT_EQ(0.08, UsmMemAllocPool::getPercentOfFreeMemoryForRecycling(InternalMemoryType::deviceUnifiedMemory));
    EXPECT_EQ(0.02, UsmMemAllocPool::getPercentOfFreeMemoryForRecycling(InternalMemoryType::hostUnifiedMemory));
    EXPECT_EQ(0.00, UsmMemAllocPool::getPercentOfFreeMemoryForRecycling(InternalMemoryType::sharedUnifiedMemory));

    EXPECT_TRUE(UsmMemAllocPool::alignmentIsAllowed(UsmMemAllocPool::chunkAlignment));
    EXPECT_TRUE(UsmMemAllocPool::alignmentIsAllowed(UsmMemAllocPool::chunkAlignment * 2));
    EXPECT_FALSE(UsmMemAllocPool::alignmentIsAllowed(UsmMemAllocPool::chunkAlignment / 2));

    const RootDeviceIndicesContainer rootDeviceIndices;
    const std::map<uint32_t, DeviceBitfield> deviceBitfields;
    SVMAllocsManager::UnifiedMemoryProperties unifiedMemoryProperties(InternalMemoryType::hostUnifiedMemory, MemoryConstants::pageSize2M, rootDeviceIndices, deviceBitfields);
    EXPECT_TRUE(UsmMemAllocPool::flagsAreAllowed(unifiedMemoryProperties));
    unifiedMemoryProperties.allocationFlags.allFlags = 1u;
    unifiedMemoryProperties.allocationFlags.allAllocFlags = 0u;
    EXPECT_FALSE(UsmMemAllocPool::flagsAreAllowed(unifiedMemoryProperties));
    unifiedMemoryProperties.allocationFlags.allFlags = 0u;
    unifiedMemoryProperties.allocationFlags.allAllocFlags = 1u;
    EXPECT_FALSE(UsmMemAllocPool::flagsAreAllowed(unifiedMemoryProperties));
}

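The static checks above fix the recycling budgets (8% of free memory for device USM, 2% for host USM, none for shared USM) and constrain which alignments and flags a pool will serve. A hedged sketch of the alignment rule those three expectations suggest; this is one plausible reading, not the driver's definition of UsmMemAllocPool::alignmentIsAllowed:

#include <cstddef>

// One plausible reading of the alignment expectations above (an assumption):
// a requested alignment is poolable when it is unspecified (0) or a power of
// two no smaller than the pool's chunk alignment.
inline bool alignmentLooksPoolable(size_t alignment, size_t chunkAlignment) {
    const bool isPowerOfTwo = alignment != 0 && (alignment & (alignment - 1)) == 0;
    return alignment == 0 || (isPowerOfTwo && alignment >= chunkAlignment);
}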
using UnifiedMemoryPoolingTest = Test<SVMMemoryAllocatorFixture<true>>;
TEST_F(UnifiedMemoryPoolingTest, givenUsmAllocPoolWhenCallingIsInitializedThenReturnCorrectValue) {
    UsmMemAllocPool usmMemAllocPool;

@@ -31,7 +55,7 @@ TEST_F(UnifiedMemoryPoolingTest, givenUsmAllocPoolWhenCallingIsInitializedThenRe
    SVMAllocsManager::UnifiedMemoryProperties unifiedMemoryProperties(InternalMemoryType::hostUnifiedMemory, MemoryConstants::pageSize2M, rootDeviceIndices, deviceBitfields);
    unifiedMemoryProperties.device = device;

    EXPECT_TRUE(usmMemAllocPool.initialize(svmManager.get(), unifiedMemoryProperties, 1 * MemoryConstants::megaByte, 0u, 1 * MemoryConstants::megaByte));
    EXPECT_TRUE(usmMemAllocPool.isInitialized());

    usmMemAllocPool.cleanup();
@@ -53,7 +77,7 @@ class InitializedUnifiedMemoryPoolingTest : public UnifiedMemoryPoolingTest {

        poolMemoryProperties = std::make_unique<SVMAllocsManager::UnifiedMemoryProperties>(poolMemoryType, MemoryConstants::pageSize2M, rootDeviceIndices, deviceBitfields);
        poolMemoryProperties->device = device;
        ASSERT_EQ(!failAllocation, usmMemAllocPool.initialize(svmManager.get(), *poolMemoryProperties.get(), poolSize, 0u, poolAllocationThreshold));
    }
    void TearDown() override {
        usmMemAllocPool.cleanup();
@@ -66,30 +90,38 @@ class InitializedUnifiedMemoryPoolingTest : public UnifiedMemoryPoolingTest {
    Device *device;
    std::unique_ptr<MockSVMAllocsManager> svmManager;
    std::unique_ptr<SVMAllocsManager::UnifiedMemoryProperties> poolMemoryProperties;
    constexpr static auto poolAllocationThreshold = 1 * MemoryConstants::megaByte;
};

using InitializedHostUnifiedMemoryPoolingTest = InitializedUnifiedMemoryPoolingTest<InternalMemoryType::hostUnifiedMemory, false>;
TEST_F(InitializedHostUnifiedMemoryPoolingTest, givenDifferentAllocationSizesWhenCallingCanBePooledThenCorrectValueIsReturned) {
    SVMAllocsManager::UnifiedMemoryProperties memoryProperties(InternalMemoryType::hostUnifiedMemory, MemoryConstants::pageSize64k, rootDeviceIndices, deviceBitfields);
    EXPECT_TRUE(usmMemAllocPool.canBePooled(poolAllocationThreshold, memoryProperties));
    EXPECT_FALSE(usmMemAllocPool.canBePooled(poolAllocationThreshold + 1, memoryProperties));

    memoryProperties.memoryType = InternalMemoryType::sharedUnifiedMemory;
    EXPECT_FALSE(usmMemAllocPool.canBePooled(poolAllocationThreshold, memoryProperties));
    EXPECT_FALSE(usmMemAllocPool.canBePooled(poolAllocationThreshold + 1, memoryProperties));

    memoryProperties.memoryType = InternalMemoryType::deviceUnifiedMemory;
    EXPECT_FALSE(usmMemAllocPool.canBePooled(poolAllocationThreshold, memoryProperties));
    EXPECT_FALSE(usmMemAllocPool.canBePooled(poolAllocationThreshold + 1, memoryProperties));

    memoryProperties.memoryType = InternalMemoryType::hostUnifiedMemory;
    memoryProperties.allocationFlags.allFlags = 1u;
    EXPECT_FALSE(usmMemAllocPool.canBePooled(poolAllocationThreshold, memoryProperties));
    EXPECT_FALSE(usmMemAllocPool.canBePooled(poolAllocationThreshold + 1, memoryProperties));

    memoryProperties.allocationFlags.allFlags = 0u;
    memoryProperties.allocationFlags.allAllocFlags = 1u;
    EXPECT_FALSE(usmMemAllocPool.canBePooled(poolAllocationThreshold, memoryProperties));
    EXPECT_FALSE(usmMemAllocPool.canBePooled(poolAllocationThreshold + 1, memoryProperties));

    memoryProperties.allocationFlags.allAllocFlags = 0u;
    constexpr auto notAllowedAlignment = UsmMemAllocPool::chunkAlignment / 2;
    memoryProperties.alignment = notAllowedAlignment;
    EXPECT_FALSE(usmMemAllocPool.canBePooled(poolAllocationThreshold, memoryProperties));
    EXPECT_FALSE(usmMemAllocPool.canBePooled(poolAllocationThreshold + 1, memoryProperties));
}

TEST_F(InitializedHostUnifiedMemoryPoolingTest, givenVariousPointersWhenCallingIsInPoolAndGetOffsetInPoolThenCorrectValuesAreReturned) {

@@ -118,7 +150,7 @@ TEST_F(InitializedHostUnifiedMemoryPoolingTest, givenAlignmentsWhenCallingAlignm

TEST_F(InitializedHostUnifiedMemoryPoolingTest, givenPoolableAllocationWhenUsingPoolThenAllocationIsPooledUnlessPoolIsFull) {
    SVMAllocsManager::UnifiedMemoryProperties memoryProperties(InternalMemoryType::hostUnifiedMemory, MemoryConstants::pageSize64k, rootDeviceIndices, deviceBitfields);
    const auto allocationSize = poolAllocationThreshold;
    const auto allocationSizeAboveThreshold = allocationSize + 1;
    EXPECT_EQ(nullptr, usmMemAllocPool.createUnifiedMemoryAllocation(allocationSizeAboveThreshold, memoryProperties));
    EXPECT_EQ(nullptr, usmMemAllocPool.allocations.get(reinterpret_cast<void *>(0x1)));
@@ -153,7 +185,7 @@ TEST_F(InitializedHostUnifiedMemoryPoolingTest, givenPoolableAllocationWhenUsing

TEST_F(InitializedHostUnifiedMemoryPoolingTest, givenVariousAlignmentsWhenUsingPoolThenAddressIsAligned) {
    SVMAllocsManager::UnifiedMemoryProperties memoryProperties(InternalMemoryType::hostUnifiedMemory, 0u, rootDeviceIndices, deviceBitfields);
    const auto allocationSize = poolAllocationThreshold;

    std::array<size_t, 8> alignmentsToCheck = {UsmMemAllocPool::chunkAlignment,
                                               UsmMemAllocPool::chunkAlignment * 2,

@@ -164,7 +196,7 @@ TEST_F(InitializedHostUnifiedMemoryPoolingTest, givenVariousAlignmentsWhenUsingP
                                               UsmMemAllocPool::chunkAlignment * 64,
                                               UsmMemAllocPool::chunkAlignment * 128};
    for (const auto &alignment : alignmentsToCheck) {
        if (alignment > poolAllocationThreshold) {
            break;
        }
        memoryProperties.alignment = alignment;
@@ -189,7 +221,7 @@ TEST_F(InitializedHostUnifiedMemoryPoolingTest, givenPoolableAllocationWhenGetti

    SVMAllocsManager::UnifiedMemoryProperties memoryProperties(InternalMemoryType::hostUnifiedMemory, MemoryConstants::pageSize64k, rootDeviceIndices, deviceBitfields);
    const auto requestedAllocSize = 1 * MemoryConstants::kiloByte;
    EXPECT_GT(poolAllocationThreshold, requestedAllocSize + usmMemAllocPool.chunkAlignment);

    // we want an allocation from the middle of the pool for testing
    auto unusedAlloc = usmMemAllocPool.createUnifiedMemoryAllocation(requestedAllocSize, memoryProperties);
@@ -227,7 +259,7 @@ TEST_F(InitializedHostUnifiedMemoryPoolingTest, givenPoolableAllocationWhenGetti
using InitializationFailedUnifiedMemoryPoolingTest = InitializedUnifiedMemoryPoolingTest<InternalMemoryType::hostUnifiedMemory, true>;
TEST_F(InitializationFailedUnifiedMemoryPoolingTest, givenNotInitializedPoolWhenUsingPoolThenMethodsSucceed) {
    SVMAllocsManager::UnifiedMemoryProperties memoryProperties(InternalMemoryType::hostUnifiedMemory, MemoryConstants::pageSize64k, rootDeviceIndices, deviceBitfields);
    const auto allocationSize = poolAllocationThreshold;
    EXPECT_EQ(nullptr, usmMemAllocPool.createUnifiedMemoryAllocation(allocationSize, memoryProperties));
    const auto bogusPtr = reinterpret_cast<void *>(0x1);
    EXPECT_FALSE(usmMemAllocPool.freeSVMAlloc(bogusPtr, true));

@@ -235,3 +267,346 @@ TEST_F(InitializationFailedUnifiedMemoryPoolingTest, givenNotInitializedPoolWhen
    EXPECT_EQ(nullptr, usmMemAllocPool.getPooledAllocationBasePtr(bogusPtr));
    EXPECT_EQ(0u, usmMemAllocPool.getOffsetInPool(bogusPtr));
}

using UnifiedMemoryPoolingManagerStaticTest = ::testing::Test;
TEST_F(UnifiedMemoryPoolingManagerStaticTest, givenUsmMemAllocPoolsManagerWhenCallingCanBePooledThenCorrectValueIsReturned) {
    const RootDeviceIndicesContainer rootDeviceIndices;
    const std::map<uint32_t, DeviceBitfield> deviceBitfields;
    SVMAllocsManager::UnifiedMemoryProperties unifiedMemoryProperties(InternalMemoryType::hostUnifiedMemory, MemoryConstants::pageSize2M, rootDeviceIndices, deviceBitfields);
    EXPECT_TRUE(MockUsmMemAllocPoolsManager::canBePooled(UsmMemAllocPoolsManager::maxPoolableSize, unifiedMemoryProperties));
    EXPECT_FALSE(MockUsmMemAllocPoolsManager::canBePooled(UsmMemAllocPoolsManager::maxPoolableSize + 1, unifiedMemoryProperties));

    unifiedMemoryProperties.alignment = UsmMemAllocPool::chunkAlignment / 2;
    EXPECT_FALSE(MockUsmMemAllocPoolsManager::canBePooled(UsmMemAllocPoolsManager::maxPoolableSize, unifiedMemoryProperties));

    unifiedMemoryProperties.alignment = UsmMemAllocPool::chunkAlignment;
    unifiedMemoryProperties.allocationFlags.allFlags = 1u;
    EXPECT_FALSE(MockUsmMemAllocPoolsManager::canBePooled(UsmMemAllocPoolsManager::maxPoolableSize, unifiedMemoryProperties));

    unifiedMemoryProperties.allocationFlags.allFlags = 0u;
    unifiedMemoryProperties.allocationFlags.allAllocFlags = 1u;
    EXPECT_FALSE(MockUsmMemAllocPoolsManager::canBePooled(UsmMemAllocPoolsManager::maxPoolableSize, unifiedMemoryProperties));
}

class UnifiedMemoryPoolingManagerTest : public SVMMemoryAllocatorFixture<true>, public ::testing::TestWithParam<std::tuple<InternalMemoryType, bool>> {
  public:
    void SetUp() override {
        REQUIRE_64BIT_OR_SKIP();
        SVMMemoryAllocatorFixture::setUp();
        poolMemoryType = std::get<0>(GetParam());
        failAllocation = std::get<1>(GetParam());

        deviceFactory = std::unique_ptr<UltDeviceFactory>(new UltDeviceFactory(1, 1));
        device = deviceFactory->rootDevices[0];

        RootDeviceIndicesContainer rootDeviceIndicesPool;
        rootDeviceIndicesPool.pushUnique(device->getRootDeviceIndex());
        std::map<uint32_t, DeviceBitfield> deviceBitfieldsPool;
        deviceBitfieldsPool.emplace(device->getRootDeviceIndex(), device->getDeviceBitfield());
        usmMemAllocPoolsManager.reset(new MockUsmMemAllocPoolsManager(device->getMemoryManager(),
                                                                      rootDeviceIndicesPool,
                                                                      deviceBitfieldsPool,
                                                                      device,
                                                                      poolMemoryType));
        ASSERT_NE(nullptr, usmMemAllocPoolsManager);
        EXPECT_FALSE(usmMemAllocPoolsManager->isInitialized());
        poolInfo0To4Kb = usmMemAllocPoolsManager->poolInfos[0];
        poolInfo4KbTo64Kb = usmMemAllocPoolsManager->poolInfos[1];
        poolInfo64KbTo2Mb = usmMemAllocPoolsManager->poolInfos[2];
        poolInfo2MbTo16Mb = usmMemAllocPoolsManager->poolInfos[3];
        poolInfo16MbTo64Mb = usmMemAllocPoolsManager->poolInfos[4];
        poolInfo64MbTo256Mb = usmMemAllocPoolsManager->poolInfos[5];
        svmManager = std::make_unique<MockSVMAllocsManager>(device->getMemoryManager(), false);
        mockMemoryManager = static_cast<MockMemoryManager *>(device->getMemoryManager());
        mockMemoryManager->failInDevicePoolWithError = failAllocation;
        if (InternalMemoryType::deviceUnifiedMemory == poolMemoryType) {
            mockMemoryManager->localMemorySupported[mockRootDeviceIndex] = true;
        }

        poolMemoryProperties = std::make_unique<SVMAllocsManager::UnifiedMemoryProperties>(poolMemoryType, MemoryConstants::pageSize2M, rootDeviceIndices, deviceBitfields);
        poolMemoryProperties->device = poolMemoryType == InternalMemoryType::deviceUnifiedMemory ? device : nullptr;
    }
    void TearDown() override {
        SVMMemoryAllocatorFixture::tearDown();
    }

    void *createAlloc(size_t size, SVMAllocsManager::UnifiedMemoryProperties &unifiedMemoryProperties) {
        void *ptr = nullptr;
        auto mockGa = std::make_unique<MockGraphicsAllocation>(mockRootDeviceIndex, nullptr, size);
        mockGa->gpuAddress = nextMockGraphicsAddress;
        mockGa->cpuPtr = reinterpret_cast<void *>(nextMockGraphicsAddress);
        if (InternalMemoryType::deviceUnifiedMemory == poolMemoryType) {
            mockGa->setAllocationType(AllocationType::svmGpu);
            mockMemoryManager->mockGa = mockGa.release();
            mockMemoryManager->returnMockGAFromDevicePool = true;
            ptr = svmManager->createUnifiedMemoryAllocation(size, unifiedMemoryProperties);
            mockMemoryManager->returnMockGAFromDevicePool = false;
        }
        if (InternalMemoryType::hostUnifiedMemory == poolMemoryType) {
            mockGa->setAllocationType(AllocationType::svmCpu);
            mockMemoryManager->mockGa = mockGa.release();
            mockMemoryManager->returnMockGAFromHostPool = true;
            ptr = svmManager->createHostUnifiedMemoryAllocation(size, unifiedMemoryProperties);
            mockMemoryManager->returnMockGAFromHostPool = false;
        }
        EXPECT_NE(nullptr, ptr);
        nextMockGraphicsAddress = alignUp(nextMockGraphicsAddress + size + 1, MemoryConstants::pageSize2M);
        return ptr;
    }
    const size_t poolSize = 2 * MemoryConstants::megaByte;
    std::unique_ptr<MockUsmMemAllocPoolsManager> usmMemAllocPoolsManager;
    std::unique_ptr<UltDeviceFactory> deviceFactory;
    Device *device;
    std::unique_ptr<MockSVMAllocsManager> svmManager;
    std::unique_ptr<SVMAllocsManager::UnifiedMemoryProperties> poolMemoryProperties;
    MockMemoryManager *mockMemoryManager;
    InternalMemoryType poolMemoryType;
    bool failAllocation;
    uint64_t nextMockGraphicsAddress = alignUp(std::numeric_limits<uint64_t>::max() - MemoryConstants::teraByte, MemoryConstants::pageSize2M);
    UsmMemAllocPoolsManager::PoolInfo poolInfo0To4Kb;
    UsmMemAllocPoolsManager::PoolInfo poolInfo4KbTo64Kb;
    UsmMemAllocPoolsManager::PoolInfo poolInfo64KbTo2Mb;
    UsmMemAllocPoolsManager::PoolInfo poolInfo2MbTo16Mb;
    UsmMemAllocPoolsManager::PoolInfo poolInfo16MbTo64Mb;
    UsmMemAllocPoolsManager::PoolInfo poolInfo64MbTo256Mb;
};

INSTANTIATE_TEST_SUITE_P(
    UnifiedMemoryPoolingManagerTestParameterized,
    UnifiedMemoryPoolingManagerTest,
    ::testing::Combine(
        ::testing::Values(InternalMemoryType::deviceUnifiedMemory, InternalMemoryType::hostUnifiedMemory),
        ::testing::Values(false)));

|
||||||
|
void *ptr = reinterpret_cast<void *>(0x1u);
|
||||||
|
const void *constPtr = const_cast<const void *>(ptr);
|
||||||
|
EXPECT_FALSE(usmMemAllocPoolsManager->freeSVMAlloc(constPtr, true));
|
||||||
|
EXPECT_EQ(nullptr, usmMemAllocPoolsManager->getPooledAllocationBasePtr(constPtr));
|
||||||
|
EXPECT_EQ(0u, usmMemAllocPoolsManager->getPooledAllocationSize(constPtr));
|
||||||
|
EXPECT_FALSE(usmMemAllocPoolsManager->recycleSVMAlloc(ptr, true));
|
||||||
|
EXPECT_EQ(0u, usmMemAllocPoolsManager->getOffsetInPool(constPtr));
|
||||||
|
EXPECT_EQ(nullptr, usmMemAllocPoolsManager->getPoolContainingAlloc(constPtr));
|
||||||
|
usmMemAllocPoolsManager->trim();
|
||||||
|
|
||||||
|
auto mockDevice = reinterpret_cast<MockDevice *>(usmMemAllocPoolsManager->device);
|
||||||
|
usmMemAllocPoolsManager->callBaseGetFreeMemory = true;
|
||||||
|
mockDevice->deviceInfo.localMemSize = 4 * MemoryConstants::gigaByte;
|
||||||
|
mockDevice->deviceInfo.globalMemSize = 8 * MemoryConstants::gigaByte;
|
||||||
|
mockMemoryManager->localMemAllocsSize[mockDevice->getRootDeviceIndex()].store(1 * MemoryConstants::gigaByte);
|
||||||
|
auto mutableHwInfo = mockDevice->getRootDeviceEnvironment().getMutableHardwareInfo();
|
||||||
|
EXPECT_EQ(mutableHwInfo, &mockDevice->getHardwareInfo());
|
||||||
|
mutableHwInfo->capabilityTable.isIntegratedDevice = false;
|
||||||
|
EXPECT_EQ(3 * MemoryConstants::gigaByte, usmMemAllocPoolsManager->getFreeMemory());
|
||||||
|
|
||||||
|
mutableHwInfo->capabilityTable.isIntegratedDevice = true;
|
||||||
|
EXPECT_EQ(7 * MemoryConstants::gigaByte, usmMemAllocPoolsManager->getFreeMemory());
|
||||||
|
|
||||||
|
usmMemAllocPoolsManager->cleanup();
|
||||||
|
}
|
||||||
|
|
||||||
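The two free-memory expectations at the end of this test follow directly from the mocked device sizes. A minimal sketch of the arithmetic, assuming getFreeMemory subtracts the tracked local allocations from the relevant capacity (freeMemoryEstimate is an illustrative helper, not the driver's exact formula):

#include <cstdint>

// Worked numbers behind the two getFreeMemory expectations above:
// free memory is estimated against local memory on discrete devices and
// against global memory on integrated ones, minus what is already allocated.
// With localMemSize = 4 GB, globalMemSize = 8 GB and 1 GB already allocated:
//   discrete   -> 4 GB - 1 GB = 3 GB
//   integrated -> 8 GB - 1 GB = 7 GB
inline uint64_t freeMemoryEstimate(bool isIntegratedDevice, uint64_t localMemSize,
                                   uint64_t globalMemSize, uint64_t localMemAllocsSize) {
    return (isIntegratedDevice ? globalMemSize : localMemSize) - localMemAllocsSize;
}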
TEST_P(UnifiedMemoryPoolingManagerTest, givenInitializationFailsForOneOfTheSmallPoolsWhenInitializingPoolsManagerThenPoolsAreCleanedUp) {
    mockMemoryManager->maxSuccessAllocatedGraphicsMemoryIndex = mockMemoryManager->successAllocatedGraphicsMemoryIndex + 2;
    EXPECT_FALSE(usmMemAllocPoolsManager->ensureInitialized(svmManager.get()));
    EXPECT_FALSE(usmMemAllocPoolsManager->isInitialized());
    ASSERT_EQ(1u, usmMemAllocPoolsManager->pools[poolInfo0To4Kb].size());
    EXPECT_FALSE(usmMemAllocPoolsManager->pools[poolInfo0To4Kb][0]->isInitialized());
    ASSERT_EQ(1u, usmMemAllocPoolsManager->pools[poolInfo4KbTo64Kb].size());
    EXPECT_FALSE(usmMemAllocPoolsManager->pools[poolInfo4KbTo64Kb][0]->isInitialized());
    ASSERT_EQ(1u, usmMemAllocPoolsManager->pools[poolInfo64KbTo2Mb].size());
    EXPECT_FALSE(usmMemAllocPoolsManager->pools[poolInfo64KbTo2Mb][0]->isInitialized());
}

TEST_P(UnifiedMemoryPoolingManagerTest, givenInitializedPoolsManagerWhenAllocatingNotGreaterThan2MBThenSmallPoolsAreUsed) {
    EXPECT_TRUE(usmMemAllocPoolsManager->ensureInitialized(svmManager.get()));
    EXPECT_TRUE(usmMemAllocPoolsManager->isInitialized());
    ASSERT_EQ(1u, usmMemAllocPoolsManager->pools[poolInfo0To4Kb].size());
    EXPECT_TRUE(usmMemAllocPoolsManager->pools[poolInfo0To4Kb][0]->isInitialized());
    ASSERT_EQ(1u, usmMemAllocPoolsManager->pools[poolInfo4KbTo64Kb].size());
    EXPECT_TRUE(usmMemAllocPoolsManager->pools[poolInfo4KbTo64Kb][0]->isInitialized());
    ASSERT_EQ(1u, usmMemAllocPoolsManager->pools[poolInfo64KbTo2Mb].size());
    EXPECT_TRUE(usmMemAllocPoolsManager->pools[poolInfo64KbTo2Mb][0]->isInitialized());
    EXPECT_EQ(20 * MemoryConstants::megaByte, usmMemAllocPoolsManager->totalSize);

    EXPECT_EQ(2 * MemoryConstants::megaByte, usmMemAllocPoolsManager->pools[poolInfo0To4Kb][0]->getPoolSize());
    EXPECT_EQ(2 * MemoryConstants::megaByte, usmMemAllocPoolsManager->pools[poolInfo4KbTo64Kb][0]->getPoolSize());
    EXPECT_EQ(16 * MemoryConstants::megaByte, usmMemAllocPoolsManager->pools[poolInfo64KbTo2Mb][0]->getPoolSize());

    EXPECT_TRUE(usmMemAllocPoolsManager->pools[poolInfo0To4Kb][0]->isEmpty());
    EXPECT_TRUE(usmMemAllocPoolsManager->pools[poolInfo4KbTo64Kb][0]->isEmpty());
    EXPECT_TRUE(usmMemAllocPoolsManager->pools[poolInfo64KbTo2Mb][0]->isEmpty());

    EXPECT_TRUE(usmMemAllocPoolsManager->pools[poolInfo0To4Kb][0]->sizeIsAllowed(1));
    EXPECT_TRUE(usmMemAllocPoolsManager->pools[poolInfo0To4Kb][0]->sizeIsAllowed(4 * MemoryConstants::kiloByte));
    EXPECT_FALSE(usmMemAllocPoolsManager->pools[poolInfo0To4Kb][0]->sizeIsAllowed(4 * MemoryConstants::kiloByte + 1));

    EXPECT_FALSE(usmMemAllocPoolsManager->pools[poolInfo4KbTo64Kb][0]->sizeIsAllowed(4 * MemoryConstants::kiloByte));
    EXPECT_TRUE(usmMemAllocPoolsManager->pools[poolInfo4KbTo64Kb][0]->sizeIsAllowed(4 * MemoryConstants::kiloByte + 1));
    EXPECT_TRUE(usmMemAllocPoolsManager->pools[poolInfo4KbTo64Kb][0]->sizeIsAllowed(64 * MemoryConstants::kiloByte));
    EXPECT_FALSE(usmMemAllocPoolsManager->pools[poolInfo4KbTo64Kb][0]->sizeIsAllowed(64 * MemoryConstants::kiloByte + 1));

    EXPECT_FALSE(usmMemAllocPoolsManager->pools[poolInfo64KbTo2Mb][0]->sizeIsAllowed(64 * MemoryConstants::kiloByte));
    EXPECT_TRUE(usmMemAllocPoolsManager->pools[poolInfo64KbTo2Mb][0]->sizeIsAllowed(64 * MemoryConstants::kiloByte + 1));
    EXPECT_TRUE(usmMemAllocPoolsManager->pools[poolInfo64KbTo2Mb][0]->sizeIsAllowed(2 * MemoryConstants::megaByte));
    EXPECT_FALSE(usmMemAllocPoolsManager->pools[poolInfo64KbTo2Mb][0]->sizeIsAllowed(2 * MemoryConstants::megaByte + 1));

    auto firstPoolAlloc1B = usmMemAllocPoolsManager->createUnifiedMemoryAllocation(1u, *poolMemoryProperties.get());
    EXPECT_NE(nullptr, firstPoolAlloc1B);
    EXPECT_EQ(firstPoolAlloc1B, usmMemAllocPoolsManager->getPooledAllocationBasePtr(firstPoolAlloc1B));
    EXPECT_EQ(1u, usmMemAllocPoolsManager->getPooledAllocationSize(firstPoolAlloc1B));
    EXPECT_TRUE(usmMemAllocPoolsManager->pools[poolInfo0To4Kb][0]->isInPool(firstPoolAlloc1B));
    EXPECT_EQ(20 * MemoryConstants::megaByte, usmMemAllocPoolsManager->totalSize);
    EXPECT_TRUE(usmMemAllocPoolsManager->freeSVMAlloc(firstPoolAlloc1B, true));

    auto firstPoolAlloc4KB = usmMemAllocPoolsManager->createUnifiedMemoryAllocation(4 * MemoryConstants::kiloByte, *poolMemoryProperties.get());
    EXPECT_NE(nullptr, firstPoolAlloc4KB);
    EXPECT_EQ(firstPoolAlloc4KB, usmMemAllocPoolsManager->getPooledAllocationBasePtr(firstPoolAlloc4KB));
    EXPECT_EQ(4 * MemoryConstants::kiloByte, usmMemAllocPoolsManager->getPooledAllocationSize(firstPoolAlloc4KB));
    EXPECT_TRUE(usmMemAllocPoolsManager->pools[poolInfo0To4Kb][0]->isInPool(firstPoolAlloc4KB));
    EXPECT_EQ(20 * MemoryConstants::megaByte, usmMemAllocPoolsManager->totalSize);
    EXPECT_TRUE(usmMemAllocPoolsManager->freeSVMAlloc(firstPoolAlloc4KB, true));

    auto secondPoolAlloc4KB1B = usmMemAllocPoolsManager->createUnifiedMemoryAllocation(4 * MemoryConstants::kiloByte + 1, *poolMemoryProperties.get());
    EXPECT_NE(nullptr, secondPoolAlloc4KB1B);
    EXPECT_EQ(secondPoolAlloc4KB1B, usmMemAllocPoolsManager->getPooledAllocationBasePtr(secondPoolAlloc4KB1B));
    EXPECT_EQ(4 * MemoryConstants::kiloByte + 1, usmMemAllocPoolsManager->getPooledAllocationSize(secondPoolAlloc4KB1B));
    EXPECT_TRUE(usmMemAllocPoolsManager->pools[poolInfo4KbTo64Kb][0]->isInPool(secondPoolAlloc4KB1B));
    EXPECT_EQ(20 * MemoryConstants::megaByte, usmMemAllocPoolsManager->totalSize);
    EXPECT_TRUE(usmMemAllocPoolsManager->freeSVMAlloc(secondPoolAlloc4KB1B, true));

    auto secondPoolAlloc64KB = usmMemAllocPoolsManager->createUnifiedMemoryAllocation(64 * MemoryConstants::kiloByte, *poolMemoryProperties.get());
    EXPECT_NE(nullptr, secondPoolAlloc64KB);
    EXPECT_EQ(secondPoolAlloc64KB, usmMemAllocPoolsManager->getPooledAllocationBasePtr(secondPoolAlloc64KB));
    EXPECT_EQ(64 * MemoryConstants::kiloByte, usmMemAllocPoolsManager->getPooledAllocationSize(secondPoolAlloc64KB));
    EXPECT_TRUE(usmMemAllocPoolsManager->pools[poolInfo4KbTo64Kb][0]->isInPool(secondPoolAlloc64KB));
    EXPECT_EQ(20 * MemoryConstants::megaByte, usmMemAllocPoolsManager->totalSize);
    EXPECT_TRUE(usmMemAllocPoolsManager->freeSVMAlloc(secondPoolAlloc64KB, true));

    auto thirdPoolAlloc64KB1B = usmMemAllocPoolsManager->createUnifiedMemoryAllocation(64 * MemoryConstants::kiloByte + 1, *poolMemoryProperties.get());
    EXPECT_NE(nullptr, thirdPoolAlloc64KB1B);
    EXPECT_EQ(thirdPoolAlloc64KB1B, usmMemAllocPoolsManager->getPooledAllocationBasePtr(thirdPoolAlloc64KB1B));
    EXPECT_EQ(64 * MemoryConstants::kiloByte + 1, usmMemAllocPoolsManager->getPooledAllocationSize(thirdPoolAlloc64KB1B));
    EXPECT_TRUE(usmMemAllocPoolsManager->pools[poolInfo64KbTo2Mb][0]->isInPool(thirdPoolAlloc64KB1B));
    EXPECT_EQ(20 * MemoryConstants::megaByte, usmMemAllocPoolsManager->totalSize);
    EXPECT_TRUE(usmMemAllocPoolsManager->freeSVMAlloc(thirdPoolAlloc64KB1B, true));

    auto thirdPoolAlloc2MB = usmMemAllocPoolsManager->createUnifiedMemoryAllocation(2 * MemoryConstants::megaByte, *poolMemoryProperties.get());
    EXPECT_NE(nullptr, thirdPoolAlloc2MB);
    EXPECT_EQ(thirdPoolAlloc2MB, usmMemAllocPoolsManager->getPooledAllocationBasePtr(thirdPoolAlloc2MB));
    EXPECT_EQ(2 * MemoryConstants::megaByte, usmMemAllocPoolsManager->getPooledAllocationSize(thirdPoolAlloc2MB));
    EXPECT_TRUE(usmMemAllocPoolsManager->pools[poolInfo64KbTo2Mb][0]->isInPool(thirdPoolAlloc2MB));
    EXPECT_EQ(20 * MemoryConstants::megaByte, usmMemAllocPoolsManager->totalSize);

    for (auto i = 0u; i < 7; ++i) { // use all memory in third pool
        auto ptr = usmMemAllocPoolsManager->createUnifiedMemoryAllocation(2 * MemoryConstants::megaByte, *poolMemoryProperties.get());
        if (nullptr == ptr) {
            break;
        }
    }

    auto thirdPoolAlloc2MBOverCapacity = usmMemAllocPoolsManager->createUnifiedMemoryAllocation(2 * MemoryConstants::megaByte, *poolMemoryProperties.get());
    EXPECT_EQ(nullptr, thirdPoolAlloc2MBOverCapacity);

    usmMemAllocPoolsManager->cleanup();
}

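A quick consistency check on the constants exercised above: the three preallocated small pools alone account for the 20 MB totalSize the test asserts. A sketch of the arithmetic, using the pool sizes the getPoolSize expectations report (the names below are illustrative):

#include <cstddef>

// Sum of the preallocated small pools, matching the totalSize expectation.
constexpr size_t megaByte = 1024u * 1024u;
constexpr size_t preallocatedPoolsTotal = 2 * megaByte    // <= 4 KB pool
                                        + 2 * megaByte    // 4 KB - 64 KB pool
                                        + 16 * megaByte;  // 64 KB - 2 MB pool
static_assert(preallocatedPoolsTotal == 20 * megaByte,
              "matches the totalSize expectation in the test");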
TEST_P(UnifiedMemoryPoolingManagerTest, givenInitializedPoolsManagerWhenAllocatingGreaterThan2MBAndNotGreaterThan256BMThenBigPoolsAreUsed) {
    void *ptr = reinterpret_cast<void *>(0x1u);
    const void *constPtr = const_cast<const void *>(ptr);
    EXPECT_TRUE(usmMemAllocPoolsManager->ensureInitialized(svmManager.get()));
    EXPECT_TRUE(usmMemAllocPoolsManager->isInitialized());
    EXPECT_TRUE(usmMemAllocPoolsManager->ensureInitialized(svmManager.get()));

    EXPECT_EQ(0u, usmMemAllocPoolsManager->pools[poolInfo2MbTo16Mb].size());
    EXPECT_EQ(0u, usmMemAllocPoolsManager->pools[poolInfo16MbTo64Mb].size());
    EXPECT_EQ(0u, usmMemAllocPoolsManager->pools[poolInfo64MbTo256Mb].size());
    poolMemoryProperties->alignment = UsmMemAllocPool::chunkAlignment;
    const auto allocSize = 14 * MemoryConstants::megaByte;
    auto normalAlloc = createAlloc(allocSize, *poolMemoryProperties.get());
    EXPECT_NE(nullptr, normalAlloc);

    size_t freeMemoryThreshold = static_cast<size_t>((allocSize + usmMemAllocPoolsManager->totalSize) / UsmMemAllocPool::getPercentOfFreeMemoryForRecycling(poolMemoryType));
    const auto toleranceForFloatingPointArithmetic = static_cast<size_t>(1 / UsmMemAllocPool::getPercentOfFreeMemoryForRecycling(poolMemoryType));
    usmMemAllocPoolsManager->mockFreeMemory = freeMemoryThreshold - toleranceForFloatingPointArithmetic;
    EXPECT_FALSE(usmMemAllocPoolsManager->recycleSVMAlloc(normalAlloc, true));
    EXPECT_EQ(0u, usmMemAllocPoolsManager->pools[poolInfo2MbTo16Mb].size());

    const auto totalSizeStart = usmMemAllocPoolsManager->totalSize;
    usmMemAllocPoolsManager->mockFreeMemory = freeMemoryThreshold + toleranceForFloatingPointArithmetic;
    EXPECT_TRUE(usmMemAllocPoolsManager->recycleSVMAlloc(normalAlloc, true));
    EXPECT_EQ(1u, usmMemAllocPoolsManager->pools[poolInfo2MbTo16Mb].size());
    EXPECT_EQ(totalSizeStart + allocSize, usmMemAllocPoolsManager->totalSize);
    auto firstPool = reinterpret_cast<MockUsmMemAllocPool *>(usmMemAllocPoolsManager->pools[poolInfo2MbTo16Mb][0].get());
    EXPECT_EQ(2 * MemoryConstants::megaByte + 1, firstPool->minServicedSize);
    EXPECT_EQ(allocSize, firstPool->maxServicedSize);

    auto poolAlloc8MB = usmMemAllocPoolsManager->createUnifiedMemoryAllocation(8 * MemoryConstants::megaByte, *poolMemoryProperties.get());
    EXPECT_NE(nullptr, poolAlloc8MB);
    EXPECT_TRUE(firstPool->isInPool(poolAlloc8MB));
    EXPECT_EQ(8 * MemoryConstants::megaByte, usmMemAllocPoolsManager->getPooledAllocationSize(poolAlloc8MB));
    EXPECT_EQ(poolAlloc8MB, usmMemAllocPoolsManager->getPooledAllocationBasePtr(poolAlloc8MB));
    EXPECT_EQ(firstPool->getOffsetInPool(poolAlloc8MB), usmMemAllocPoolsManager->getOffsetInPool(poolAlloc8MB));

    auto allocationNotFitInPool = usmMemAllocPoolsManager->createUnifiedMemoryAllocation(8 * MemoryConstants::megaByte, *poolMemoryProperties.get());
    EXPECT_EQ(nullptr, allocationNotFitInPool);

    auto alloc64MB = createAlloc(64 * MemoryConstants::megaByte, *poolMemoryProperties.get());
    freeMemoryThreshold = static_cast<size_t>((64 * MemoryConstants::megaByte + usmMemAllocPoolsManager->totalSize) / UsmMemAllocPool::getPercentOfFreeMemoryForRecycling(poolMemoryType));
    usmMemAllocPoolsManager->mockFreeMemory = freeMemoryThreshold + toleranceForFloatingPointArithmetic;
    EXPECT_TRUE(usmMemAllocPoolsManager->recycleSVMAlloc(alloc64MB, true));
    auto poolAlloc64MB = usmMemAllocPoolsManager->createUnifiedMemoryAllocation(64 * MemoryConstants::megaByte, *poolMemoryProperties.get());
    EXPECT_NE(nullptr, poolAlloc64MB);
    auto secondPool = reinterpret_cast<MockUsmMemAllocPool *>(usmMemAllocPoolsManager->pools[poolInfo16MbTo64Mb][0].get());
    EXPECT_TRUE(secondPool->isInPool(poolAlloc64MB));
    EXPECT_EQ(64 * MemoryConstants::megaByte, usmMemAllocPoolsManager->getPooledAllocationSize(poolAlloc64MB));
    EXPECT_EQ(poolAlloc64MB, usmMemAllocPoolsManager->getPooledAllocationBasePtr(poolAlloc64MB));
    allocationNotFitInPool = usmMemAllocPoolsManager->createUnifiedMemoryAllocation(64 * MemoryConstants::megaByte, *poolMemoryProperties.get());
    EXPECT_EQ(nullptr, allocationNotFitInPool);
    EXPECT_EQ(nullptr, usmMemAllocPoolsManager->getPoolContainingAlloc(constPtr));

    auto alloc256MB = createAlloc(256 * MemoryConstants::megaByte, *poolMemoryProperties.get());
    freeMemoryThreshold = static_cast<size_t>((256 * MemoryConstants::megaByte + usmMemAllocPoolsManager->totalSize) / UsmMemAllocPool::getPercentOfFreeMemoryForRecycling(poolMemoryType));
    usmMemAllocPoolsManager->mockFreeMemory = freeMemoryThreshold + toleranceForFloatingPointArithmetic;
    EXPECT_TRUE(usmMemAllocPoolsManager->recycleSVMAlloc(alloc256MB, true));
    auto poolAlloc256MB = usmMemAllocPoolsManager->createUnifiedMemoryAllocation(256 * MemoryConstants::megaByte, *poolMemoryProperties.get());
    EXPECT_NE(nullptr, poolAlloc256MB);
    auto thirdPool = reinterpret_cast<MockUsmMemAllocPool *>(usmMemAllocPoolsManager->pools[poolInfo64MbTo256Mb][0].get());
    EXPECT_TRUE(thirdPool->isInPool(poolAlloc256MB));
    EXPECT_EQ(256 * MemoryConstants::megaByte, usmMemAllocPoolsManager->getPooledAllocationSize(poolAlloc256MB));
    EXPECT_EQ(poolAlloc256MB, usmMemAllocPoolsManager->getPooledAllocationBasePtr(poolAlloc256MB));
    allocationNotFitInPool = usmMemAllocPoolsManager->createUnifiedMemoryAllocation(256 * MemoryConstants::megaByte, *poolMemoryProperties.get());
    EXPECT_EQ(nullptr, allocationNotFitInPool);

    auto alloc256MBForTrim = createAlloc(256 * MemoryConstants::megaByte, *poolMemoryProperties.get());
    freeMemoryThreshold = static_cast<size_t>((256 * MemoryConstants::megaByte + usmMemAllocPoolsManager->totalSize) / UsmMemAllocPool::getPercentOfFreeMemoryForRecycling(poolMemoryType));
    usmMemAllocPoolsManager->mockFreeMemory = freeMemoryThreshold + toleranceForFloatingPointArithmetic;
    EXPECT_TRUE(usmMemAllocPoolsManager->recycleSVMAlloc(alloc256MBForTrim, true));
    EXPECT_EQ(1u, usmMemAllocPoolsManager->pools[poolInfo2MbTo16Mb].size());
    EXPECT_EQ(1u, usmMemAllocPoolsManager->pools[poolInfo16MbTo64Mb].size());
    EXPECT_EQ(2u, usmMemAllocPoolsManager->pools[poolInfo64MbTo256Mb].size());
    usmMemAllocPoolsManager->trim();
    EXPECT_EQ(1u, usmMemAllocPoolsManager->pools[poolInfo2MbTo16Mb].size());
    EXPECT_EQ(1u, usmMemAllocPoolsManager->pools[poolInfo16MbTo64Mb].size());
    EXPECT_EQ(1u, usmMemAllocPoolsManager->pools[poolInfo64MbTo256Mb].size());

    auto smallAlloc = createAlloc(2 * MemoryConstants::megaByte - 1, *poolMemoryProperties.get());
    EXPECT_NE(nullptr, smallAlloc);
    freeMemoryThreshold = static_cast<size_t>((2 * MemoryConstants::megaByte + usmMemAllocPoolsManager->totalSize) / UsmMemAllocPool::getPercentOfFreeMemoryForRecycling(poolMemoryType));
    usmMemAllocPoolsManager->mockFreeMemory = freeMemoryThreshold + toleranceForFloatingPointArithmetic;
    EXPECT_FALSE(usmMemAllocPoolsManager->recycleSVMAlloc(smallAlloc, true));

    auto bigAlloc = createAlloc(256 * MemoryConstants::megaByte + 1, *poolMemoryProperties.get());
    EXPECT_NE(nullptr, bigAlloc);
    freeMemoryThreshold = static_cast<size_t>((256 * MemoryConstants::megaByte + 1 + usmMemAllocPoolsManager->totalSize) / UsmMemAllocPool::getPercentOfFreeMemoryForRecycling(poolMemoryType));
    usmMemAllocPoolsManager->mockFreeMemory = freeMemoryThreshold + toleranceForFloatingPointArithmetic;
    EXPECT_FALSE(usmMemAllocPoolsManager->recycleSVMAlloc(bigAlloc, true));

    svmManager->freeSVMAlloc(smallAlloc);
    svmManager->freeSVMAlloc(bigAlloc);

    auto allocationOverMaxSize = usmMemAllocPoolsManager->createUnifiedMemoryAllocation(256 * MemoryConstants::megaByte + 1, *poolMemoryProperties.get());
    EXPECT_EQ(nullptr, allocationOverMaxSize);

    EXPECT_EQ(nullptr, usmMemAllocPoolsManager->getPoolContainingAlloc(constPtr));
    usmMemAllocPoolsManager->cleanup();
}
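The freeMemoryThreshold computations sprinkled through this last test all encode the same recycling criterion. A hedged sketch of that criterion (canRecycleIntoPools is an illustrative name; the driver's check may differ in detail):

#include <cstdint>

// Assumed form of the recycling guard the threshold arithmetic exercises:
// an allocation is absorbed into a reuse pool only while the pools' total
// footprint, including the candidate, stays within a small fraction of the
// currently free memory (0.08 for device USM, 0.02 for host USM).
inline bool canRecycleIntoPools(double percentOfFreeMemoryForRecycling,
                                uint64_t freeMemory,
                                uint64_t poolsTotalSize,
                                uint64_t allocSize) {
    return static_cast<double>(poolsTotalSize + allocSize) <=
           percentOfFreeMemoryForRecycling * static_cast<double>(freeMemory);
}

The test inverts this relation, computing freeMemoryThreshold = (allocSize + totalSize) / percent, so a mocked free-memory value just below the threshold must reject recycling and a value just above it must accept, with a tolerance of 1 / percent bytes for floating-point rounding. Consistent with the commit message, allocations larger than 256 MB are never recycled, and requests of 2 MB or less are served by the preallocated pools instead.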