fix: unblock recoverable page faults with vm bind on the xe KMD

Related-To: HSD-13011898606
Signed-off-by: Naklicki, Mateusz <mateusz.naklicki@intel.com>
This commit is contained in:
Naklicki, Mateusz 2024-08-08 15:39:03 +00:00 committed by Compute-Runtime-Automation
parent 4048cbc0c2
commit cc3e6d5055
13 changed files with 292 additions and 53 deletions

View File

@ -220,6 +220,9 @@ class BufferObject {
void requireExplicitLockedMemory(bool locked) { requiresLocked = locked; }
bool isExplicitLockedMemoryRequired() { return requiresLocked; }
// Marks whether this BO may be bound with the locked-memory requirement; cleared by the ENOMEM bind fallback.
void setIsLockable(bool lockable) { this->lockable = lockable; };
// True (default) unless a previous bind ran out of lockable memory for this BO.
bool isLockable() { return lockable; };
uint64_t peekPatIndex() const { return patIndex; }
void setPatIndex(uint64_t newPatIndex) { this->patIndex = newPatIndex; }
BOType peekBOType() const { return boType; }
@ -273,5 +276,6 @@ class BufferObject {
bool chunked = false;
bool isReused = false;
bool readOnlyGpuResource = false;
bool lockable = true;
};
} // namespace NEO

View File

@ -2435,7 +2435,7 @@ GraphicsAllocation *DrmMemoryManager::createSharedUnifiedMemoryAllocation(const
auto ioctlHelper = drm.getIoctlHelper();
const auto vmAdviseAttribute = ioctlHelper->getVmAdviseAtomicAttribute();
if (vmAdviseAttribute == 0) {
if (vmAdviseAttribute.has_value() && vmAdviseAttribute.value() == 0) {
return nullptr;
}
@ -2518,7 +2518,7 @@ GraphicsAllocation *DrmMemoryManager::createSharedUnifiedMemoryAllocation(const
std::unique_ptr<BufferObject, BufferObject::Deleter> bo(new BufferObject(allocationData.rootDeviceIndex, &drm, patIndex, handle, currentSize, maxOsContextCount));
if (!ioctlHelper->setVmBoAdvise(bo->peekHandle(), vmAdviseAttribute, nullptr)) {
if (vmAdviseAttribute.has_value() && !ioctlHelper->setVmBoAdvise(bo->peekHandle(), vmAdviseAttribute.value(), nullptr)) {
this->munmapFunction(cpuBasePointer, totalSizeToAlloc);
releaseGpuRange(reinterpret_cast<void *>(preferredAddress), totalSizeToAlloc, allocationData.rootDeviceIndex);
return nullptr;

View File

@ -1126,11 +1126,7 @@ void Drm::queryPageFaultSupport() {
return;
}
if (const auto paramId = ioctlHelper->getHasPageFaultParamId(); paramId) {
int support = 0;
const auto ret = getParamIoctl(*paramId, &support);
pageFaultSupported = (0 == ret) && (support > 0);
}
pageFaultSupported = this->ioctlHelper->isPageFaultSupported();
}
bool Drm::hasPageFaultSupport() const {
@ -1436,10 +1432,10 @@ int changeBufferObjectBinding(Drm *drm, OsContext *osContext, uint32_t vmHandleI
bool readOnlyResource = bo->isReadOnlyGpuResource();
if (drm->useVMBindImmediate()) {
bindMakeResident = bo->isExplicitResidencyRequired();
bindMakeResident = bo->isExplicitResidencyRequired() && bo->isLockable();
bindImmediate = true;
}
bool bindLock = bo->isExplicitLockedMemoryRequired();
bool bindLock = bo->isExplicitLockedMemoryRequired() && bo->isLockable();
flags |= ioctlHelper->getFlagsForVmBind(bindCapture, bindImmediate, bindMakeResident, bindLock, readOnlyResource);
}
@ -1533,8 +1529,14 @@ int changeBufferObjectBinding(Drm *drm, OsContext *osContext, uint32_t vmHandleI
// Binds the buffer object into the VM identified by vmHandleId on the given context.
// On failure it evicts unused allocations and retries; if that retry still reports
// ENOMEM, the BO is marked non-lockable and the bind is attempted one final time
// without the locked-memory requirement. Returns the last bind's return code.
int Drm::bindBufferObject(OsContext *osContext, uint32_t vmHandleId, BufferObject *bo) {
auto ret = changeBufferObjectBinding(this, osContext, vmHandleId, bo, true);
if (ret != 0) {
// Reset errno first so a stale ENOMEM from an earlier call cannot trigger the fallback below.
errno = 0;
static_cast<DrmMemoryOperationsHandlerBind *>(this->rootDeviceEnvironment.memoryOperationsInterface.get())->evictUnusedAllocations(false, false);
ret = changeBufferObjectBinding(this, osContext, vmHandleId, bo, true);
if (getErrno() == ENOMEM) {
DEBUG_BREAK_IF(true);
// Out of lockable memory: drop the lock requirement for this BO and retry the bind.
bo->setIsLockable(false);
ret = changeBufferObjectBinding(this, osContext, vmHandleId, bo, true);
}
}
return ret;
}

View File

@ -127,7 +127,7 @@ class IoctlHelper {
virtual uint16_t getWaitUserFenceSoftFlag() = 0;
virtual int execBuffer(ExecBuffer *execBuffer, uint64_t completionGpuAddress, TaskCountType counterValue) = 0;
virtual bool completionFenceExtensionSupported(const bool isVmBindAvailable) = 0;
virtual std::optional<DrmParam> getHasPageFaultParamId() = 0;
virtual bool isPageFaultSupported() = 0;
virtual std::unique_ptr<uint8_t[]> createVmControlExtRegion(const std::optional<MemoryClassInstance> &regionInstanceClass) = 0;
virtual uint32_t getFlagsForVmCreate(bool disableScratch, bool enablePageFault, bool useVmBind) = 0;
virtual uint32_t createContextWithAccessCounters(GemContextCreateExt &gcc) = 0;
@ -135,7 +135,7 @@ class IoctlHelper {
virtual void fillVmBindExtSetPat(VmBindExtSetPatT &vmBindExtSetPat, uint64_t patIndex, uint64_t nextExtension) = 0;
virtual void fillVmBindExtUserFence(VmBindExtUserFenceT &vmBindExtUserFence, uint64_t fenceAddress, uint64_t fenceValue, uint64_t nextExtension) = 0;
virtual void setVmBindUserFence(VmBindParams &vmBind, VmBindExtUserFenceT vmBindUserFence) = 0;
virtual uint32_t getVmAdviseAtomicAttribute() = 0;
virtual std::optional<uint32_t> getVmAdviseAtomicAttribute() = 0;
virtual int vmBind(const VmBindParams &vmBindParams) = 0;
virtual int vmUnbind(const VmBindParams &vmBindParams) = 0;
virtual int getResetStats(ResetStats &resetStats, uint32_t *status, ResetStatsFault *resetStatsFault) = 0;
@ -283,7 +283,7 @@ class IoctlHelperUpstream : public IoctlHelperI915 {
uint16_t getWaitUserFenceSoftFlag() override;
int execBuffer(ExecBuffer *execBuffer, uint64_t completionGpuAddress, TaskCountType counterValue) override;
bool completionFenceExtensionSupported(const bool isVmBindAvailable) override;
std::optional<DrmParam> getHasPageFaultParamId() override;
bool isPageFaultSupported() override;
std::unique_ptr<uint8_t[]> createVmControlExtRegion(const std::optional<MemoryClassInstance> &regionInstanceClass) override;
uint32_t getFlagsForVmCreate(bool disableScratch, bool enablePageFault, bool useVmBind) override;
uint32_t createContextWithAccessCounters(GemContextCreateExt &gcc) override;
@ -291,7 +291,7 @@ class IoctlHelperUpstream : public IoctlHelperI915 {
void fillVmBindExtSetPat(VmBindExtSetPatT &vmBindExtSetPat, uint64_t patIndex, uint64_t nextExtension) override;
void fillVmBindExtUserFence(VmBindExtUserFenceT &vmBindExtUserFence, uint64_t fenceAddress, uint64_t fenceValue, uint64_t nextExtension) override;
void setVmBindUserFence(VmBindParams &vmBind, VmBindExtUserFenceT vmBindUserFence) override;
uint32_t getVmAdviseAtomicAttribute() override;
std::optional<uint32_t> getVmAdviseAtomicAttribute() override;
int vmBind(const VmBindParams &vmBindParams) override;
int vmUnbind(const VmBindParams &vmBindParams) override;
int getResetStats(ResetStats &resetStats, uint32_t *status, ResetStatsFault *resetStatsFault) override;
@ -360,7 +360,7 @@ class IoctlHelperPrelim20 : public IoctlHelperI915 {
uint16_t getWaitUserFenceSoftFlag() override;
int execBuffer(ExecBuffer *execBuffer, uint64_t completionGpuAddress, TaskCountType counterValue) override;
bool completionFenceExtensionSupported(const bool isVmBindAvailable) override;
std::optional<DrmParam> getHasPageFaultParamId() override;
bool isPageFaultSupported() override;
std::unique_ptr<uint8_t[]> createVmControlExtRegion(const std::optional<MemoryClassInstance> &regionInstanceClass) override;
uint32_t getFlagsForVmCreate(bool disableScratch, bool enablePageFault, bool useVmBind) override;
uint32_t createContextWithAccessCounters(GemContextCreateExt &gcc) override;
@ -368,7 +368,7 @@ class IoctlHelperPrelim20 : public IoctlHelperI915 {
void fillVmBindExtSetPat(VmBindExtSetPatT &vmBindExtSetPat, uint64_t patIndex, uint64_t nextExtension) override;
void fillVmBindExtUserFence(VmBindExtUserFenceT &vmBindExtUserFence, uint64_t fenceAddress, uint64_t fenceValue, uint64_t nextExtension) override;
void setVmBindUserFence(VmBindParams &vmBind, VmBindExtUserFenceT vmBindUserFence) override;
uint32_t getVmAdviseAtomicAttribute() override;
std::optional<uint32_t> getVmAdviseAtomicAttribute() override;
int vmBind(const VmBindParams &vmBindParams) override;
int vmUnbind(const VmBindParams &vmBindParams) override;
int getResetStats(ResetStats &resetStats, uint32_t *status, ResetStatsFault *resetStatsFault) override;

View File

@ -554,8 +554,19 @@ int IoctlHelperPrelim20::queryDistances(std::vector<QueryItem> &queryItems, std:
return ret;
}
std::optional<DrmParam> IoctlHelperPrelim20::getHasPageFaultParamId() {
return DrmParam::paramHasPageFault;
// Queries the prelim i915 KMD for recoverable page fault support via DRM_IOCTL_I915_GETPARAM.
// Returns true only when the ioctl succeeds (retVal == 0) and the reported value is positive.
bool IoctlHelperPrelim20::isPageFaultSupported() {
int pagefaultSupport{};
GetParam getParam{};
getParam.param = PRELIM_I915_PARAM_HAS_PAGE_FAULT;
getParam.value = &pagefaultSupport;
int retVal = ioctl(DrmIoctl::getparam, &getParam);
if (debugManager.flags.PrintIoctlEntries.get()) {
// "% d" uses the printf space flag, producing e.g. "retCode: 0" — the DrmQueryTest output test depends on this exact format.
printf("DRM_IOCTL_I915_GETPARAM: param: PRELIM_I915_PARAM_HAS_PAGE_FAULT, output value: %d, retCode:% d\n",
*getParam.value,
retVal);
}
return (retVal == 0) && (pagefaultSupport > 0);
};
bool IoctlHelperPrelim20::isEuStallSupported() {
@ -732,7 +743,7 @@ EngineCapabilities::Flags IoctlHelperPrelim20::getEngineCapabilitiesFlags(uint64
return flags;
}
uint32_t IoctlHelperPrelim20::getVmAdviseAtomicAttribute() {
std::optional<uint32_t> IoctlHelperPrelim20::getVmAdviseAtomicAttribute() {
switch (NEO::debugManager.flags.SetVmAdviseAtomicAttribute.get()) {
case 0:
return PRELIM_I915_VM_ADVISE_ATOMIC_NONE;

View File

@ -191,8 +191,8 @@ bool IoctlHelperUpstream::completionFenceExtensionSupported(const bool isVmBindA
return false;
}
std::optional<DrmParam> IoctlHelperUpstream::getHasPageFaultParamId() {
return std::nullopt;
// The upstream i915 interface exposes no recoverable page fault capability query.
bool IoctlHelperUpstream::isPageFaultSupported() {
constexpr bool pageFaultCapability = false;
return pageFaultCapability;
}
bool IoctlHelperUpstream::isEuStallSupported() {
@ -239,7 +239,7 @@ void IoctlHelperUpstream::fillVmBindExtUserFence(VmBindExtUserFenceT &vmBindExtU
void IoctlHelperUpstream::setVmBindUserFence(VmBindParams &vmBind, VmBindExtUserFenceT vmBindUserFence){};
uint32_t IoctlHelperUpstream::getVmAdviseAtomicAttribute() {
// Upstream reports the attribute as present with value 0 (no atomic-advise distinction).
std::optional<uint32_t> IoctlHelperUpstream::getVmAdviseAtomicAttribute() {
return std::optional<uint32_t>{0u};
}

View File

@ -706,6 +706,7 @@ void IoctlHelperXe::setupXeWaitUserFenceStruct(void *arg, uint32_t ctxId, uint16
}
int IoctlHelperXe::xeWaitUserFence(uint32_t ctxId, uint16_t op, uint64_t addr, uint64_t value, int64_t timeout, bool userInterrupt, uint32_t externalInterruptId, GraphicsAllocation *allocForInterruptWait) {
UNRECOVERABLE_IF(addr == 0x0)
drm_xe_wait_user_fence waitUserFence = {};
setupXeWaitUserFenceStruct(&waitUserFence, ctxId, op, addr, value, timeout);
@ -748,11 +749,14 @@ std::optional<MemoryClassInstance> IoctlHelperXe::getPreferredLocationRegion(Pre
bool IoctlHelperXe::setVmBoAdvise(int32_t handle, uint32_t attribute, void *region) {
xeLog(" -> IoctlHelperXe::%s\n", __FUNCTION__);
return false;
// There is no vmAdvise attribute in Xe, so return success
return true;
}
bool IoctlHelperXe::setVmBoAdviseForChunking(int32_t handle, uint64_t start, uint64_t length, uint32_t attribute, void *region) {
return false;
xeLog(" -> IoctlHelperXe::%s\n", __FUNCTION__);
// There is no vmAdvise attribute in Xe, so return success
return true;
}
bool IoctlHelperXe::setVmPrefetch(uint64_t start, uint64_t length, uint32_t region, uint32_t vmId) {
@ -866,9 +870,10 @@ int IoctlHelperXe::queryDistances(std::vector<QueryItem> &queryItems, std::vecto
return 0;
}
std::optional<DrmParam> IoctlHelperXe::getHasPageFaultParamId() {
xeLog(" -> IoctlHelperXe::%s\n", __FUNCTION__);
return {};
// The legacy i915-style page fault query does not apply to the Xe KMD; always reports false.
bool IoctlHelperXe::isPageFaultSupported() {
constexpr bool supported = false;
xeLog(" -> IoctlHelperXe::%s %d\n", __FUNCTION__, supported);
return supported;
}
uint32_t IoctlHelperXe::getEuStallFdParameter() {
@ -919,9 +924,10 @@ void IoctlHelperXe::setVmBindUserFence(VmBindParams &vmBind, VmBindExtUserFenceT
return;
}
uint32_t IoctlHelperXe::getVmAdviseAtomicAttribute() {
std::optional<uint32_t> IoctlHelperXe::getVmAdviseAtomicAttribute() {
xeLog(" -> IoctlHelperXe::%s\n", __FUNCTION__);
// Xe has no vmAdvise attribute, so report "not present" rather than a dummy value.
return std::nullopt;
}
int IoctlHelperXe::vmBind(const VmBindParams &vmBindParams) {
@ -1280,21 +1286,11 @@ int IoctlHelperXe::xeVmBind(const VmBindParams &vmBindParams, bool isBind) {
}
if (index != invalidIndex) {
drm_xe_sync sync[1] = {};
sync[0].type = DRM_XE_SYNC_TYPE_USER_FENCE;
sync[0].flags = DRM_XE_SYNC_FLAG_SIGNAL;
auto xeBindExtUserFence = reinterpret_cast<UserFenceExtension *>(vmBindParams.userFence);
UNRECOVERABLE_IF(!xeBindExtUserFence);
UNRECOVERABLE_IF(xeBindExtUserFence->tag != UserFenceExtension::tagValue);
sync[0].addr = xeBindExtUserFence->addr;
sync[0].timeline_value = xeBindExtUserFence->value;
drm_xe_vm_bind bind = {};
bind.vm_id = vmBindParams.vmId;
bind.num_binds = 1;
bind.num_syncs = 1;
bind.syncs = reinterpret_cast<uintptr_t>(&sync);
bind.num_binds = 1;
bind.bind.range = vmBindParams.length;
bind.bind.addr = gmmHelper->decanonize(vmBindParams.start);
bind.bind.obj_offset = vmBindParams.offset;
@ -1302,6 +1298,18 @@ int IoctlHelperXe::xeVmBind(const VmBindParams &vmBindParams, bool isBind) {
bind.bind.extensions = vmBindParams.extensions;
bind.bind.flags = static_cast<uint32_t>(vmBindParams.flags);
UNRECOVERABLE_IF(vmBindParams.userFence == 0x0);
drm_xe_sync sync[1] = {};
auto xeBindExtUserFence = reinterpret_cast<UserFenceExtension *>(vmBindParams.userFence);
UNRECOVERABLE_IF(xeBindExtUserFence->tag != UserFenceExtension::tagValue);
sync[0].type = DRM_XE_SYNC_TYPE_USER_FENCE;
sync[0].flags = DRM_XE_SYNC_FLAG_SIGNAL;
sync[0].addr = xeBindExtUserFence->addr;
sync[0].timeline_value = xeBindExtUserFence->value;
bind.syncs = reinterpret_cast<uintptr_t>(&sync);
if (isBind) {
bind.bind.op = DRM_XE_VM_BIND_OP_MAP;
bind.bind.obj = vmBindParams.handle;
@ -1658,6 +1666,11 @@ void IoctlHelperXe::querySupportedFeatures() {
struct drm_xe_vm_create vmCreate = {};
auto ret = IoctlHelper::ioctl(DrmIoctl::gemVmCreate, &vmCreate);
if (ret != 0) {
// if device is already in fault mode it may fail, need to retry with proper flags
vmCreate.flags = DRM_XE_VM_CREATE_FLAG_LR_MODE | DRM_XE_VM_CREATE_FLAG_FAULT_MODE;
ret = IoctlHelper::ioctl(DrmIoctl::gemVmCreate, &vmCreate);
}
DEBUG_BREAK_IF(ret != 0);
auto checkVmBindFlagSupport = [&](uint32_t flag) -> bool {
@ -1688,5 +1701,21 @@ void IoctlHelperXe::querySupportedFeatures() {
vmDestroy.vm_id = vmCreate.vm_id;
ret = IoctlHelper::ioctl(DrmIoctl::gemVmDestroy, &vmDestroy);
DEBUG_BREAK_IF(ret != 0);
auto checkVmCreateFlagsSupport = [&](uint32_t flags) -> bool {
struct drm_xe_vm_create vmCreate = {};
vmCreate.flags = flags;
ret = IoctlHelper::ioctl(DrmIoctl::gemVmCreate, &vmCreate);
if (ret == 0) {
struct drm_xe_vm_destroy vmDestroy = {};
vmDestroy.vm_id = vmCreate.vm_id;
ret = IoctlHelper::ioctl(DrmIoctl::gemVmDestroy, &vmDestroy);
DEBUG_BREAK_IF(ret != 0);
return true;
}
return false;
};
supportedFeatures.flags.pageFault = checkVmCreateFlagsSupport(DRM_XE_VM_CREATE_FLAG_LR_MODE | DRM_XE_VM_CREATE_FLAG_FAULT_MODE);
};
} // namespace NEO

View File

@ -69,7 +69,7 @@ class IoctlHelperXe : public IoctlHelper {
uint16_t getWaitUserFenceSoftFlag() override;
int execBuffer(ExecBuffer *execBuffer, uint64_t completionGpuAddress, TaskCountType counterValue) override;
bool completionFenceExtensionSupported(const bool isVmBindAvailable) override;
std::optional<DrmParam> getHasPageFaultParamId() override;
bool isPageFaultSupported() override;
std::unique_ptr<uint8_t[]> createVmControlExtRegion(const std::optional<MemoryClassInstance> &regionInstanceClass) override;
uint32_t getFlagsForVmCreate(bool disableScratch, bool enablePageFault, bool useVmBind) override;
uint32_t createContextWithAccessCounters(GemContextCreateExt &gcc) override;
@ -77,7 +77,7 @@ class IoctlHelperXe : public IoctlHelper {
void fillVmBindExtSetPat(VmBindExtSetPatT &vmBindExtSetPat, uint64_t patIndex, uint64_t nextExtension) override;
void fillVmBindExtUserFence(VmBindExtUserFenceT &vmBindExtUserFence, uint64_t fenceAddress, uint64_t fenceValue, uint64_t nextExtension) override;
void setVmBindUserFence(VmBindParams &vmBind, VmBindExtUserFenceT vmBindUserFence) override;
uint32_t getVmAdviseAtomicAttribute() override;
std::optional<uint32_t> getVmAdviseAtomicAttribute() override;
int vmBind(const VmBindParams &vmBindParams) override;
int vmUnbind(const VmBindParams &vmBindParams) override;
int getResetStats(ResetStats &resetStats, uint32_t *status, ResetStatsFault *resetStatsFault) override;
@ -214,7 +214,8 @@ class IoctlHelperXe : public IoctlHelper {
struct {
uint32_t vmBindReadOnly : 1;
uint32_t vmBindImmediate : 1;
uint32_t reserved : 30;
uint32_t pageFault : 1;
uint32_t reserved : 29;
} flags;
uint32_t allFlags = 0;
};

View File

@ -50,6 +50,12 @@ class MockIoctlHelper : public IoctlHelperPrelim20 {
return getDrmParamValueResult;
}
// Test hook: when callBaseVmAdviseAtomicAttribute is set, defers to the real prelim
// implementation; otherwise returns the injected vmAdviseAtomicAttribute value.
std::optional<uint32_t> getVmAdviseAtomicAttribute() override {
if (callBaseVmAdviseAtomicAttribute)
return IoctlHelperPrelim20::getVmAdviseAtomicAttribute();
return vmAdviseAtomicAttribute;
}
bool releaseInterrupt(uint32_t handle) override {
releaseInterruptCalled++;
latestReleaseInterruptHandle = handle;
@ -84,5 +90,7 @@ class MockIoctlHelper : public IoctlHelperPrelim20 {
uint32_t latestReleaseInterruptHandle = InterruptId::notUsed;
bool releaseInterruptResult = true;
bool callBaseVmAdviseAtomicAttribute = true;
std::optional<uint32_t> vmAdviseAtomicAttribute{};
};
} // namespace NEO

View File

@ -1136,3 +1136,53 @@ TEST(DrmBufferObjectHandleWrapperTest, GivenWrapperWhenMoveConstructingAnotherOb
EXPECT_EQ(2, secondBoHandleWrapper.controlBlock->refCount);
}
// Verifies the ENOMEM bind fallback: when a vm-bind fails and errno reports ENOMEM,
// Drm::bindBufferObject must mark the BO non-lockable before its final retry.
TEST_F(DrmBufferObjectTest, givenDrmWhenBindOperationFailsWithENOMEMThenBindWithoutLockingIsTried) {
struct DrmMockENOMEMFail : public DrmMock {
DrmMockENOMEMFail(RootDeviceEnvironment &rootDeviceEnvironment)
: DrmMock(rootDeviceEnvironment) {}
// Force every errno query to report ENOMEM so the fallback path always triggers.
int getErrno() override { return ENOMEM; }
};
auto executionEnvironment = new ExecutionEnvironment;
executionEnvironment->setDebuggingMode(NEO::DebuggingMode::online);
executionEnvironment->prepareRootDeviceEnvironments(1);
executionEnvironment->rootDeviceEnvironments[0]->setHwInfoAndInitHelpers(defaultHwInfo.get());
executionEnvironment->rootDeviceEnvironments[0]->initGmm();
executionEnvironment->rootDeviceEnvironments[0]->osInterface = std::make_unique<OSInterface>();
auto drm = new DrmMockENOMEMFail(*executionEnvironment->rootDeviceEnvironments[0]);
drm->requirePerContextVM = false;
drm->isVMBindImmediateSupported = true;
auto ioctlHelper = std::make_unique<MockIoctlHelper>(*drm);
// Make the underlying vm-bind ioctl fail so bindBufferObject exercises its retry logic.
ioctlHelper->vmBindResult = -1;
ioctlHelper->isWaitBeforeBindRequiredResult = true;
drm->ioctlHelper.reset(ioctlHelper.release());
executionEnvironment->rootDeviceEnvironments[0]->osInterface->setDriverModel(std::unique_ptr<DriverModel>(drm));
executionEnvironment->rootDeviceEnvironments[0]->memoryOperationsInterface = DrmMemoryOperationsHandler::create(*drm, 0u, false);
uint64_t initFenceValue = 10u;
drm->fenceVal[0] = initFenceValue;
std::unique_ptr<Device> device(MockDevice::createWithExecutionEnvironment<MockDevice>(defaultHwInfo.get(), executionEnvironment, 0));
auto &engines = device->getExecutionEnvironment()->memoryManager->getRegisteredEngines(device->getRootDeviceIndex());
auto osContextCount = engines.size();
auto contextId = osContextCount / 2;
auto osContext = engines[contextId].osContext;
MockBufferObject bo(device->getRootDeviceIndex(), drm, 3, 0, 0, osContextCount);
// New BOs start lockable; the failed bind with ENOMEM must clear the flag.
EXPECT_EQ(bo.isLockable(), true);
drm->bindBufferObject(osContext, 0, &bo);
EXPECT_EQ(bo.isLockable(), false);
}
// Sanity check for the lockable accessor pair: setIsLockable must be reflected by isLockable.
TEST_F(DrmBufferObjectTest, givenBufferObjectWhenSetIsLockableIsCalledThenIsLockableIsSet) {
    MockExecutionEnvironment executionEnvironment(defaultHwInfo.get());
    DrmMock drm(*(executionEnvironment.rootDeviceEnvironments[0].get()));
    MockBufferObject bo(0, &drm, 3, 0, 0, 1);
    bo.setIsLockable(false);
    EXPECT_EQ(false, bo.isLockable());
    bo.setIsLockable(true);
    EXPECT_EQ(true, bo.isLockable());
}

View File

@ -7992,3 +7992,52 @@ TEST_F(DrmMemoryManagerTest, givenUsmCompressionSupportedThenReturnFalse) {
debugManager.flags.RenderCompressedBuffersEnabled.set(1);
EXPECT_TRUE(memoryManager->usmCompressionSupported(device));
}
// An advise attribute that is present but zero must abort shared USM allocation (nullptr).
TEST_F(DrmMemoryManagerTest, givenVmAdviseAtomicAttributeEqualZeroWhenCreateSharedUnifiedMemoryAllocationIsCalledThenNullptrReturned) {
std::vector<MemoryRegion> regionInfo(1);
regionInfo[0].region = {drm_i915_gem_memory_class::I915_MEMORY_CLASS_SYSTEM, 0};
auto &drm = static_cast<DrmMockCustom &>(memoryManager->getDrm(mockRootDeviceIndex));
auto mockIoctlHelper = new MockIoctlHelper(*mock);
drm.memoryInfo.reset(new MemoryInfo(regionInfo, drm));
drm.ioctlHelper.reset(mockIoctlHelper);
AllocationData allocationData{};
allocationData.size = MemoryConstants::cacheLineSize;
allocationData.rootDeviceIndex = mockRootDeviceIndex;
allocationData.alignment = MemoryConstants::pageSize;
// Inject an attribute that has a value of 0 — the "present but unusable" case.
mockIoctlHelper->callBaseVmAdviseAtomicAttribute = false;
mockIoctlHelper->vmAdviseAtomicAttribute = 0;
auto sharedUSM = memoryManager->createSharedUnifiedMemoryAllocation(allocationData);
EXPECT_EQ(nullptr, sharedUSM);
}
// An absent (nullopt) advise attribute must NOT abort allocation — the advise step is skipped
// and the shared USM allocation succeeds (the Xe path, which has no vmAdvise attribute).
TEST_F(DrmMemoryManagerTest, givenVmAdviseAtomicAttributeNotPresentWhenCreateSharedUnifiedMemoryAllocationIsCalledThenAllocationIsCreatedSuccessfully) {
mock->ioctlExpected.gemWait = 1;
mock->ioctlExpected.gemClose = 1;
mock->ioctlExpected.gemCreateExt = 1;
mock->ioctlExpected.gemMmapOffset = 1;
std::vector<MemoryRegion> regionInfo(1);
regionInfo[0].region = {drm_i915_gem_memory_class::I915_MEMORY_CLASS_SYSTEM, 0};
auto &drm = static_cast<DrmMockCustom &>(memoryManager->getDrm(mockRootDeviceIndex));
auto mockIoctlHelper = new MockIoctlHelper(*mock);
drm.memoryInfo.reset(new MemoryInfo(regionInfo, drm));
drm.ioctlHelper.reset(mockIoctlHelper);
AllocationData allocationData{};
allocationData.size = MemoryConstants::cacheLineSize;
allocationData.rootDeviceIndex = mockRootDeviceIndex;
allocationData.alignment = MemoryConstants::pageSize;
// Inject "attribute not present" rather than a zero value.
mockIoctlHelper->callBaseVmAdviseAtomicAttribute = false;
mockIoctlHelper->vmAdviseAtomicAttribute = std::nullopt;
auto sharedUSM = memoryManager->createSharedUnifiedMemoryAllocation(allocationData);
EXPECT_NE(nullptr, sharedUSM);
memoryManager->freeGraphicsMemory(sharedUSM);
}

View File

@ -387,6 +387,48 @@ TEST(DrmQueryTest, givenPageFaultSupportEnabledWhenCallingQueryPageFaultSupportT
}
}
// With PrintIoctlEntries enabled, isPageFaultSupported must log the GETPARAM call in the
// exact documented format (including the "% d" space-flag output "retCode: 0").
TEST(DrmQueryTest, givenPrintIoctlDebugFlagSetWhenCallingQueryPageFaultSupportThenCaptureExpectedOutput) {
DebugManagerStateRestore restore;
debugManager.flags.PrintIoctlEntries.set(true);
auto executionEnvironment = std::make_unique<MockExecutionEnvironment>();
DrmQueryMock drm{*executionEnvironment->rootDeviceEnvironments[0]};
const auto &productHelper = executionEnvironment->rootDeviceEnvironments[0]->getHelper<ProductHelper>();
bool hasPageFaultSupport = true;
drm.context.hasPageFaultQueryValue = hasPageFaultSupport;
testing::internal::CaptureStdout(); // start capturing
drm.queryPageFaultSupport();
debugManager.flags.PrintIoctlEntries.set(false);
std::string outputString = testing::internal::GetCapturedStdout(); // stop capturing
// The log line is only emitted on products where the page fault query is actually issued.
if (productHelper.isPageFaultSupported()) {
std::string expectedString = "DRM_IOCTL_I915_GETPARAM: param: PRELIM_I915_PARAM_HAS_PAGE_FAULT, output value: 1, retCode: 0\n";
EXPECT_NE(std::string::npos, outputString.find(expectedString));
} else {
EXPECT_TRUE(outputString.empty());
}
}
// With PrintIoctlEntries disabled, the page fault query must stay silent on stdout.
TEST(DrmQueryTest, givenPrintIoctlDebugFlagNotSetWhenIsPageFaultSupportedCalledThenNoCapturedOutput) {
DebugManagerStateRestore restore;
debugManager.flags.PrintIoctlEntries.set(false);
auto executionEnvironment = std::make_unique<MockExecutionEnvironment>();
DrmQueryMock drm{*executionEnvironment->rootDeviceEnvironments[0]};
bool hasPageFaultSupport = true;
drm.context.hasPageFaultQueryValue = hasPageFaultSupport;
testing::internal::CaptureStdout(); // start capturing
drm.queryPageFaultSupport();
debugManager.flags.PrintIoctlEntries.set(false);
std::string outputString = testing::internal::GetCapturedStdout(); // stop capturing
EXPECT_TRUE(outputString.empty());
}
TEST(DrmQueryTest, WhenQueryPageFaultSupportFailsThenReturnFalse) {
auto executionEnvironment = std::make_unique<MockExecutionEnvironment>();
DrmQueryMock drm{*executionEnvironment->rootDeviceEnvironments[0]};

View File

@ -262,9 +262,9 @@ TEST(IoctlHelperXeTest, givenIoctlHelperXeWhenCallingAnyMethodThenDummyValueIsRe
EXPECT_EQ(std::nullopt, xeIoctlHelper->getPreferredLocationRegion(PreferredLocation::none, 0));
EXPECT_FALSE(xeIoctlHelper->setVmBoAdvise(0, 0, nullptr));
EXPECT_TRUE(xeIoctlHelper->setVmBoAdvise(0, 0, nullptr));
EXPECT_FALSE(xeIoctlHelper->setVmBoAdviseForChunking(0, 0, 0, 0, nullptr));
EXPECT_TRUE(xeIoctlHelper->setVmBoAdviseForChunking(0, 0, 0, 0, nullptr));
EXPECT_FALSE(xeIoctlHelper->isChunkingAvailable());
@ -285,7 +285,7 @@ TEST(IoctlHelperXeTest, givenIoctlHelperXeWhenCallingAnyMethodThenDummyValueIsRe
EXPECT_FALSE(xeIoctlHelper->completionFenceExtensionSupported(false));
EXPECT_EQ(std::nullopt, xeIoctlHelper->getHasPageFaultParamId());
EXPECT_EQ(false, xeIoctlHelper->isPageFaultSupported());
EXPECT_EQ(nullptr, xeIoctlHelper->createVmControlExtRegion({}));
@ -300,7 +300,7 @@ TEST(IoctlHelperXeTest, givenIoctlHelperXeWhenCallingAnyMethodThenDummyValueIsRe
VmBindExtUserFenceT vmBindExtUserFence{};
EXPECT_NO_THROW(xeIoctlHelper->fillVmBindExtUserFence(vmBindExtUserFence, 0, 0, 0));
EXPECT_EQ(0u, xeIoctlHelper->getVmAdviseAtomicAttribute());
EXPECT_EQ(std::nullopt, xeIoctlHelper->getVmAdviseAtomicAttribute());
VmBindParams vmBindParams{};
EXPECT_EQ(-1, xeIoctlHelper->vmBind(vmBindParams));
@ -2038,6 +2038,7 @@ TEST(IoctlHelperXeTest, givenMultipleBindInfosWhenVmBindIsCalledThenProperHandle
MockIoctlHelperXe::UserFenceExtension userFence{};
userFence.tag = userFence.tagValue;
userFence.addr = 0x1;
VmBindParams vmBindParams{};
vmBindParams.userFence = castToUint64(&userFence);
vmBindParams.handle = 0;
@ -2240,6 +2241,21 @@ struct DrmMockXeVmBind : public DrmMockXe {
}
return 0;
} break;
case DrmIoctl::gemVmCreate: {
auto vmCreate = static_cast<drm_xe_vm_create *>(arg);
if (deviceIsInFaultMode &&
((vmCreate->flags & DRM_XE_VM_CREATE_FLAG_LR_MODE) == 0 ||
(vmCreate->flags & DRM_XE_VM_CREATE_FLAG_FAULT_MODE) == 0)) {
return -EINVAL;
}
if ((vmCreate->flags & DRM_XE_VM_CREATE_FLAG_FAULT_MODE) == DRM_XE_VM_CREATE_FLAG_FAULT_MODE &&
(vmCreate->flags & DRM_XE_VM_CREATE_FLAG_LR_MODE) == DRM_XE_VM_CREATE_FLAG_LR_MODE &&
(!supportsRecoverablePageFault)) {
return -EINVAL;
}
return 0;
} break;
default:
return DrmMockXe::ioctl(request, arg);
@ -2247,6 +2263,9 @@ struct DrmMockXeVmBind : public DrmMockXe {
};
bool supportsBindImmediate = true;
bool supportsBindReadOnly = true;
bool supportsRecoverablePageFault = true;
bool deviceIsInFaultMode = false;
protected:
// Don't call directly, use the create() function
@ -2260,13 +2279,37 @@ TEST(IoctlHelperXeVmBindTest, whenInitializeIoctlHelperThenQueryBindFlagsSupport
for (const auto &bindImmediateSupport : ::testing::Bool()) {
for (const auto &bindReadOnlySupport : ::testing::Bool()) {
for (const auto &recoverablePageFault : ::testing::Bool()) {
drm->supportsBindImmediate = bindImmediateSupport;
drm->supportsBindReadOnly = bindReadOnlySupport;
drm->supportsRecoverablePageFault = recoverablePageFault;
auto xeIoctlHelper = std::make_unique<MockIoctlHelperXe>(*drm);
xeIoctlHelper->initialize();
EXPECT_EQ(xeIoctlHelper->supportedFeatures.flags.vmBindImmediate, bindImmediateSupport);
EXPECT_EQ(xeIoctlHelper->supportedFeatures.flags.vmBindReadOnly, bindReadOnlySupport);
EXPECT_EQ(xeIoctlHelper->supportedFeatures.flags.pageFault, recoverablePageFault);
}
}
}
}
drm->supportsBindImmediate = bindImmediateSupport;
drm->supportsBindReadOnly = bindReadOnlySupport;
auto xeIoctlHelper = std::make_unique<MockIoctlHelperXe>(*drm);
xeIoctlHelper->initialize();
EXPECT_EQ(xeIoctlHelper->supportedFeatures.flags.vmBindImmediate, bindImmediateSupport);
EXPECT_EQ(xeIoctlHelper->supportedFeatures.flags.vmBindReadOnly, bindReadOnlySupport);
// When the device is already in fault mode, the plain vm-create probe fails with -EINVAL
// and querySupportedFeatures must retry with LR_MODE | FAULT_MODE; all feature flags
// (bind immediate, bind read-only, page fault) must still be detected correctly.
TEST(IoctlHelperXeVmBindTest, givenDeviceInFaultModeWhenInitializeIoctlHelperThenQueryFeaturesIsSuccessful) {
auto executionEnvironment = std::make_unique<MockExecutionEnvironment>();
auto drm = DrmMockXeVmBind::create(*executionEnvironment->rootDeviceEnvironments[0]);
drm->deviceIsInFaultMode = true;
for (const auto &bindImmediateSupport : ::testing::Bool()) {
for (const auto &bindReadOnlySupport : ::testing::Bool()) {
for (const auto &recoverablePageFault : ::testing::Bool()) {
drm->supportsBindImmediate = bindImmediateSupport;
drm->supportsBindReadOnly = bindReadOnlySupport;
drm->supportsRecoverablePageFault = recoverablePageFault;
auto xeIoctlHelper = std::make_unique<MockIoctlHelperXe>(*drm);
xeIoctlHelper->initialize();
EXPECT_EQ(xeIoctlHelper->supportedFeatures.flags.vmBindImmediate, bindImmediateSupport);
EXPECT_EQ(xeIoctlHelper->supportedFeatures.flags.vmBindReadOnly, bindReadOnlySupport);
EXPECT_EQ(xeIoctlHelper->supportedFeatures.flags.pageFault, recoverablePageFault);
}
}
}
}