Revert "fix: unblock xekmd recoverable pagefaults vmbind"

This reverts commit cc3e6d5055.

Signed-off-by: Compute-Runtime-Validation <compute-runtime-validation@intel.com>
Author: Compute-Runtime-Validation
Date: 2024-08-09 13:53:39 +02:00
Committed by: Compute-Runtime-Automation
Parent: 775b14a7f6
Commit: a860adc457
13 changed files with 53 additions and 292 deletions

View File

@@ -220,9 +220,6 @@ class BufferObject {
void requireExplicitLockedMemory(bool locked) { requiresLocked = locked; }
bool isExplicitLockedMemoryRequired() { return requiresLocked; }
void setIsLockable(bool lockable) { this->lockable = lockable; };
bool isLockable() { return lockable; };
uint64_t peekPatIndex() const { return patIndex; }
void setPatIndex(uint64_t newPatIndex) { this->patIndex = newPatIndex; }
BOType peekBOType() const { return boType; }
@@ -276,6 +273,5 @@ class BufferObject {
bool chunked = false;
bool isReused = false;
bool readOnlyGpuResource = false;
bool lockable = true;
};
} // namespace NEO

View File

@@ -2435,7 +2435,7 @@ GraphicsAllocation *DrmMemoryManager::createSharedUnifiedMemoryAllocation(const
auto ioctlHelper = drm.getIoctlHelper();
const auto vmAdviseAttribute = ioctlHelper->getVmAdviseAtomicAttribute();
if (vmAdviseAttribute.has_value() && vmAdviseAttribute.value() == 0) {
if (vmAdviseAttribute == 0) {
return nullptr;
}
@@ -2518,7 +2518,7 @@ GraphicsAllocation *DrmMemoryManager::createSharedUnifiedMemoryAllocation(const
std::unique_ptr<BufferObject, BufferObject::Deleter> bo(new BufferObject(allocationData.rootDeviceIndex, &drm, patIndex, handle, currentSize, maxOsContextCount));
if (vmAdviseAttribute.has_value() && !ioctlHelper->setVmBoAdvise(bo->peekHandle(), vmAdviseAttribute.value(), nullptr)) {
if (!ioctlHelper->setVmBoAdvise(bo->peekHandle(), vmAdviseAttribute, nullptr)) {
this->munmapFunction(cpuBasePointer, totalSizeToAlloc);
releaseGpuRange(reinterpret_cast<void *>(preferredAddress), totalSizeToAlloc, allocationData.rootDeviceIndex);
return nullptr;
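
The reverted fix had widened getVmAdviseAtomicAttribute() to std::optional<uint32_t> so that "backend has no vm-advise attribute" (the Xe case) could be told apart from an attribute whose value happens to be 0; this hunk restores the plain uint32_t contract, in which 0 doubles as the "unsupported" sentinel. A minimal standalone sketch of the two calling conventions, with hypothetical providers standing in for the real IoctlHelper:

#include <cstdint>
#include <iostream>
#include <optional>

// Hypothetical stand-ins for IoctlHelper::getVmAdviseAtomicAttribute().
std::optional<uint32_t> adviseAttributeOptional() { return std::nullopt; } // e.g. Xe: no such attribute at all
uint32_t adviseAttributePlain() { return 0; }                              // restored contract: 0 means "unsupported"

int main() {
    // Reverted variant: "no attribute" and "attribute present but zero" are distinct states.
    if (const auto attr = adviseAttributeOptional(); attr.has_value() && attr.value() == 0) {
        std::cout << "attribute present but zero -> fail the allocation\n";
    }

    // Restored variant: a plain zero is treated as "not supported".
    if (adviseAttributePlain() == 0) {
        std::cout << "attribute zero (or missing) -> fail the allocation\n";
    }
    return 0;
}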

View File

@@ -1126,7 +1126,11 @@ void Drm::queryPageFaultSupport() {
return;
}
pageFaultSupported = this->ioctlHelper->isPageFaultSupported();
if (const auto paramId = ioctlHelper->getHasPageFaultParamId(); paramId) {
int support = 0;
const auto ret = getParamIoctl(*paramId, &support);
pageFaultSupported = (0 == ret) && (support > 0);
}
}
bool Drm::hasPageFaultSupport() const {
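
With this hunk the page-fault capability check moves back into Drm::queryPageFaultSupport(): the IoctlHelper only names the GETPARAM id (getHasPageFaultParamId, empty where the kernel has no such parameter) and the ioctl itself goes through getParamIoctl, instead of every helper answering isPageFaultSupported() on its own. A rough sketch of the restored query shape, with a hypothetical getParam callback standing in for Drm::getParamIoctl:

#include <cstdint>
#include <functional>
#include <optional>

// Hypothetical stand-in for Drm::getParamIoctl(): returns the ioctl return code
// and writes the queried value through the out pointer.
using GetParamFn = std::function<int(uint32_t param, int *value)>;

// Shape of the restored logic: only query when the helper exposes a parameter id,
// and count the feature as supported when the ioctl succeeds with a positive value.
bool queryPageFaultSupport(const GetParamFn &getParam, std::optional<uint32_t> hasPageFaultParamId) {
    if (!hasPageFaultParamId) {
        return false; // e.g. upstream i915 and Xe report no parameter id
    }
    int support = 0;
    const int ret = getParam(*hasPageFaultParamId, &support);
    return (ret == 0) && (support > 0);
}
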
@@ -1432,10 +1436,10 @@ int changeBufferObjectBinding(Drm *drm, OsContext *osContext, uint32_t vmHandleI
bool readOnlyResource = bo->isReadOnlyGpuResource();
if (drm->useVMBindImmediate()) {
bindMakeResident = bo->isExplicitResidencyRequired() && bo->isLockable();
bindMakeResident = bo->isExplicitResidencyRequired();
bindImmediate = true;
}
bool bindLock = bo->isExplicitLockedMemoryRequired() && bo->isLockable();
bool bindLock = bo->isExplicitLockedMemoryRequired();
flags |= ioctlHelper->getFlagsForVmBind(bindCapture, bindImmediate, bindMakeResident, bindLock, readOnlyResource);
}
@@ -1529,14 +1533,8 @@ int changeBufferObjectBinding(Drm *drm, OsContext *osContext, uint32_t vmHandleI
int Drm::bindBufferObject(OsContext *osContext, uint32_t vmHandleId, BufferObject *bo) {
auto ret = changeBufferObjectBinding(this, osContext, vmHandleId, bo, true);
if (ret != 0) {
errno = 0;
static_cast<DrmMemoryOperationsHandlerBind *>(this->rootDeviceEnvironment.memoryOperationsInterface.get())->evictUnusedAllocations(false, false);
ret = changeBufferObjectBinding(this, osContext, vmHandleId, bo, true);
if (getErrno() == ENOMEM) {
DEBUG_BREAK_IF(true);
bo->setIsLockable(false);
ret = changeBufferObjectBinding(this, osContext, vmHandleId, bo, true);
}
}
return ret;
}
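
This restores the simpler bind fallback: when a VM_BIND fails, evict unused allocations and retry once. The reverted fix went one step further, clearing errno before the retry and, if the retry still left errno at ENOMEM, marking the buffer object as not lockable and binding a third time. A simplified sketch of that escalation, with hypothetical callables standing in for the NEO plumbing:

#include <cerrno>
#include <functional>

struct FakeBo {
    bool lockable = true; // corresponds to BufferObject::setIsLockable()/isLockable()
};

// tryBind() is a hypothetical stand-in for changeBufferObjectBinding(): returns 0 on
// success and sets errno on failure; evictUnused() frees allocations not currently in use.
int bindWithFallback(FakeBo &bo,
                     const std::function<int(FakeBo &)> &tryBind,
                     const std::function<void()> &evictUnused) {
    int ret = tryBind(bo);
    if (ret != 0) {
        errno = 0;               // reverted fix: ensure a stale ENOMEM cannot leak into the check below
        evictUnused();
        ret = tryBind(bo);       // the restored code stops after this retry
        if (errno == ENOMEM) {   // reverted fix: lockable memory is exhausted,
            bo.lockable = false; // so drop the lockable requirement and bind once more
            ret = tryBind(bo);
        }
    }
    return ret;
}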

View File

@@ -127,7 +127,7 @@ class IoctlHelper {
virtual uint16_t getWaitUserFenceSoftFlag() = 0;
virtual int execBuffer(ExecBuffer *execBuffer, uint64_t completionGpuAddress, TaskCountType counterValue) = 0;
virtual bool completionFenceExtensionSupported(const bool isVmBindAvailable) = 0;
virtual bool isPageFaultSupported() = 0;
virtual std::optional<DrmParam> getHasPageFaultParamId() = 0;
virtual std::unique_ptr<uint8_t[]> createVmControlExtRegion(const std::optional<MemoryClassInstance> &regionInstanceClass) = 0;
virtual uint32_t getFlagsForVmCreate(bool disableScratch, bool enablePageFault, bool useVmBind) = 0;
virtual uint32_t createContextWithAccessCounters(GemContextCreateExt &gcc) = 0;
@@ -135,7 +135,7 @@ class IoctlHelper {
virtual void fillVmBindExtSetPat(VmBindExtSetPatT &vmBindExtSetPat, uint64_t patIndex, uint64_t nextExtension) = 0;
virtual void fillVmBindExtUserFence(VmBindExtUserFenceT &vmBindExtUserFence, uint64_t fenceAddress, uint64_t fenceValue, uint64_t nextExtension) = 0;
virtual void setVmBindUserFence(VmBindParams &vmBind, VmBindExtUserFenceT vmBindUserFence) = 0;
virtual std::optional<uint32_t> getVmAdviseAtomicAttribute() = 0;
virtual uint32_t getVmAdviseAtomicAttribute() = 0;
virtual int vmBind(const VmBindParams &vmBindParams) = 0;
virtual int vmUnbind(const VmBindParams &vmBindParams) = 0;
virtual int getResetStats(ResetStats &resetStats, uint32_t *status, ResetStatsFault *resetStatsFault) = 0;
@@ -283,7 +283,7 @@ class IoctlHelperUpstream : public IoctlHelperI915 {
uint16_t getWaitUserFenceSoftFlag() override;
int execBuffer(ExecBuffer *execBuffer, uint64_t completionGpuAddress, TaskCountType counterValue) override;
bool completionFenceExtensionSupported(const bool isVmBindAvailable) override;
bool isPageFaultSupported() override;
std::optional<DrmParam> getHasPageFaultParamId() override;
std::unique_ptr<uint8_t[]> createVmControlExtRegion(const std::optional<MemoryClassInstance> &regionInstanceClass) override;
uint32_t getFlagsForVmCreate(bool disableScratch, bool enablePageFault, bool useVmBind) override;
uint32_t createContextWithAccessCounters(GemContextCreateExt &gcc) override;
@@ -291,7 +291,7 @@ class IoctlHelperUpstream : public IoctlHelperI915 {
void fillVmBindExtSetPat(VmBindExtSetPatT &vmBindExtSetPat, uint64_t patIndex, uint64_t nextExtension) override;
void fillVmBindExtUserFence(VmBindExtUserFenceT &vmBindExtUserFence, uint64_t fenceAddress, uint64_t fenceValue, uint64_t nextExtension) override;
void setVmBindUserFence(VmBindParams &vmBind, VmBindExtUserFenceT vmBindUserFence) override;
std::optional<uint32_t> getVmAdviseAtomicAttribute() override;
uint32_t getVmAdviseAtomicAttribute() override;
int vmBind(const VmBindParams &vmBindParams) override;
int vmUnbind(const VmBindParams &vmBindParams) override;
int getResetStats(ResetStats &resetStats, uint32_t *status, ResetStatsFault *resetStatsFault) override;
@@ -360,7 +360,7 @@ class IoctlHelperPrelim20 : public IoctlHelperI915 {
uint16_t getWaitUserFenceSoftFlag() override;
int execBuffer(ExecBuffer *execBuffer, uint64_t completionGpuAddress, TaskCountType counterValue) override;
bool completionFenceExtensionSupported(const bool isVmBindAvailable) override;
bool isPageFaultSupported() override;
std::optional<DrmParam> getHasPageFaultParamId() override;
std::unique_ptr<uint8_t[]> createVmControlExtRegion(const std::optional<MemoryClassInstance> &regionInstanceClass) override;
uint32_t getFlagsForVmCreate(bool disableScratch, bool enablePageFault, bool useVmBind) override;
uint32_t createContextWithAccessCounters(GemContextCreateExt &gcc) override;
@@ -368,7 +368,7 @@ class IoctlHelperPrelim20 : public IoctlHelperI915 {
void fillVmBindExtSetPat(VmBindExtSetPatT &vmBindExtSetPat, uint64_t patIndex, uint64_t nextExtension) override;
void fillVmBindExtUserFence(VmBindExtUserFenceT &vmBindExtUserFence, uint64_t fenceAddress, uint64_t fenceValue, uint64_t nextExtension) override;
void setVmBindUserFence(VmBindParams &vmBind, VmBindExtUserFenceT vmBindUserFence) override;
std::optional<uint32_t> getVmAdviseAtomicAttribute() override;
uint32_t getVmAdviseAtomicAttribute() override;
int vmBind(const VmBindParams &vmBindParams) override;
int vmUnbind(const VmBindParams &vmBindParams) override;
int getResetStats(ResetStats &resetStats, uint32_t *status, ResetStatsFault *resetStatsFault) override;

View File

@@ -554,19 +554,8 @@ int IoctlHelperPrelim20::queryDistances(std::vector<QueryItem> &queryItems, std:
return ret;
}
bool IoctlHelperPrelim20::isPageFaultSupported() {
int pagefaultSupport{};
GetParam getParam{};
getParam.param = PRELIM_I915_PARAM_HAS_PAGE_FAULT;
getParam.value = &pagefaultSupport;
int retVal = ioctl(DrmIoctl::getparam, &getParam);
if (debugManager.flags.PrintIoctlEntries.get()) {
printf("DRM_IOCTL_I915_GETPARAM: param: PRELIM_I915_PARAM_HAS_PAGE_FAULT, output value: %d, retCode:% d\n",
*getParam.value,
retVal);
}
return (retVal == 0) && (pagefaultSupport > 0);
std::optional<DrmParam> IoctlHelperPrelim20::getHasPageFaultParamId() {
return DrmParam::paramHasPageFault;
};
bool IoctlHelperPrelim20::isEuStallSupported() {
@@ -743,7 +732,7 @@ EngineCapabilities::Flags IoctlHelperPrelim20::getEngineCapabilitiesFlags(uint64
return flags;
}
std::optional<uint32_t> IoctlHelperPrelim20::getVmAdviseAtomicAttribute() {
uint32_t IoctlHelperPrelim20::getVmAdviseAtomicAttribute() {
switch (NEO::debugManager.flags.SetVmAdviseAtomicAttribute.get()) {
case 0:
return PRELIM_I915_VM_ADVISE_ATOMIC_NONE;

View File

@@ -191,8 +191,8 @@ bool IoctlHelperUpstream::completionFenceExtensionSupported(const bool isVmBindA
return false;
}
bool IoctlHelperUpstream::isPageFaultSupported() {
return false;
std::optional<DrmParam> IoctlHelperUpstream::getHasPageFaultParamId() {
return std::nullopt;
};
bool IoctlHelperUpstream::isEuStallSupported() {
@@ -239,7 +239,7 @@ void IoctlHelperUpstream::fillVmBindExtUserFence(VmBindExtUserFenceT &vmBindExtU
void IoctlHelperUpstream::setVmBindUserFence(VmBindParams &vmBind, VmBindExtUserFenceT vmBindUserFence){};
std::optional<uint32_t> IoctlHelperUpstream::getVmAdviseAtomicAttribute() {
uint32_t IoctlHelperUpstream::getVmAdviseAtomicAttribute() {
return 0;
}

View File

@@ -706,7 +706,6 @@ void IoctlHelperXe::setupXeWaitUserFenceStruct(void *arg, uint32_t ctxId, uint16
}
int IoctlHelperXe::xeWaitUserFence(uint32_t ctxId, uint16_t op, uint64_t addr, uint64_t value, int64_t timeout, bool userInterrupt, uint32_t externalInterruptId, GraphicsAllocation *allocForInterruptWait) {
UNRECOVERABLE_IF(addr == 0x0)
drm_xe_wait_user_fence waitUserFence = {};
setupXeWaitUserFenceStruct(&waitUserFence, ctxId, op, addr, value, timeout);
@@ -749,14 +748,11 @@ std::optional<MemoryClassInstance> IoctlHelperXe::getPreferredLocationRegion(Pre
bool IoctlHelperXe::setVmBoAdvise(int32_t handle, uint32_t attribute, void *region) {
xeLog(" -> IoctlHelperXe::%s\n", __FUNCTION__);
// There is no vmAdvise attribute in Xe, so return success
return true;
return false;
}
bool IoctlHelperXe::setVmBoAdviseForChunking(int32_t handle, uint64_t start, uint64_t length, uint32_t attribute, void *region) {
xeLog(" -> IoctlHelperXe::%s\n", __FUNCTION__);
// There is no vmAdvise attribute in Xe, so return success
return true;
return false;
}
bool IoctlHelperXe::setVmPrefetch(uint64_t start, uint64_t length, uint32_t region, uint32_t vmId) {
@@ -870,10 +866,9 @@ int IoctlHelperXe::queryDistances(std::vector<QueryItem> &queryItems, std::vecto
return 0;
}
bool IoctlHelperXe::isPageFaultSupported() {
xeLog(" -> IoctlHelperXe::%s %d\n", __FUNCTION__, false);
return false;
std::optional<DrmParam> IoctlHelperXe::getHasPageFaultParamId() {
xeLog(" -> IoctlHelperXe::%s\n", __FUNCTION__);
return {};
};
uint32_t IoctlHelperXe::getEuStallFdParameter() {
@@ -924,10 +919,9 @@ void IoctlHelperXe::setVmBindUserFence(VmBindParams &vmBind, VmBindExtUserFenceT
return;
}
std::optional<uint32_t> IoctlHelperXe::getVmAdviseAtomicAttribute() {
uint32_t IoctlHelperXe::getVmAdviseAtomicAttribute() {
xeLog(" -> IoctlHelperXe::%s\n", __FUNCTION__);
// There is no vmAdvise attribute in Xe
return {};
return 0;
}
int IoctlHelperXe::vmBind(const VmBindParams &vmBindParams) {
@@ -1286,11 +1280,21 @@ int IoctlHelperXe::xeVmBind(const VmBindParams &vmBindParams, bool isBind) {
}
if (index != invalidIndex) {
drm_xe_sync sync[1] = {};
sync[0].type = DRM_XE_SYNC_TYPE_USER_FENCE;
sync[0].flags = DRM_XE_SYNC_FLAG_SIGNAL;
auto xeBindExtUserFence = reinterpret_cast<UserFenceExtension *>(vmBindParams.userFence);
UNRECOVERABLE_IF(!xeBindExtUserFence);
UNRECOVERABLE_IF(xeBindExtUserFence->tag != UserFenceExtension::tagValue);
sync[0].addr = xeBindExtUserFence->addr;
sync[0].timeline_value = xeBindExtUserFence->value;
drm_xe_vm_bind bind = {};
bind.vm_id = vmBindParams.vmId;
bind.num_syncs = 1;
bind.num_binds = 1;
bind.num_syncs = 1;
bind.syncs = reinterpret_cast<uintptr_t>(&sync);
bind.bind.range = vmBindParams.length;
bind.bind.addr = gmmHelper->decanonize(vmBindParams.start);
bind.bind.obj_offset = vmBindParams.offset;
@@ -1298,18 +1302,6 @@ int IoctlHelperXe::xeVmBind(const VmBindParams &vmBindParams, bool isBind) {
bind.bind.extensions = vmBindParams.extensions;
bind.bind.flags = static_cast<uint32_t>(vmBindParams.flags);
UNRECOVERABLE_IF(vmBindParams.userFence == 0x0);
drm_xe_sync sync[1] = {};
auto xeBindExtUserFence = reinterpret_cast<UserFenceExtension *>(vmBindParams.userFence);
UNRECOVERABLE_IF(xeBindExtUserFence->tag != UserFenceExtension::tagValue);
sync[0].type = DRM_XE_SYNC_TYPE_USER_FENCE;
sync[0].flags = DRM_XE_SYNC_FLAG_SIGNAL;
sync[0].addr = xeBindExtUserFence->addr;
sync[0].timeline_value = xeBindExtUserFence->value;
bind.syncs = reinterpret_cast<uintptr_t>(&sync);
if (isBind) {
bind.bind.op = DRM_XE_VM_BIND_OP_MAP;
bind.bind.obj = vmBindParams.handle;
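
Functionally this hunk restores the earlier ordering inside xeVmBind(): the single user-fence sync is prepared before drm_xe_vm_bind is filled rather than after it. A rough sketch of how one DRM_XE_SYNC_TYPE_USER_FENCE sync and a single map operation are packed together, assuming the xe uapi definitions are visible (e.g. via the kernel's xe_drm.h) and leaving out the actual ioctl and the NEO-side bookkeeping:

#include <cstdint>
#include <drm/xe_drm.h>

// Fills one user-fence sync plus a single-bind request. The sync is taken by
// reference so the address stored in bind.syncs stays valid until the caller
// has issued DRM_IOCTL_XE_VM_BIND.
drm_xe_vm_bind makeUserFenceBind(uint32_t vmId, uint64_t gpuAddress, uint64_t length,
                                 uint32_t boHandle, uint64_t fenceAddr, uint64_t fenceValue,
                                 drm_xe_sync &sync) {
    sync = {};
    sync.type = DRM_XE_SYNC_TYPE_USER_FENCE;
    sync.flags = DRM_XE_SYNC_FLAG_SIGNAL;
    sync.addr = fenceAddr;            // address the kernel writes to when the bind completes
    sync.timeline_value = fenceValue; // value that marks this particular bind as done

    drm_xe_vm_bind bind = {};
    bind.vm_id = vmId;
    bind.num_binds = 1;
    bind.num_syncs = 1;
    bind.syncs = reinterpret_cast<uintptr_t>(&sync);
    bind.bind.op = DRM_XE_VM_BIND_OP_MAP;
    bind.bind.obj = boHandle;
    bind.bind.addr = gpuAddress;
    bind.bind.range = length;
    return bind;
}
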
@@ -1666,11 +1658,6 @@ void IoctlHelperXe::querySupportedFeatures() {
struct drm_xe_vm_create vmCreate = {};
auto ret = IoctlHelper::ioctl(DrmIoctl::gemVmCreate, &vmCreate);
if (ret != 0) {
// if device is already in fault mode it may fail, need to retry with proper flags
vmCreate.flags = DRM_XE_VM_CREATE_FLAG_LR_MODE | DRM_XE_VM_CREATE_FLAG_FAULT_MODE;
ret = IoctlHelper::ioctl(DrmIoctl::gemVmCreate, &vmCreate);
}
DEBUG_BREAK_IF(ret != 0);
auto checkVmBindFlagSupport = [&](uint32_t flag) -> bool {
@@ -1701,21 +1688,5 @@ void IoctlHelperXe::querySupportedFeatures() {
vmDestroy.vm_id = vmCreate.vm_id;
ret = IoctlHelper::ioctl(DrmIoctl::gemVmDestroy, &vmDestroy);
DEBUG_BREAK_IF(ret != 0);
auto checkVmCreateFlagsSupport = [&](uint32_t flags) -> bool {
struct drm_xe_vm_create vmCreate = {};
vmCreate.flags = flags;
ret = IoctlHelper::ioctl(DrmIoctl::gemVmCreate, &vmCreate);
if (ret == 0) {
struct drm_xe_vm_destroy vmDestroy = {};
vmDestroy.vm_id = vmCreate.vm_id;
ret = IoctlHelper::ioctl(DrmIoctl::gemVmDestroy, &vmDestroy);
DEBUG_BREAK_IF(ret != 0);
return true;
}
return false;
};
supportedFeatures.flags.pageFault = checkVmCreateFlagsSupport(DRM_XE_VM_CREATE_FLAG_LR_MODE | DRM_XE_VM_CREATE_FLAG_FAULT_MODE);
};
} // namespace NEO
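
The revert also drops the probe that detected recoverable-pagefault support on xekmd. Xe has no i915-style GETPARAM for this, so the reverted fix probed by construction: create a throwaway VM with DRM_XE_VM_CREATE_FLAG_LR_MODE | DRM_XE_VM_CREATE_FLAG_FAULT_MODE, destroy it again, and treat success as support. A simplified sketch of that pattern, with hypothetical callables standing in for IoctlHelper::ioctl:

#include <cstdint>
#include <functional>

// Hypothetical stand-ins for the two ioctls; the real code routes them through
// IoctlHelper::ioctl with DrmIoctl::gemVmCreate and DrmIoctl::gemVmDestroy.
struct VmCreateArgs {
    uint32_t flags = 0;
    uint32_t vmId = 0; // filled in by the kernel on success
};
using VmCreateFn = std::function<int(VmCreateArgs &)>;
using VmDestroyFn = std::function<int(uint32_t vmId)>;

// Same idea as the removed checkVmCreateFlagsSupport lambda: try to create a VM
// with the requested flags and, if that works, tear it down and report support.
bool checkVmCreateFlagsSupport(const VmCreateFn &vmCreate, const VmDestroyFn &vmDestroy, uint32_t flags) {
    VmCreateArgs args{};
    args.flags = flags;
    if (vmCreate(args) != 0) {
        return false;
    }
    vmDestroy(args.vmId);
    return true;
}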

View File

@@ -69,7 +69,7 @@ class IoctlHelperXe : public IoctlHelper {
uint16_t getWaitUserFenceSoftFlag() override;
int execBuffer(ExecBuffer *execBuffer, uint64_t completionGpuAddress, TaskCountType counterValue) override;
bool completionFenceExtensionSupported(const bool isVmBindAvailable) override;
bool isPageFaultSupported() override;
std::optional<DrmParam> getHasPageFaultParamId() override;
std::unique_ptr<uint8_t[]> createVmControlExtRegion(const std::optional<MemoryClassInstance> &regionInstanceClass) override;
uint32_t getFlagsForVmCreate(bool disableScratch, bool enablePageFault, bool useVmBind) override;
uint32_t createContextWithAccessCounters(GemContextCreateExt &gcc) override;
@@ -77,7 +77,7 @@ class IoctlHelperXe : public IoctlHelper {
void fillVmBindExtSetPat(VmBindExtSetPatT &vmBindExtSetPat, uint64_t patIndex, uint64_t nextExtension) override;
void fillVmBindExtUserFence(VmBindExtUserFenceT &vmBindExtUserFence, uint64_t fenceAddress, uint64_t fenceValue, uint64_t nextExtension) override;
void setVmBindUserFence(VmBindParams &vmBind, VmBindExtUserFenceT vmBindUserFence) override;
std::optional<uint32_t> getVmAdviseAtomicAttribute() override;
uint32_t getVmAdviseAtomicAttribute() override;
int vmBind(const VmBindParams &vmBindParams) override;
int vmUnbind(const VmBindParams &vmBindParams) override;
int getResetStats(ResetStats &resetStats, uint32_t *status, ResetStatsFault *resetStatsFault) override;
@@ -214,8 +214,7 @@ class IoctlHelperXe : public IoctlHelper {
struct {
uint32_t vmBindReadOnly : 1;
uint32_t vmBindImmediate : 1;
uint32_t pageFault : 1;
uint32_t reserved : 29;
uint32_t reserved : 30;
} flags;
uint32_t allFlags = 0;
};
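
The matching header change shrinks the supported-features bit-field: with the pageFault bit gone, reserved widens from 29 back to 30 bits. A standalone sketch of the bits-plus-aggregate layout used here (type and field names simplified, not the exact NEO declaration):

#include <cstdint>

// Individual capability bits overlaid with one aggregate word, so all flags can be
// inspected, copied, or cleared at once via allFlags.
union SupportedFeaturesSketch {
    struct {
        uint32_t vmBindReadOnly : 1;
        uint32_t vmBindImmediate : 1;
        uint32_t reserved : 30; // was 29 while the pageFault bit existed
    } flags;
    uint32_t allFlags = 0;
};

static_assert(sizeof(SupportedFeaturesSketch) == sizeof(uint32_t), "bits and aggregate share one 32-bit word");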