/*
 * Copyright (C) 2017-2020 Intel Corporation
 *
 * SPDX-License-Identifier: MIT
 *
 */

#include "shared/test/unit_test/helpers/debug_manager_state_restore.h"

#include "opencl/source/kernel/kernel.h"
#include "opencl/source/mem_obj/buffer.h"
#include "opencl/test/unit_test/fixtures/cl_device_fixture.h"
#include "opencl/test/unit_test/fixtures/context_fixture.h"
#include "opencl/test/unit_test/fixtures/memory_management_fixture.h"
#include "opencl/test/unit_test/kernel/kernel_arg_buffer_fixture.h"
#include "opencl/test/unit_test/mocks/mock_buffer.h"
#include "opencl/test/unit_test/mocks/mock_context.h"
#include "opencl/test/unit_test/mocks/mock_kernel.h"
#include "opencl/test/unit_test/mocks/mock_program.h"
#include "test.h"

#include "CL/cl.h"
#include "gtest/gtest.h"
#include "hw_cmds.h"

#include <memory>

using namespace NEO;

typedef Test<KernelArgBufferFixture> KernelArgBufferTest;

TEST_F(KernelArgBufferTest, GivenValidBufferWhenSettingKernelArgThenBufferAddressIsCorrect) {
    Buffer *buffer = new MockBuffer();

    auto val = (cl_mem)buffer;
    auto pVal = &val;

    auto retVal = this->pKernel->setArg(0, sizeof(cl_mem *), pVal);
    EXPECT_EQ(CL_SUCCESS, retVal);

    auto pKernelArg = (cl_mem **)(this->pKernel->getCrossThreadData(rootDeviceIndex) +
                                  this->pKernelInfo->kernelArgInfo[0].kernelArgPatchInfoVector[0].crossthreadOffset);
    EXPECT_EQ(buffer->getCpuAddress(), *pKernelArg);

    delete buffer;
}

TEST_F(KernelArgBufferTest, GivenSvmPtrStatelessWhenSettingKernelArgThenArgumentsAreSetCorrectly) {
    Buffer *buffer = new MockBuffer();

    auto val = (cl_mem)buffer;
    auto pVal = &val;
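
    // Stateless buffer access: setArg should succeed without the kernel needing any surface state heap space.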
    pKernelInfo->usesSsh = false;
    pKernelInfo->requiresSshForBuffers = false;

    auto retVal = this->pKernel->setArg(0, sizeof(cl_mem *), pVal);
    EXPECT_EQ(CL_SUCCESS, retVal);
    EXPECT_FALSE(pKernel->requiresCoherency());

    EXPECT_EQ(0u, pKernel->getSurfaceStateHeapSize());

    delete buffer;
}

HWTEST_F(KernelArgBufferTest, GivenSvmPtrStatefulWhenSettingKernelArgThenArgumentsAreSetCorrectly) {
    Buffer *buffer = new MockBuffer();

    auto val = (cl_mem)buffer;
    auto pVal = &val;

    pKernelInfo->usesSsh = true;
    pKernelInfo->requiresSshForBuffers = true;

    auto retVal = this->pKernel->setArg(0, sizeof(cl_mem *), pVal);
    EXPECT_EQ(CL_SUCCESS, retVal);
    EXPECT_FALSE(pKernel->requiresCoherency());

    EXPECT_NE(0u, pKernel->getSurfaceStateHeapSize());
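
    // Stateful buffer access: the argument gets a RENDER_SURFACE_STATE in the surface state heap,
    // whose base address should match the buffer's GPU address.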
    typedef typename FamilyType::RENDER_SURFACE_STATE RENDER_SURFACE_STATE;
    auto surfaceState = reinterpret_cast<const RENDER_SURFACE_STATE *>(
        ptrOffset(pKernel->getSurfaceStateHeap(), pKernelInfo->kernelArgInfo[0].offsetHeap));

    auto surfaceAddress = surfaceState->getSurfaceBaseAddress();
    EXPECT_EQ(buffer->getGraphicsAllocation(mockRootDeviceIndex)->getGpuAddress(), surfaceAddress);

    delete buffer;
}

HWTEST_F(KernelArgBufferTest, GivenBufferFromSvmPtrWhenSettingKernelArgThenArgumentsAreSetCorrectly) {
    Buffer *buffer = new MockBuffer();
    buffer->getGraphicsAllocation(mockRootDeviceIndex)->setCoherent(true);

    auto val = (cl_mem)buffer;
    auto pVal = &val;

    auto retVal = this->pKernel->setArg(0, sizeof(cl_mem *), pVal);
    EXPECT_EQ(CL_SUCCESS, retVal);
    EXPECT_TRUE(pKernel->requiresCoherency());

    delete buffer;
}

TEST_F(KernelArgBufferTest, GivenInvalidBufferWhenSettingKernelArgThenInvalidMemObjectErrorIsReturned) {
    char *ptr = new char[sizeof(Buffer)];

    auto val = (cl_mem *)ptr;
    auto pVal = &val;
    auto retVal = this->pKernel->setArg(0, sizeof(cl_mem *), pVal);
    EXPECT_EQ(CL_INVALID_MEM_OBJECT, retVal);

    delete[] ptr;
}

TEST_F(KernelArgBufferTest, GivenNullPtrWhenSettingKernelArgThenKernelArgIsNull) {
    auto val = (cl_mem *)nullptr;
    auto pVal = &val;
    this->pKernel->setArg(0, sizeof(cl_mem *), pVal);

    auto pKernelArg = (cl_mem **)(this->pKernel->getCrossThreadData(rootDeviceIndex) +
                                  this->pKernelInfo->kernelArgInfo[0].kernelArgPatchInfoVector[0].crossthreadOffset);

    EXPECT_EQ(nullptr, *pKernelArg);
}
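
// The next two tests set the patch size for arg 0 to 4 bytes and pre-fill the 64-bit patch slot
// with an all-ones sentinel: setArg with a null value must clear only the lower 4 bytes.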
TEST_F(KernelArgBufferTest, given32BitDeviceWhenArgPtrPassedIsNullThenOnly4BytesAreBeingPatched) {
    auto val = (cl_mem *)nullptr;
    auto pVal = &val;

    this->pKernelInfo->kernelArgInfo[0].kernelArgPatchInfoVector[0].size = 4;

    auto pKernelArg64bit = (uint64_t *)(this->pKernel->getCrossThreadData(rootDeviceIndex) +
                                        this->pKernelInfo->kernelArgInfo[0].kernelArgPatchInfoVector[0].crossthreadOffset);

    uint32_t *pKernelArg32bit = (uint32_t *)pKernelArg64bit;

    *pKernelArg64bit = 0xffffffffffffffff;

    this->pKernel->setArg(0, sizeof(cl_mem *), pVal);
    uint64_t expValue = 0u;

    EXPECT_EQ(0u, *pKernelArg32bit);
    EXPECT_NE(expValue, *pKernelArg64bit);
}

TEST_F(KernelArgBufferTest, given32BitDeviceWhenArgPassedIsNullThenOnly4BytesAreBeingPatched) {
    auto pVal = nullptr;
    this->pKernelInfo->kernelArgInfo[0].kernelArgPatchInfoVector[0].size = 4;
    auto pKernelArg64bit = (uint64_t *)(this->pKernel->getCrossThreadData(rootDeviceIndex) +
                                        this->pKernelInfo->kernelArgInfo[0].kernelArgPatchInfoVector[0].crossthreadOffset);

    *pKernelArg64bit = 0xffffffffffffffff;

    uint32_t *pKernelArg32bit = (uint32_t *)pKernelArg64bit;

    this->pKernel->setArg(0, sizeof(cl_mem *), pVal);
    uint64_t expValue = 0u;

    EXPECT_EQ(0u, *pKernelArg32bit);
    EXPECT_NE(expValue, *pKernelArg64bit);
}
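
// The next three tests check which buffer allocations get recorded in kernelArgRequiresCacheFlush:
// only the non-writable allocation that has flushL3Required set is tracked.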
TEST_F(KernelArgBufferTest, givenWritableBufferWhenSettingAsArgThenDoNotExpectAllocationInCacheFlushVector) {
    auto buffer = std::make_unique<MockBuffer>();
    buffer->mockGfxAllocation.setMemObjectsAllocationWithWritableFlags(true);
    buffer->mockGfxAllocation.setFlushL3Required(false);

    auto val = static_cast<cl_mem>(buffer.get());
    auto pVal = &val;

    auto retVal = pKernel->setArg(0, sizeof(cl_mem *), pVal);
    EXPECT_EQ(CL_SUCCESS, retVal);
    EXPECT_EQ(nullptr, pKernel->kernelArgRequiresCacheFlush[0]);
}

TEST_F(KernelArgBufferTest, givenCacheFlushBufferWhenSettingAsArgThenExpectAllocationInCacheFlushVector) {
    auto buffer = std::make_unique<MockBuffer>();
    buffer->mockGfxAllocation.setMemObjectsAllocationWithWritableFlags(false);
    buffer->mockGfxAllocation.setFlushL3Required(true);

    auto val = static_cast<cl_mem>(buffer.get());
    auto pVal = &val;

    auto retVal = pKernel->setArg(0, sizeof(cl_mem *), pVal);
    EXPECT_EQ(CL_SUCCESS, retVal);
    EXPECT_EQ(&buffer->mockGfxAllocation, pKernel->kernelArgRequiresCacheFlush[0]);
}

TEST_F(KernelArgBufferTest, givenNoCacheFlushBufferWhenSettingAsArgThenNotExpectAllocationInCacheFlushVector) {
    auto buffer = std::make_unique<MockBuffer>();
    buffer->mockGfxAllocation.setMemObjectsAllocationWithWritableFlags(false);
    buffer->mockGfxAllocation.setFlushL3Required(false);

    auto val = static_cast<cl_mem>(buffer.get());
    auto pVal = &val;

    auto retVal = pKernel->setArg(0, sizeof(cl_mem *), pVal);
    EXPECT_EQ(CL_SUCCESS, retVal);
    EXPECT_EQ(nullptr, pKernel->kernelArgRequiresCacheFlush[0]);
}
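
// Fixture variant that enables bindless mode via the UseBindlessMode debug flag before the base
// buffer-arg fixture runs; DebugManagerStateRestore restores the flag when the fixture is destroyed.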
class KernelArgBufferFixtureBindless : public KernelArgBufferFixture {
  public:
    void SetUp() {
        DebugManager.flags.UseBindlessMode.set(1);
        KernelArgBufferFixture::SetUp();
    }
    void TearDown() override {
        KernelArgBufferFixture::TearDown();
    }
    DebugManagerStateRestore restorer;
};

typedef Test<KernelArgBufferFixtureBindless> KernelArgBufferTestBindless;
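
// With bindless buffers, patchBindlessSurfaceStateOffsets writes into cross-thread data the bindless
// surface offset (sshOffset + the arg's heap offset) encoded via DataPortBindlessSurfaceExtendedMessageDescriptor.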
HWTEST_F(KernelArgBufferTestBindless, givenUsedBindlessBuffersWhenPatchingSurfaceStateOffsetsThenCorrectOffsetIsPatchedInCrossThreadData) {
    using DataPortBindlessSurfaceExtendedMessageDescriptor = typename FamilyType::DataPortBindlessSurfaceExtendedMessageDescriptor;
    DebugManagerStateRestore restorer;
    DebugManager.flags.UseBindlessMode.set(1);

    pKernelInfo->usesSsh = true;
    pKernelInfo->requiresSshForBuffers = true;

    auto crossThreadDataOffset = pKernelInfo->kernelArgInfo[0].kernelArgPatchInfoVector[0].crossthreadOffset;
    pKernelInfo->kernelArgInfo[0].offsetHeap = 64;
    pKernelInfo->kernelArgInfo[0].isBuffer = true;

    auto patchLocation = reinterpret_cast<uint32_t *>(ptrOffset(pKernel->getCrossThreadData(rootDeviceIndex), crossThreadDataOffset));
    *patchLocation = 0xdead;

    uint32_t sshOffset = 0x1000;
    pKernel->patchBindlessSurfaceStateOffsets(*pDevice, sshOffset);
    DataPortBindlessSurfaceExtendedMessageDescriptor extMessageDesc;
    extMessageDesc.setBindlessSurfaceOffset(sshOffset + pKernelInfo->kernelArgInfo[0].offsetHeap);
    auto expectedOffset = extMessageDesc.getBindlessSurfaceOffsetToPatch();
    EXPECT_EQ(expectedOffset, *patchLocation);
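
    // Repeat with an sshOffset chosen so that sshOffset + offsetHeap (64) lands exactly at the
    // 20-bit boundary (maxNBitValue(20) + 1).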
    sshOffset = static_cast<uint32_t>(maxNBitValue(20) + 1) - 64;
    pKernel->patchBindlessSurfaceStateOffsets(*pDevice, sshOffset);
    extMessageDesc.setBindlessSurfaceOffset(sshOffset + pKernelInfo->kernelArgInfo[0].offsetHeap);
    expectedOffset = extMessageDesc.getBindlessSurfaceOffsetToPatch();
    EXPECT_EQ(expectedOffset, *patchLocation);
}
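
// The next two tests seed the patch location with a 0xdead sentinel and expect it to survive:
// no patching should happen for a non-buffer argument or when bindless mode is disabled.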
TEST_F(KernelArgBufferTest, givenUsedBindlessBuffersAndNonBufferArgWhenPatchingSurfaceStateOffsetsThenCrossThreadDataIsNotPatched) {
    DebugManagerStateRestore restorer;
    DebugManager.flags.UseBindlessMode.set(1);

    pKernelInfo->usesSsh = true;
    pKernelInfo->requiresSshForBuffers = true;

    auto crossThreadDataOffset = pKernelInfo->kernelArgInfo[0].kernelArgPatchInfoVector[0].crossthreadOffset;
    pKernelInfo->kernelArgInfo[0].offsetHeap = 64;
    pKernelInfo->kernelArgInfo[0].isBuffer = false;

    auto patchLocation = reinterpret_cast<uint32_t *>(ptrOffset(pKernel->getCrossThreadData(rootDeviceIndex), crossThreadDataOffset));
    *patchLocation = 0xdead;

    uint32_t sshOffset = 4000;
    pKernel->patchBindlessSurfaceStateOffsets(*pDevice, sshOffset);
    EXPECT_EQ(0xdeadu, *patchLocation);
}

TEST_F(KernelArgBufferTest, givenNotUsedBindlessBuffersAndBufferArgWhenPatchingSurfaceStateOffsetsThenCrossThreadDataIsNotPatched) {
    DebugManagerStateRestore restorer;
    DebugManager.flags.UseBindlessMode.set(0);

    pKernelInfo->usesSsh = true;
    pKernelInfo->requiresSshForBuffers = true;

    auto crossThreadDataOffset = pKernelInfo->kernelArgInfo[0].kernelArgPatchInfoVector[0].crossthreadOffset;
    pKernelInfo->kernelArgInfo[0].offsetHeap = 64;
    pKernelInfo->kernelArgInfo[0].isBuffer = true;

    auto patchLocation = reinterpret_cast<uint32_t *>(ptrOffset(pKernel->getCrossThreadData(rootDeviceIndex), crossThreadDataOffset));
    *patchLocation = 0xdead;

    uint32_t sshOffset = 4000;
    pKernel->patchBindlessSurfaceStateOffsets(*pDevice, sshOffset);
    EXPECT_EQ(0xdeadu, *patchLocation);
}
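
// Builtin kernels go through the bindless patching path as well; this only checks that the
// 0xdead sentinel was overwritten, not the exact encoded value.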
HWTEST_F(KernelArgBufferTestBindless, givenUsedBindlessBuffersAndBuiltinKernelWhenPatchingSurfaceStateOffsetsThenOffsetIsPatched) {
    using DataPortBindlessSurfaceExtendedMessageDescriptor = typename FamilyType::DataPortBindlessSurfaceExtendedMessageDescriptor;

    pKernelInfo->usesSsh = true;
    pKernelInfo->requiresSshForBuffers = true;

    auto crossThreadDataOffset = pKernelInfo->kernelArgInfo[0].kernelArgPatchInfoVector[0].crossthreadOffset;
    pKernelInfo->kernelArgInfo[0].offsetHeap = 64;
    pKernelInfo->kernelArgInfo[0].isBuffer = true;

    auto patchLocation = reinterpret_cast<uint32_t *>(ptrOffset(pKernel->getCrossThreadData(rootDeviceIndex), crossThreadDataOffset));
    *patchLocation = 0xdead;

    pKernel->isBuiltIn = true;

    uint32_t sshOffset = 0x1000;
    pKernel->patchBindlessSurfaceStateOffsets(*pDevice, sshOffset);
    EXPECT_NE(0xdeadu, *patchLocation);
}