/*
 * Copyright (C) 2017-2020 Intel Corporation
 *
 * SPDX-License-Identifier: MIT
 *
 */

#include "shared/source/command_stream/command_stream_receiver_hw.h"
#include "shared/source/debug_settings/debug_settings_manager.h"
#include "shared/source/gmm_helper/gmm_helper.h"
#include "shared/source/helpers/flush_stamp.h"
#include "shared/source/helpers/hw_helper.h"
#include "shared/source/memory_manager/allocations_list.h"
#include "shared/source/memory_manager/unified_memory_manager.h"
#include "shared/source/os_interface/os_context.h"
#include "shared/test/unit_test/helpers/debug_manager_state_restore.h"
#include "shared/test/unit_test/page_fault_manager/mock_cpu_page_fault_manager.h"
#include "shared/test/unit_test/utilities/base_object_utils.h"

#include "opencl/source/built_ins/builtins_dispatch_builder.h"
#include "opencl/source/helpers/memory_properties_helpers.h"
#include "opencl/source/helpers/surface_formats.h"
#include "opencl/source/kernel/kernel.h"
#include "opencl/source/mem_obj/image.h"
#include "opencl/source/memory_manager/os_agnostic_memory_manager.h"
#include "opencl/test/unit_test/fixtures/cl_device_fixture.h"
#include "opencl/test/unit_test/fixtures/device_host_queue_fixture.h"
#include "opencl/test/unit_test/fixtures/execution_model_fixture.h"
#include "opencl/test/unit_test/fixtures/memory_management_fixture.h"
#include "opencl/test/unit_test/fixtures/multi_root_device_fixture.h"
#include "opencl/test/unit_test/helpers/gtest_helpers.h"
#include "opencl/test/unit_test/libult/ult_command_stream_receiver.h"
#include "opencl/test/unit_test/mocks/mock_allocation_properties.h"
#include "opencl/test/unit_test/mocks/mock_command_queue.h"
#include "opencl/test/unit_test/mocks/mock_context.h"
#include "opencl/test/unit_test/mocks/mock_graphics_allocation.h"
#include "opencl/test/unit_test/mocks/mock_kernel.h"
#include "opencl/test/unit_test/mocks/mock_memory_manager.h"
#include "opencl/test/unit_test/mocks/mock_program.h"
#include "opencl/test/unit_test/program/program_from_binary.h"
#include "opencl/test/unit_test/program/program_tests.h"
#include "opencl/test/unit_test/test_macros/test_checks_ocl.h"
#include "test.h"

#include <memory>

using namespace NEO;
using namespace DeviceHostQueue;
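
// The KernelTest fixture below builds the program binary selected by the test
// parameters, creates a kernel named KernelName from it, and is shared by the
// parameterized getInfo()/getWorkGroupInfo() tests that follow.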
class KernelTest : public ProgramFromBinaryTest {
  public:
    ~KernelTest() override = default;

  protected:
    void SetUp() override {
        ProgramFromBinaryTest::SetUp();
        ASSERT_NE(nullptr, pProgram);
        ASSERT_EQ(CL_SUCCESS, retVal);

        cl_device_id device = pClDevice;
        retVal = pProgram->build(
            1,
            &device,
            nullptr,
            nullptr,
            nullptr,
            false);
        ASSERT_EQ(CL_SUCCESS, retVal);

        // create a kernel
        pKernel = Kernel::create(
            pProgram,
            *pProgram->getKernelInfo(KernelName),
            &retVal);

        ASSERT_EQ(CL_SUCCESS, retVal);
        ASSERT_NE(nullptr, pKernel);
    }

    void TearDown() override {
        delete pKernel;
        pKernel = nullptr;
        knownSource.reset();
        ProgramFromBinaryTest::TearDown();
    }

    Kernel *pKernel = nullptr;
    cl_int retVal = CL_SUCCESS;
};

TEST(KernelTest, WhenKernelIsCreatedThenCorrectMembersAreMemObjects) {
    EXPECT_TRUE(Kernel::isMemObj(Kernel::BUFFER_OBJ));
    EXPECT_TRUE(Kernel::isMemObj(Kernel::IMAGE_OBJ));
    EXPECT_TRUE(Kernel::isMemObj(Kernel::PIPE_OBJ));

    EXPECT_FALSE(Kernel::isMemObj(Kernel::SAMPLER_OBJ));
    EXPECT_FALSE(Kernel::isMemObj(Kernel::ACCELERATOR_OBJ));
    EXPECT_FALSE(Kernel::isMemObj(Kernel::NONE_OBJ));
    EXPECT_FALSE(Kernel::isMemObj(Kernel::SVM_ALLOC_OBJ));
}

TEST_P(KernelTest, WhenKernelIsCreatedThenKernelHeapIsCorrect) {
    EXPECT_EQ(pKernel->getKernelInfo().heapInfo.pKernelHeap, pKernel->getKernelHeap());
    EXPECT_EQ(pKernel->getKernelInfo().heapInfo.KernelHeapSize, pKernel->getKernelHeapSize());
}

TEST_P(KernelTest, GivenInvalidParamNameWhenGettingInfoThenInvalidValueErrorIsReturned) {
    size_t paramValueSizeRet = 0;

    // get size
    retVal = pKernel->getInfo(
        0,
        0,
        nullptr,
        &paramValueSizeRet);

    EXPECT_EQ(CL_INVALID_VALUE, retVal);
}

TEST_P(KernelTest, GivenInvalidParametersWhenGettingInfoThenValueSizeRetIsNotUpdated) {
    size_t paramValueSizeRet = 0x1234;

    // get size
    retVal = pKernel->getInfo(
        0,
        0,
        nullptr,
        &paramValueSizeRet);

    EXPECT_EQ(CL_INVALID_VALUE, retVal);
    EXPECT_EQ(0x1234u, paramValueSizeRet);
}

TEST_P(KernelTest, GivenKernelFunctionNameWhenGettingInfoThenKernelFunctionNameIsReturned) {
    cl_kernel_info paramName = CL_KERNEL_FUNCTION_NAME;
    size_t paramValueSize = 0;
    char *paramValue = nullptr;
    size_t paramValueSizeRet = 0;

    // get size
    retVal = pKernel->getInfo(
        paramName,
        paramValueSize,
        nullptr,
        &paramValueSizeRet);
    EXPECT_NE(0u, paramValueSizeRet);
    ASSERT_EQ(CL_SUCCESS, retVal);

    // allocate space for name
    paramValue = new char[paramValueSizeRet];

    // get the name
    paramValueSize = paramValueSizeRet;

    retVal = pKernel->getInfo(
        paramName,
        paramValueSize,
        paramValue,
        nullptr);

    EXPECT_NE(nullptr, paramValue);
    EXPECT_EQ(0, strcmp(paramValue, KernelName));
    EXPECT_EQ(CL_SUCCESS, retVal);

    delete[] paramValue;
}

TEST_P(KernelTest, GivenKernelBinaryProgramIntelWhenGettingInfoThenKernelBinaryIsReturned) {
    cl_kernel_info paramName = CL_KERNEL_BINARY_PROGRAM_INTEL;
    size_t paramValueSize = 0;
    char *paramValue = nullptr;
    size_t paramValueSizeRet = 0;
    const char *pKernelData = reinterpret_cast<const char *>(pKernel->getKernelHeap());
    EXPECT_NE(nullptr, pKernelData);

    // get size of kernel binary
    retVal = pKernel->getInfo(
        paramName,
        paramValueSize,
        nullptr,
        &paramValueSizeRet);
    EXPECT_NE(0u, paramValueSizeRet);
    ASSERT_EQ(CL_SUCCESS, retVal);

    // allocate space for kernel binary
    paramValue = new char[paramValueSizeRet];

    // get kernel binary
    paramValueSize = paramValueSizeRet;
    retVal = pKernel->getInfo(
        paramName,
        paramValueSize,
        paramValue,
        nullptr);

    EXPECT_EQ(CL_SUCCESS, retVal);
    EXPECT_NE(nullptr, paramValue);
    EXPECT_EQ(0, memcmp(paramValue, pKernelData, paramValueSize));

    delete[] paramValue;
}

TEST_P(KernelTest, givenBinaryWhenItIsQueriedForGpuAddressThenAbsoluteAddressIsReturned) {
    cl_kernel_info paramName = CL_KERNEL_BINARY_GPU_ADDRESS_INTEL;
    uint64_t paramValue = 0llu;
    size_t paramValueSize = sizeof(paramValue);
    size_t paramValueSizeRet = 0;

    retVal = pKernel->getInfo(
        paramName,
        paramValueSize,
        &paramValue,
        &paramValueSizeRet);

    EXPECT_EQ(CL_SUCCESS, retVal);
    auto expectedGpuAddress = GmmHelper::decanonize(pKernel->getKernelInfo().kernelAllocation->getGpuAddress());
    EXPECT_EQ(expectedGpuAddress, paramValue);
    EXPECT_EQ(paramValueSize, paramValueSizeRet);
}

TEST_P(KernelTest, GivenKernelNumArgsWhenGettingInfoThenNumberOfKernelArgsIsReturned) {
    cl_kernel_info paramName = CL_KERNEL_NUM_ARGS;
    size_t paramValueSize = sizeof(cl_uint);
    cl_uint paramValue = 0;
    size_t paramValueSizeRet = 0;

    // get size
    retVal = pKernel->getInfo(
        paramName,
        paramValueSize,
        &paramValue,
        &paramValueSizeRet);

    EXPECT_EQ(sizeof(cl_uint), paramValueSizeRet);
    EXPECT_EQ(2u, paramValue);
    EXPECT_EQ(CL_SUCCESS, retVal);
}

TEST_P(KernelTest, GivenKernelProgramWhenGettingInfoThenProgramIsReturned) {
    cl_kernel_info paramName = CL_KERNEL_PROGRAM;
    size_t paramValueSize = sizeof(cl_program);
    cl_program paramValue = 0;
    size_t paramValueSizeRet = 0;
    cl_program prog = pProgram;

    // get size
    retVal = pKernel->getInfo(
        paramName,
        paramValueSize,
        &paramValue,
        &paramValueSizeRet);

    EXPECT_EQ(CL_SUCCESS, retVal);
    EXPECT_EQ(sizeof(cl_program), paramValueSizeRet);
    EXPECT_EQ(prog, paramValue);
}

TEST_P(KernelTest, GivenKernelContextWhenGettingInfoThenKernelContextIsReturned) {
    cl_kernel_info paramName = CL_KERNEL_CONTEXT;
    cl_context paramValue = 0;
    size_t paramValueSize = sizeof(paramValue);
    size_t paramValueSizeRet = 0;
    cl_context context = pContext;

    // get size
    retVal = pKernel->getInfo(
        paramName,
        paramValueSize,
        &paramValue,
        &paramValueSizeRet);

    EXPECT_EQ(CL_SUCCESS, retVal);
    EXPECT_EQ(paramValueSize, paramValueSizeRet);
    EXPECT_EQ(context, paramValue);
}

TEST_P(KernelTest, GivenKernelWorkGroupSizeWhenGettingWorkGroupInfoThenWorkGroupSizeIsReturned) {
    cl_kernel_info paramName = CL_KERNEL_WORK_GROUP_SIZE;
    size_t paramValue = 0;
    size_t paramValueSize = sizeof(paramValue);
    size_t paramValueSizeRet = 0;

    auto kernelMaxWorkGroupSize = pDevice->getDeviceInfo().maxWorkGroupSize - 1;
    pKernel->maxKernelWorkGroupSize = static_cast<uint32_t>(kernelMaxWorkGroupSize);

    retVal = pKernel->getWorkGroupInfo(
        pClDevice,
        paramName,
        paramValueSize,
        &paramValue,
        &paramValueSizeRet);

    EXPECT_EQ(CL_SUCCESS, retVal);
    EXPECT_EQ(paramValueSize, paramValueSizeRet);
    EXPECT_EQ(kernelMaxWorkGroupSize, paramValue);
}

TEST_P(KernelTest, GivenKernelCompileWorkGroupSizeWhenGettingWorkGroupInfoThenCompileWorkGroupSizeIsReturned) {
    cl_kernel_info paramName = CL_KERNEL_COMPILE_WORK_GROUP_SIZE;
    size_t paramValue[3];
    size_t paramValueSize = sizeof(paramValue);
    size_t paramValueSizeRet = 0;

    retVal = pKernel->getWorkGroupInfo(
        pClDevice,
        paramName,
        paramValueSize,
        &paramValue,
        &paramValueSizeRet);

    EXPECT_EQ(CL_SUCCESS, retVal);
    EXPECT_EQ(paramValueSize, paramValueSizeRet);
}

INSTANTIATE_TEST_CASE_P(KernelTests,
                        KernelTest,
                        ::testing::Combine(
                            ::testing::ValuesIn(BinaryFileNames),
                            ::testing::ValuesIn(KernelNames)));
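
// KernelFromBinaryTests build a named binary ("kernel_num_args", "simple_kernels")
// through ProgramSimpleFixture and create kernels directly from the program's KernelInfo.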

class KernelFromBinaryTest : public ProgramSimpleFixture {
  public:
    void SetUp() override {
        ProgramSimpleFixture::SetUp();
    }
    void TearDown() override {
        ProgramSimpleFixture::TearDown();
    }
};
typedef Test<KernelFromBinaryTest> KernelFromBinaryTests;

TEST_F(KernelFromBinaryTests, GivenKernelNumArgsWhenGettingInfoThenNumberOfKernelArgsIsReturned) {
    cl_device_id device = pClDevice;

    CreateProgramFromBinary(pContext, &device, "kernel_num_args");

    ASSERT_NE(nullptr, pProgram);
    retVal = pProgram->build(
        1,
        &device,
        nullptr,
        nullptr,
        nullptr,
        false);

    ASSERT_EQ(CL_SUCCESS, retVal);

    auto pKernelInfo = pProgram->getKernelInfo("test");

    // create a kernel
    auto pKernel = Kernel::create(
        pProgram,
        *pKernelInfo,
        &retVal);

    ASSERT_EQ(CL_SUCCESS, retVal);

    cl_uint paramValue = 0;
    size_t paramValueSizeRet = 0;

    // get size
    retVal = pKernel->getInfo(
        CL_KERNEL_NUM_ARGS,
        sizeof(cl_uint),
        &paramValue,
        &paramValueSizeRet);

    EXPECT_EQ(CL_SUCCESS, retVal);
    EXPECT_EQ(sizeof(cl_uint), paramValueSizeRet);
    EXPECT_EQ(3u, paramValue);

    delete pKernel;
}

TEST_F(KernelFromBinaryTests, WhenRegularKernelIsCreatedThenItIsNotBuiltIn) {
    cl_device_id device = pClDevice;

    CreateProgramFromBinary(pContext, &device, "simple_kernels");

    ASSERT_NE(nullptr, pProgram);
    retVal = pProgram->build(
        1,
        &device,
        nullptr,
        nullptr,
        nullptr,
        false);

    ASSERT_EQ(CL_SUCCESS, retVal);

    auto pKernelInfo = pProgram->getKernelInfo("simple_kernel_0");

    // create a kernel
    auto pKernel = Kernel::create(
        pProgram,
        *pKernelInfo,
        &retVal);

    ASSERT_EQ(CL_SUCCESS, retVal);
    ASSERT_NE(nullptr, pKernel);

    // get builtIn property
    bool isBuiltIn = pKernel->isBuiltIn;

    EXPECT_FALSE(isBuiltIn);

    delete pKernel;
}

TEST_F(KernelFromBinaryTests, givenArgumentDeclaredAsConstantWhenKernelIsCreatedThenArgumentIsMarkedAsReadOnly) {
    cl_device_id device = pClDevice;

    CreateProgramFromBinary(pContext, &device, "simple_kernels");

    ASSERT_NE(nullptr, pProgram);
    retVal = pProgram->build(
        1,
        &device,
        nullptr,
        nullptr,
        nullptr,
        false);

    ASSERT_EQ(CL_SUCCESS, retVal);

    auto pKernelInfo = pProgram->getKernelInfo("simple_kernel_6");
    EXPECT_TRUE(pKernelInfo->kernelArgInfo[1].isReadOnly);
    pKernelInfo = pProgram->getKernelInfo("simple_kernel_1");
    EXPECT_TRUE(pKernelInfo->kernelArgInfo[0].isReadOnly);
}

TEST(PatchInfo, Constructor) {
    PatchInfo patchInfo;
    EXPECT_EQ(nullptr, patchInfo.interfaceDescriptorDataLoad);
    EXPECT_EQ(nullptr, patchInfo.localsurface);
    EXPECT_EQ(nullptr, patchInfo.mediavfestate);
    EXPECT_EQ(nullptr, patchInfo.mediaVfeStateSlot1);
    EXPECT_EQ(nullptr, patchInfo.interfaceDescriptorData);
    EXPECT_EQ(nullptr, patchInfo.samplerStateArray);
    EXPECT_EQ(nullptr, patchInfo.bindingTableState);
    EXPECT_EQ(nullptr, patchInfo.dataParameterStream);
    EXPECT_EQ(nullptr, patchInfo.threadPayload);
    EXPECT_EQ(nullptr, patchInfo.executionEnvironment);
    EXPECT_EQ(nullptr, patchInfo.pKernelAttributesInfo);
    EXPECT_EQ(nullptr, patchInfo.pAllocateStatelessPrivateSurface);
    EXPECT_EQ(nullptr, patchInfo.pAllocateStatelessConstantMemorySurfaceWithInitialization);
    EXPECT_EQ(nullptr, patchInfo.pAllocateStatelessGlobalMemorySurfaceWithInitialization);
    EXPECT_EQ(nullptr, patchInfo.pAllocateStatelessPrintfSurface);
    EXPECT_EQ(nullptr, patchInfo.pAllocateStatelessEventPoolSurface);
    EXPECT_EQ(nullptr, patchInfo.pAllocateStatelessDefaultDeviceQueueSurface);
}

typedef Test<ClDeviceFixture> KernelPrivateSurfaceTest;
typedef Test<ClDeviceFixture> KernelGlobalSurfaceTest;
typedef Test<ClDeviceFixture> KernelConstantSurfaceTest;

struct KernelWithDeviceQueueFixture : public ClDeviceFixture,
                                      public DeviceQueueFixture,
                                      public testing::Test {
    void SetUp() override {
        ClDeviceFixture::SetUp();
        DeviceQueueFixture::SetUp(&context, pClDevice);
    }
    void TearDown() override {
        DeviceQueueFixture::TearDown();
        ClDeviceFixture::TearDown();
    }

    MockContext context;
};

typedef KernelWithDeviceQueueFixture KernelDefaultDeviceQueueSurfaceTest;
typedef KernelWithDeviceQueueFixture KernelEventPoolSurfaceTest;
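
// CommandStreamReceiverMock below is a minimal CSR: makeResident()/makeNonResident()
// record each allocation in the residency map (keyed by its CPU pointer) so tests can
// check what a kernel makes resident, while flush/flushTask/blitBuffer are no-ops.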

class CommandStreamReceiverMock : public CommandStreamReceiver {
    typedef CommandStreamReceiver BaseClass;

  public:
    using CommandStreamReceiver::executionEnvironment;

    using BaseClass::CommandStreamReceiver;

    bool isMultiOsContextCapable() const override { return false; }

    CommandStreamReceiverMock() : BaseClass(*(new ExecutionEnvironment), 0) {
        this->mockExecutionEnvironment.reset(&this->executionEnvironment);
        executionEnvironment.prepareRootDeviceEnvironments(1);
        executionEnvironment.rootDeviceEnvironments[0]->setHwInfo(defaultHwInfo.get());
        executionEnvironment.initializeMemoryManager();
    }

    void makeResident(GraphicsAllocation &graphicsAllocation) override {
        residency[graphicsAllocation.getUnderlyingBuffer()] = graphicsAllocation.getUnderlyingBufferSize();
        if (passResidencyCallToBaseClass) {
            CommandStreamReceiver::makeResident(graphicsAllocation);
        }
    }

    void makeNonResident(GraphicsAllocation &graphicsAllocation) override {
        residency.erase(graphicsAllocation.getUnderlyingBuffer());
        if (passResidencyCallToBaseClass) {
            CommandStreamReceiver::makeNonResident(graphicsAllocation);
        }
    }

    bool flush(BatchBuffer &batchBuffer, ResidencyContainer &allocationsForResidency) override {
        return true;
    }

    void waitForTaskCountWithKmdNotifyFallback(uint32_t taskCountToWait, FlushStamp flushStampToWait, bool quickKmdSleep, bool forcePowerSavingMode) override {
    }
    uint32_t blitBuffer(const BlitPropertiesContainer &blitPropertiesContainer, bool blocking, bool profilingEnabled) override { return taskCount; };

    CompletionStamp flushTask(
        LinearStream &commandStream,
        size_t commandStreamStart,
        const IndirectHeap &dsh,
        const IndirectHeap &ioh,
        const IndirectHeap &ssh,
        uint32_t taskLevel,
        DispatchFlags &dispatchFlags,
        Device &device) override {
        CompletionStamp cs = {};
        return cs;
    }

    bool flushBatchedSubmissions() override { return true; }

    CommandStreamReceiverType getType() override {
        return CommandStreamReceiverType::CSR_HW;
    }

    void programHardwareContext(LinearStream &cmdStream) override {}
    size_t getCmdsSizeForHardwareContext() const override {
        return 0;
    }
    std::map<const void *, size_t> residency;
    bool passResidencyCallToBaseClass = true;
    std::unique_ptr<ExecutionEnvironment> mockExecutionEnvironment;
};

TEST_F(KernelPrivateSurfaceTest, WhenChangingResidencyThenCsrResidencySizeIsUpdated) {
    ASSERT_NE(nullptr, pDevice);

    // define kernel info
    auto pKernelInfo = std::make_unique<KernelInfo>();

    // setup private memory
    SPatchAllocateStatelessPrivateSurface tokenSPS;
    tokenSPS.SurfaceStateHeapOffset = 64;
    tokenSPS.DataParamOffset = 40;
    tokenSPS.DataParamSize = 8;
    tokenSPS.PerThreadPrivateMemorySize = 112;
    pKernelInfo->patchInfo.pAllocateStatelessPrivateSurface = &tokenSPS;

    SPatchDataParameterStream tokenDPS;
    tokenDPS.DataParameterStreamSize = 64;
    pKernelInfo->patchInfo.dataParameterStream = &tokenDPS;

    SPatchExecutionEnvironment tokenEE = {};
    tokenEE.CompiledSIMD8 = false;
    tokenEE.CompiledSIMD16 = false;
    tokenEE.CompiledSIMD32 = true;
    pKernelInfo->patchInfo.executionEnvironment = &tokenEE;

    // create kernel
    MockContext context;
    MockProgram program(*pDevice->getExecutionEnvironment(), &context, false, pDevice);
    MockKernel *pKernel = new MockKernel(&program, *pKernelInfo, *pClDevice);
    ASSERT_EQ(CL_SUCCESS, pKernel->initialize());

    // Test it
    auto executionEnvironment = pDevice->getExecutionEnvironment();
    std::unique_ptr<CommandStreamReceiverMock> csr(new CommandStreamReceiverMock(*executionEnvironment, 0));
    csr->setupContext(*pDevice->getDefaultEngine().osContext);
    csr->residency.clear();
    EXPECT_EQ(0u, csr->residency.size());

    pKernel->makeResident(*csr.get());
    EXPECT_EQ(1u, csr->residency.size());

    csr->makeSurfacePackNonResident(csr->getResidencyAllocations());
    EXPECT_EQ(0u, csr->residency.size());

    delete pKernel;
}
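
// The next test marks the private surface as still in use on the GPU by bumping its
// task count past the current tag value; destroying the kernel is then expected to
// defer the free by moving the allocation onto the CSR's temporary allocations list.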

TEST_F(KernelPrivateSurfaceTest, givenKernelWithPrivateSurfaceThatIsInUseByGpuWhenKernelIsBeingDestroyedThenAllocationIsAddedToDeferredFreeList) {
    auto pKernelInfo = std::make_unique<KernelInfo>();
    SPatchAllocateStatelessPrivateSurface tokenSPS;
    tokenSPS.SurfaceStateHeapOffset = 64;
    tokenSPS.DataParamOffset = 40;
    tokenSPS.DataParamSize = 8;
    tokenSPS.PerThreadPrivateMemorySize = 112;
    pKernelInfo->patchInfo.pAllocateStatelessPrivateSurface = &tokenSPS;

    SPatchDataParameterStream tokenDPS;
    tokenDPS.DataParameterStreamSize = 64;
    pKernelInfo->patchInfo.dataParameterStream = &tokenDPS;

    SPatchExecutionEnvironment tokenEE = {};
    tokenEE.CompiledSIMD32 = true;
    pKernelInfo->patchInfo.executionEnvironment = &tokenEE;

    MockContext context;
    MockProgram program(*pDevice->getExecutionEnvironment(), &context, false, pDevice);
    std::unique_ptr<MockKernel> pKernel(new MockKernel(&program, *pKernelInfo, *pClDevice));
    pKernel->initialize();

    auto &csr = pDevice->getGpgpuCommandStreamReceiver();

    auto privateSurface = pKernel->getPrivateSurface();
    auto tagAddress = csr.getTagAddress();

    privateSurface->updateTaskCount(*tagAddress + 1, csr.getOsContext().getContextId());

    EXPECT_TRUE(csr.getTemporaryAllocations().peekIsEmpty());
    pKernel.reset(nullptr);

    EXPECT_FALSE(csr.getTemporaryAllocations().peekIsEmpty());
    EXPECT_EQ(csr.getTemporaryAllocations().peekHead(), privateSurface);
}

TEST_F(KernelPrivateSurfaceTest, WhenPrivateSurfaceAllocationFailsThenOutOfResourcesErrorIsReturned) {
    ASSERT_NE(nullptr, pDevice);

    // define kernel info
    auto pKernelInfo = std::make_unique<KernelInfo>();

    // setup private memory
    SPatchAllocateStatelessPrivateSurface tokenSPS;
    tokenSPS.SurfaceStateHeapOffset = 64;
    tokenSPS.DataParamOffset = 40;
    tokenSPS.DataParamSize = 8;
    tokenSPS.PerThreadPrivateMemorySize = 112;
    pKernelInfo->patchInfo.pAllocateStatelessPrivateSurface = &tokenSPS;

    SPatchDataParameterStream tokenDPS;
    tokenDPS.DataParameterStreamSize = 64;
    pKernelInfo->patchInfo.dataParameterStream = &tokenDPS;

    SPatchExecutionEnvironment tokenEE = {};
    tokenEE.CompiledSIMD8 = false;
    tokenEE.CompiledSIMD16 = false;
    tokenEE.CompiledSIMD32 = true;
    pKernelInfo->patchInfo.executionEnvironment = &tokenEE;

    // create kernel
    MockContext context;
    MockProgram program(*pDevice->getExecutionEnvironment(), &context, false, pDevice);
    MemoryManagementFixture::InjectedFunction method = [&](size_t failureIndex) {
        MockKernel *pKernel = new MockKernel(&program, *pKernelInfo, *pClDevice);

        if (MemoryManagement::nonfailingAllocation == failureIndex) {
            EXPECT_EQ(CL_SUCCESS, pKernel->initialize());
        } else {
            EXPECT_EQ(CL_OUT_OF_RESOURCES, pKernel->initialize());
        }
        delete pKernel;
    };
    auto f = new MemoryManagementFixture();
    f->SetUp();
    f->injectFailures(method);
    f->TearDown();
    delete f;
}

TEST_F(KernelPrivateSurfaceTest, given32BitDeviceWhenKernelIsCreatedThenPrivateSurfaceIs32BitAllocation) {
    if (is64bit) {
        pDevice->getMemoryManager()->setForce32BitAllocations(true);

        // define kernel info
        auto pKernelInfo = std::make_unique<KernelInfo>();

        // setup private memory
        SPatchAllocateStatelessPrivateSurface tokenSPS;
        tokenSPS.SurfaceStateHeapOffset = 64;
        tokenSPS.DataParamOffset = 40;
        tokenSPS.DataParamSize = 4;
        tokenSPS.PerThreadPrivateMemorySize = 112;
        pKernelInfo->patchInfo.pAllocateStatelessPrivateSurface = &tokenSPS;

        SPatchDataParameterStream tokenDPS;
        tokenDPS.DataParameterStreamSize = 64;
        pKernelInfo->patchInfo.dataParameterStream = &tokenDPS;

        SPatchExecutionEnvironment tokenEE = {};
        tokenEE.CompiledSIMD8 = false;
        tokenEE.CompiledSIMD16 = false;
        tokenEE.CompiledSIMD32 = true;
        pKernelInfo->patchInfo.executionEnvironment = &tokenEE;

        // create kernel
        MockContext context;
        MockProgram program(*pDevice->getExecutionEnvironment(), &context, false, pDevice);
        MockKernel *pKernel = new MockKernel(&program, *pKernelInfo, *pClDevice);

        ASSERT_EQ(CL_SUCCESS, pKernel->initialize());

        EXPECT_TRUE(pKernel->getPrivateSurface()->is32BitAllocation());

        delete pKernel;
    }
}
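
// Stateful path: with usesSsh/requiresSshForBuffers set, initialize() is expected to
// copy the kernel's SSH and patch the private surface GPU address into the
// RENDER_SURFACE_STATE located at the token's SurfaceStateHeapOffset.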

HWTEST_F(KernelPrivateSurfaceTest, givenStatefulKernelWhenKernelIsCreatedThenPrivateMemorySurfaceStateIsPatchedWithCpuAddress) {

    // define kernel info
    auto pKernelInfo = std::make_unique<KernelInfo>();

    SPatchExecutionEnvironment tokenEE = {};
    tokenEE.CompiledSIMD8 = false;
    tokenEE.CompiledSIMD16 = false;
    tokenEE.CompiledSIMD32 = true;
    pKernelInfo->patchInfo.executionEnvironment = &tokenEE;

    // setup private memory
    SPatchAllocateStatelessPrivateSurface AllocateStatelessPrivateMemorySurface;
    AllocateStatelessPrivateMemorySurface.SurfaceStateHeapOffset = 0;
    AllocateStatelessPrivateMemorySurface.DataParamOffset = 0;
    AllocateStatelessPrivateMemorySurface.DataParamSize = 8;
    AllocateStatelessPrivateMemorySurface.PerThreadPrivateMemorySize = 16;

    pKernelInfo->patchInfo.pAllocateStatelessPrivateSurface = &AllocateStatelessPrivateMemorySurface;

    MockContext context;
    MockProgram program(*pDevice->getExecutionEnvironment(), &context, false, pDevice);

    // create kernel
    MockKernel *pKernel = new MockKernel(&program, *pKernelInfo, *pClDevice);

    // setup surface state heap
    char surfaceStateHeap[0x80];
    pKernelInfo->heapInfo.pSsh = surfaceStateHeap;
    pKernelInfo->heapInfo.SurfaceStateHeapSize = sizeof(surfaceStateHeap);

    // define stateful path
    pKernelInfo->usesSsh = true;
    pKernelInfo->requiresSshForBuffers = true;

    ASSERT_EQ(CL_SUCCESS, pKernel->initialize());

    EXPECT_NE(0u, pKernel->getSurfaceStateHeapSize());

    auto bufferAddress = pKernel->getPrivateSurface()->getGpuAddress();

    typedef typename FamilyType::RENDER_SURFACE_STATE RENDER_SURFACE_STATE;
    auto surfaceState = reinterpret_cast<const RENDER_SURFACE_STATE *>(
        ptrOffset(pKernel->getSurfaceStateHeap(),
                  pKernelInfo->patchInfo.pAllocateStatelessPrivateSurface->SurfaceStateHeapOffset));
    auto surfaceAddress = surfaceState->getSurfaceBaseAddress();

    EXPECT_EQ(bufferAddress, surfaceAddress);

    delete pKernel;
}

TEST_F(KernelPrivateSurfaceTest, givenStatelessKernelWhenKernelIsCreatedThenPrivateMemorySurfaceStateIsNotPatched) {

    // define kernel info
    auto pKernelInfo = std::make_unique<KernelInfo>();

    SPatchExecutionEnvironment tokenEE = {};
    tokenEE.CompiledSIMD8 = false;
    tokenEE.CompiledSIMD16 = false;
    tokenEE.CompiledSIMD32 = true;
    pKernelInfo->patchInfo.executionEnvironment = &tokenEE;

    // setup global memory
    char buffer[16];
    MockGraphicsAllocation gfxAlloc(buffer, sizeof(buffer));

    MockContext context;
    MockProgram program(*pDevice->getExecutionEnvironment(), &context, false, pDevice);
    program.setConstantSurface(&gfxAlloc);

    // create kernel
    MockKernel *pKernel = new MockKernel(&program, *pKernelInfo, *pClDevice);

    // define stateful path
    pKernelInfo->usesSsh = false;
    pKernelInfo->requiresSshForBuffers = false;

    ASSERT_EQ(CL_SUCCESS, pKernel->initialize());

    EXPECT_EQ(0u, pKernel->getSurfaceStateHeapSize());
    EXPECT_EQ(nullptr, pKernel->getSurfaceStateHeap());

    program.setConstantSurface(nullptr);
    delete pKernel;
}

TEST_F(KernelPrivateSurfaceTest, givenNullDataParameterStreamGetConstantBufferSizeReturnsZero) {
    auto pKernelInfo = std::make_unique<KernelInfo>();

    EXPECT_EQ(0u, pKernelInfo->getConstantBufferSize());
}

TEST_F(KernelPrivateSurfaceTest, givenNonNullDataParameterStreamGetConstantBufferSizeReturnsCorrectSize) {
    auto pKernelInfo = std::make_unique<KernelInfo>();

    SPatchDataParameterStream tokenDPS;
    tokenDPS.DataParameterStreamSize = 64;
    pKernelInfo->patchInfo.dataParameterStream = &tokenDPS;

    EXPECT_EQ(64u, pKernelInfo->getConstantBufferSize());
}

TEST_F(KernelPrivateSurfaceTest, GivenKernelWhenPrivateSurfaceTooBigAndGpuPointerSize4ThenReturnOutOfResources) {
    auto pAllocateStatelessPrivateSurface = std::unique_ptr<SPatchAllocateStatelessPrivateSurface>(new SPatchAllocateStatelessPrivateSurface());
    pAllocateStatelessPrivateSurface->PerThreadPrivateMemorySize = std::numeric_limits<uint32_t>::max();
    auto executionEnvironment = std::unique_ptr<SPatchExecutionEnvironment>(new SPatchExecutionEnvironment());
    *executionEnvironment = {};
    executionEnvironment->CompiledSIMD32 = 32;
    auto pKernelInfo = std::make_unique<KernelInfo>();
    pKernelInfo->patchInfo.pAllocateStatelessPrivateSurface = pAllocateStatelessPrivateSurface.get();
    pKernelInfo->patchInfo.executionEnvironment = executionEnvironment.get();
    MockContext context;
    MockProgram program(*pDevice->getExecutionEnvironment(), &context, false, pDevice);
    std::unique_ptr<MockKernel> pKernel(new MockKernel(&program, *pKernelInfo, *pClDevice));
    pKernelInfo->gpuPointerSize = 4;
    pDevice->getMemoryManager()->setForce32BitAllocations(false);
    if (pDevice->getDeviceInfo().computeUnitsUsedForScratch == 0)
        pDevice->deviceInfo.computeUnitsUsedForScratch = 120;
    EXPECT_EQ(CL_OUT_OF_RESOURCES, pKernel->initialize());
}

TEST_F(KernelPrivateSurfaceTest, GivenKernelWhenPrivateSurfaceTooBigAndGpuPointerSize4And32BitAllocationsThenReturnOutOfResources) {
    auto pAllocateStatelessPrivateSurface = std::unique_ptr<SPatchAllocateStatelessPrivateSurface>(new SPatchAllocateStatelessPrivateSurface());
    pAllocateStatelessPrivateSurface->PerThreadPrivateMemorySize = std::numeric_limits<uint32_t>::max();
    auto executionEnvironment = std::unique_ptr<SPatchExecutionEnvironment>(new SPatchExecutionEnvironment());
    *executionEnvironment = {};
    executionEnvironment->CompiledSIMD32 = 32;
    auto pKernelInfo = std::make_unique<KernelInfo>();
    pKernelInfo->patchInfo.pAllocateStatelessPrivateSurface = pAllocateStatelessPrivateSurface.get();
    pKernelInfo->patchInfo.executionEnvironment = executionEnvironment.get();
    MockContext context;
    MockProgram program(*pDevice->getExecutionEnvironment(), &context, false, pDevice);
    std::unique_ptr<MockKernel> pKernel(new MockKernel(&program, *pKernelInfo, *pClDevice));
    pKernelInfo->gpuPointerSize = 4;
    pDevice->getMemoryManager()->setForce32BitAllocations(true);
    if (pDevice->getDeviceInfo().computeUnitsUsedForScratch == 0)
        pDevice->deviceInfo.computeUnitsUsedForScratch = 120;
    EXPECT_EQ(CL_OUT_OF_RESOURCES, pKernel->initialize());
}

TEST_F(KernelPrivateSurfaceTest, GivenKernelWhenPrivateSurfaceTooBigAndGpuPointerSize8And32BitAllocationsThenReturnOutOfResources) {
    auto pAllocateStatelessPrivateSurface = std::unique_ptr<SPatchAllocateStatelessPrivateSurface>(new SPatchAllocateStatelessPrivateSurface());
    pAllocateStatelessPrivateSurface->PerThreadPrivateMemorySize = std::numeric_limits<uint32_t>::max();
    auto executionEnvironment = std::unique_ptr<SPatchExecutionEnvironment>(new SPatchExecutionEnvironment());
    *executionEnvironment = {};
    executionEnvironment->CompiledSIMD32 = 32;
    auto pKernelInfo = std::make_unique<KernelInfo>();
    pKernelInfo->patchInfo.pAllocateStatelessPrivateSurface = pAllocateStatelessPrivateSurface.get();
    pKernelInfo->patchInfo.executionEnvironment = executionEnvironment.get();
    MockContext context;
    MockProgram program(*pDevice->getExecutionEnvironment(), &context, false, pDevice);
    std::unique_ptr<MockKernel> pKernel(new MockKernel(&program, *pKernelInfo, *pClDevice));
    pKernelInfo->gpuPointerSize = 8;
    pDevice->getMemoryManager()->setForce32BitAllocations(true);
    if (pDevice->getDeviceInfo().computeUnitsUsedForScratch == 0)
        pDevice->deviceInfo.computeUnitsUsedForScratch = 120;
    EXPECT_EQ(CL_OUT_OF_RESOURCES, pKernel->initialize());
}
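
// The global and constant surface tests below check that the surface address is
// patched either into the kernel's cross-thread data (stateless path) or into a
// RENDER_SURFACE_STATE in the surface state heap (stateful path).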

TEST_F(KernelGlobalSurfaceTest, givenBuiltInKernelWhenKernelIsCreatedThenGlobalSurfaceIsPatchedWithCpuAddress) {

    // define kernel info
    auto pKernelInfo = std::make_unique<KernelInfo>();

    // setup global memory
    SPatchAllocateStatelessGlobalMemorySurfaceWithInitialization AllocateStatelessGlobalMemorySurfaceWithInitialization;
    AllocateStatelessGlobalMemorySurfaceWithInitialization.DataParamOffset = 0;
    AllocateStatelessGlobalMemorySurfaceWithInitialization.DataParamSize = 8;

    pKernelInfo->patchInfo.pAllocateStatelessGlobalMemorySurfaceWithInitialization = &AllocateStatelessGlobalMemorySurfaceWithInitialization;

    SPatchDataParameterStream tempSPatchDataParameterStream;
    tempSPatchDataParameterStream.DataParameterStreamSize = 16;
    pKernelInfo->patchInfo.dataParameterStream = &tempSPatchDataParameterStream;

    SPatchExecutionEnvironment tokenEE = {};
    tokenEE.CompiledSIMD8 = false;
    tokenEE.CompiledSIMD16 = false;
    tokenEE.CompiledSIMD32 = true;
    pKernelInfo->patchInfo.executionEnvironment = &tokenEE;

    char buffer[16];

    GraphicsAllocation gfxAlloc(0, GraphicsAllocation::AllocationType::UNKNOWN, buffer, (uint64_t)buffer - 8u, 8, (osHandle)1u, MemoryPool::MemoryNull);
    uint64_t bufferAddress = (uint64_t)gfxAlloc.getUnderlyingBuffer();

    // create kernel
    MockContext context;
    MockProgram program(*pDevice->getExecutionEnvironment(), &context, false, pDevice);
    program.setGlobalSurface(&gfxAlloc);
    MockKernel *pKernel = new MockKernel(&program, *pKernelInfo, *pClDevice);

    pKernel->isBuiltIn = true;

    ASSERT_EQ(CL_SUCCESS, pKernel->initialize());

    EXPECT_EQ(bufferAddress, *(uint64_t *)pKernel->getCrossThreadData());

    program.setGlobalSurface(nullptr);
    delete pKernel;
}

TEST_F(KernelGlobalSurfaceTest, givenNDRangeKernelWhenKernelIsCreatedThenGlobalSurfaceIsPatchedWithBaseAddressOffset) {

    // define kernel info
    auto pKernelInfo = std::make_unique<KernelInfo>();

    // setup global memory
    SPatchAllocateStatelessGlobalMemorySurfaceWithInitialization AllocateStatelessGlobalMemorySurfaceWithInitialization;
    AllocateStatelessGlobalMemorySurfaceWithInitialization.DataParamOffset = 0;
    AllocateStatelessGlobalMemorySurfaceWithInitialization.DataParamSize = 8;

    pKernelInfo->patchInfo.pAllocateStatelessGlobalMemorySurfaceWithInitialization = &AllocateStatelessGlobalMemorySurfaceWithInitialization;

    SPatchDataParameterStream tempSPatchDataParameterStream;
    tempSPatchDataParameterStream.DataParameterStreamSize = 16;
    pKernelInfo->patchInfo.dataParameterStream = &tempSPatchDataParameterStream;

    SPatchExecutionEnvironment tokenEE = {};
    tokenEE.CompiledSIMD8 = false;
    tokenEE.CompiledSIMD16 = false;
    tokenEE.CompiledSIMD32 = true;
    pKernelInfo->patchInfo.executionEnvironment = &tokenEE;

    char buffer[16];

    GraphicsAllocation gfxAlloc(0, GraphicsAllocation::AllocationType::UNKNOWN, buffer, (uint64_t)buffer - 8u, 8, MemoryPool::MemoryNull);
    uint64_t bufferAddress = gfxAlloc.getGpuAddress();

    // create kernel
    MockProgram program(*pDevice->getExecutionEnvironment());
    program.setGlobalSurface(&gfxAlloc);
    MockKernel *pKernel = new MockKernel(&program, *pKernelInfo, *pClDevice);

    ASSERT_EQ(CL_SUCCESS, pKernel->initialize());

    EXPECT_EQ(bufferAddress, *(uint64_t *)pKernel->getCrossThreadData());

    program.setGlobalSurface(nullptr);

    delete pKernel;
}

HWTEST_F(KernelGlobalSurfaceTest, givenStatefulKernelWhenKernelIsCreatedThenGlobalMemorySurfaceStateIsPatchedWithCpuAddress) {

    // define kernel info
    auto pKernelInfo = std::make_unique<KernelInfo>();

    SPatchExecutionEnvironment tokenEE = {};
    tokenEE.CompiledSIMD8 = false;
    tokenEE.CompiledSIMD16 = false;
    tokenEE.CompiledSIMD32 = true;
    pKernelInfo->patchInfo.executionEnvironment = &tokenEE;

    // setup global memory
    SPatchAllocateStatelessGlobalMemorySurfaceWithInitialization AllocateStatelessGlobalMemorySurfaceWithInitialization;
    AllocateStatelessGlobalMemorySurfaceWithInitialization.SurfaceStateHeapOffset = 0;
    AllocateStatelessGlobalMemorySurfaceWithInitialization.DataParamOffset = 0;
    AllocateStatelessGlobalMemorySurfaceWithInitialization.DataParamSize = 8;

    pKernelInfo->patchInfo.pAllocateStatelessGlobalMemorySurfaceWithInitialization = &AllocateStatelessGlobalMemorySurfaceWithInitialization;

    char buffer[16];
    MockGraphicsAllocation gfxAlloc(buffer, sizeof(buffer));
    auto bufferAddress = gfxAlloc.getGpuAddress();

    MockContext context;
    MockProgram program(*pDevice->getExecutionEnvironment(), &context, false, pDevice);
    program.setGlobalSurface(&gfxAlloc);

    // create kernel
    MockKernel *pKernel = new MockKernel(&program, *pKernelInfo, *pClDevice);

    // setup surface state heap
    char surfaceStateHeap[0x80];
    pKernelInfo->heapInfo.pSsh = surfaceStateHeap;
    pKernelInfo->heapInfo.SurfaceStateHeapSize = sizeof(surfaceStateHeap);

    // define stateful path
    pKernelInfo->usesSsh = true;
    pKernelInfo->requiresSshForBuffers = true;

    ASSERT_EQ(CL_SUCCESS, pKernel->initialize());

    EXPECT_NE(0u, pKernel->getSurfaceStateHeapSize());

    typedef typename FamilyType::RENDER_SURFACE_STATE RENDER_SURFACE_STATE;
    auto surfaceState = reinterpret_cast<const RENDER_SURFACE_STATE *>(
        ptrOffset(pKernel->getSurfaceStateHeap(),
                  pKernelInfo->patchInfo.pAllocateStatelessGlobalMemorySurfaceWithInitialization->SurfaceStateHeapOffset));
    auto surfaceAddress = surfaceState->getSurfaceBaseAddress();

    EXPECT_EQ(bufferAddress, surfaceAddress);

    program.setGlobalSurface(nullptr);
    delete pKernel;
}

TEST_F(KernelGlobalSurfaceTest, givenStatelessKernelWhenKernelIsCreatedThenGlobalMemorySurfaceStateIsNotPatched) {

    // define kernel info
    auto pKernelInfo = std::make_unique<KernelInfo>();

    SPatchExecutionEnvironment tokenEE = {};
    tokenEE.CompiledSIMD8 = false;
    tokenEE.CompiledSIMD16 = false;
    tokenEE.CompiledSIMD32 = true;
    pKernelInfo->patchInfo.executionEnvironment = &tokenEE;

    // setup global memory
    char buffer[16];
    MockGraphicsAllocation gfxAlloc(buffer, sizeof(buffer));

    MockProgram program(*pDevice->getExecutionEnvironment());
    program.setGlobalSurface(&gfxAlloc);

    // create kernel
    MockKernel *pKernel = new MockKernel(&program, *pKernelInfo, *pClDevice);

    // define stateful path
    pKernelInfo->usesSsh = false;
    pKernelInfo->requiresSshForBuffers = false;

    ASSERT_EQ(CL_SUCCESS, pKernel->initialize());

    EXPECT_EQ(0u, pKernel->getSurfaceStateHeapSize());
    EXPECT_EQ(nullptr, pKernel->getSurfaceStateHeap());

    program.setGlobalSurface(nullptr);
    delete pKernel;
}

TEST_F(KernelConstantSurfaceTest, givenBuiltInKernelWhenKernelIsCreatedThenConstantSurfaceIsPatchedWithCpuAddress) {

    // define kernel info
    auto pKernelInfo = std::make_unique<KernelInfo>();

    // setup constant memory
    SPatchAllocateStatelessConstantMemorySurfaceWithInitialization AllocateStatelessConstantMemorySurfaceWithInitialization;
    AllocateStatelessConstantMemorySurfaceWithInitialization.DataParamOffset = 0;
    AllocateStatelessConstantMemorySurfaceWithInitialization.DataParamSize = 8;

    pKernelInfo->patchInfo.pAllocateStatelessConstantMemorySurfaceWithInitialization = &AllocateStatelessConstantMemorySurfaceWithInitialization;

    SPatchDataParameterStream tempSPatchDataParameterStream;
    tempSPatchDataParameterStream.DataParameterStreamSize = 16;
    pKernelInfo->patchInfo.dataParameterStream = &tempSPatchDataParameterStream;

    SPatchExecutionEnvironment tokenEE = {};
    tokenEE.CompiledSIMD8 = false;
    tokenEE.CompiledSIMD16 = false;
    tokenEE.CompiledSIMD32 = true;
    pKernelInfo->patchInfo.executionEnvironment = &tokenEE;

    char buffer[16];

    GraphicsAllocation gfxAlloc(0, GraphicsAllocation::AllocationType::UNKNOWN, buffer, (uint64_t)buffer - 8u, 8, (osHandle)1u, MemoryPool::MemoryNull);
    uint64_t bufferAddress = (uint64_t)gfxAlloc.getUnderlyingBuffer();

    // create kernel
    MockProgram program(*pDevice->getExecutionEnvironment());
    program.setConstantSurface(&gfxAlloc);
    MockKernel *pKernel = new MockKernel(&program, *pKernelInfo, *pClDevice);

    pKernel->isBuiltIn = true;

    ASSERT_EQ(CL_SUCCESS, pKernel->initialize());

    EXPECT_EQ(bufferAddress, *(uint64_t *)pKernel->getCrossThreadData());

    program.setConstantSurface(nullptr);
    delete pKernel;
}

TEST_F(KernelConstantSurfaceTest, givenNDRangeKernelWhenKernelIsCreatedThenConstantSurfaceIsPatchedWithBaseAddressOffset) {

    // define kernel info
    auto pKernelInfo = std::make_unique<KernelInfo>();

    // setup constant memory
    SPatchAllocateStatelessConstantMemorySurfaceWithInitialization AllocateStatelessConstantMemorySurfaceWithInitialization;
    AllocateStatelessConstantMemorySurfaceWithInitialization.DataParamOffset = 0;
    AllocateStatelessConstantMemorySurfaceWithInitialization.DataParamSize = 8;

    pKernelInfo->patchInfo.pAllocateStatelessConstantMemorySurfaceWithInitialization = &AllocateStatelessConstantMemorySurfaceWithInitialization;

    SPatchDataParameterStream tempSPatchDataParameterStream;
    tempSPatchDataParameterStream.DataParameterStreamSize = 16;
    pKernelInfo->patchInfo.dataParameterStream = &tempSPatchDataParameterStream;

    SPatchExecutionEnvironment tokenEE = {};
    tokenEE.CompiledSIMD8 = false;
    tokenEE.CompiledSIMD16 = false;
    tokenEE.CompiledSIMD32 = true;
    pKernelInfo->patchInfo.executionEnvironment = &tokenEE;

    char buffer[16];

    GraphicsAllocation gfxAlloc(0, GraphicsAllocation::AllocationType::UNKNOWN, buffer, (uint64_t)buffer - 8u, 8, MemoryPool::MemoryNull);
    uint64_t bufferAddress = gfxAlloc.getGpuAddress();

    // create kernel
    MockProgram program(*pDevice->getExecutionEnvironment());
    program.setConstantSurface(&gfxAlloc);
    MockKernel *pKernel = new MockKernel(&program, *pKernelInfo, *pClDevice);

    ASSERT_EQ(CL_SUCCESS, pKernel->initialize());

    EXPECT_EQ(bufferAddress, *(uint64_t *)pKernel->getCrossThreadData());

    program.setConstantSurface(nullptr);

    delete pKernel;
}

HWTEST_F(KernelConstantSurfaceTest, givenStatefulKernelWhenKernelIsCreatedThenConstantMemorySurfaceStateIsPatchedWithCpuAddress) {

    // define kernel info
    auto pKernelInfo = std::make_unique<KernelInfo>();

    SPatchExecutionEnvironment tokenEE = {};
    tokenEE.CompiledSIMD8 = false;
    tokenEE.CompiledSIMD16 = false;
    tokenEE.CompiledSIMD32 = true;
    pKernelInfo->patchInfo.executionEnvironment = &tokenEE;

    // setup constant memory
    SPatchAllocateStatelessConstantMemorySurfaceWithInitialization AllocateStatelessConstantMemorySurfaceWithInitialization;
    AllocateStatelessConstantMemorySurfaceWithInitialization.SurfaceStateHeapOffset = 0;
    AllocateStatelessConstantMemorySurfaceWithInitialization.DataParamOffset = 0;
    AllocateStatelessConstantMemorySurfaceWithInitialization.DataParamSize = 8;

    pKernelInfo->patchInfo.pAllocateStatelessConstantMemorySurfaceWithInitialization = &AllocateStatelessConstantMemorySurfaceWithInitialization;

    char buffer[16];
    MockGraphicsAllocation gfxAlloc(buffer, sizeof(buffer));
    auto bufferAddress = gfxAlloc.getGpuAddress();

    MockContext context;
    MockProgram program(*pDevice->getExecutionEnvironment(), &context, false, pDevice);
    program.setConstantSurface(&gfxAlloc);

    // create kernel
    MockKernel *pKernel = new MockKernel(&program, *pKernelInfo, *pClDevice);

    // setup surface state heap
    char surfaceStateHeap[0x80];
    pKernelInfo->heapInfo.pSsh = surfaceStateHeap;
    pKernelInfo->heapInfo.SurfaceStateHeapSize = sizeof(surfaceStateHeap);

    // define stateful path
    pKernelInfo->usesSsh = true;
    pKernelInfo->requiresSshForBuffers = true;

    ASSERT_EQ(CL_SUCCESS, pKernel->initialize());

    EXPECT_NE(0u, pKernel->getSurfaceStateHeapSize());

    typedef typename FamilyType::RENDER_SURFACE_STATE RENDER_SURFACE_STATE;
    auto surfaceState = reinterpret_cast<const RENDER_SURFACE_STATE *>(
        ptrOffset(pKernel->getSurfaceStateHeap(),
                  pKernelInfo->patchInfo.pAllocateStatelessConstantMemorySurfaceWithInitialization->SurfaceStateHeapOffset));
    auto surfaceAddress = surfaceState->getSurfaceBaseAddress();

    EXPECT_EQ(bufferAddress, surfaceAddress);

    program.setConstantSurface(nullptr);
    delete pKernel;
}
|
|
|
|
|
|
|
|
TEST_F(KernelConstantSurfaceTest, givenStatelessKernelWhenKernelIsCreatedThenConstantMemorySurfaceStateIsNotPatched) {
    // define kernel info
    auto pKernelInfo = std::make_unique<KernelInfo>();

    SPatchExecutionEnvironment tokenEE = {};
    tokenEE.CompiledSIMD8 = false;
    tokenEE.CompiledSIMD16 = false;
    tokenEE.CompiledSIMD32 = true;
    pKernelInfo->patchInfo.executionEnvironment = &tokenEE;

    // setup constant memory
    char buffer[16];
    MockGraphicsAllocation gfxAlloc(buffer, sizeof(buffer));

    MockProgram program(*pDevice->getExecutionEnvironment());
    program.setConstantSurface(&gfxAlloc);

    // create kernel
    MockKernel *pKernel = new MockKernel(&program, *pKernelInfo, *pClDevice);

    // define stateless path
    pKernelInfo->usesSsh = false;
    pKernelInfo->requiresSshForBuffers = false;

    ASSERT_EQ(CL_SUCCESS, pKernel->initialize());

    EXPECT_EQ(0u, pKernel->getSurfaceStateHeapSize());
    EXPECT_EQ(nullptr, pKernel->getSurfaceStateHeap());

    program.setConstantSurface(nullptr);
    delete pKernel;
}

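// Note: the event pool and default device queue tests below exercise the device-side
// enqueue patch tokens; they are restricted to GEN8-derived command sets via HWCMDTEST_F.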
HWCMDTEST_F(IGFX_GEN8_CORE, KernelEventPoolSurfaceTest, givenStatefulKernelWhenKernelIsCreatedThenEventPoolSurfaceStateIsPatchedWithNullSurface) {
    // define kernel info
    auto pKernelInfo = std::make_unique<KernelInfo>();

    SPatchExecutionEnvironment tokenEE = {};
    tokenEE.CompiledSIMD8 = false;
    tokenEE.CompiledSIMD16 = false;
    tokenEE.CompiledSIMD32 = true;
    pKernelInfo->patchInfo.executionEnvironment = &tokenEE;

    // setup event pool surface
    SPatchAllocateStatelessEventPoolSurface AllocateStatelessEventPoolSurface;
    AllocateStatelessEventPoolSurface.SurfaceStateHeapOffset = 0;
    AllocateStatelessEventPoolSurface.DataParamOffset = 0;
    AllocateStatelessEventPoolSurface.DataParamSize = 8;

    pKernelInfo->patchInfo.pAllocateStatelessEventPoolSurface = &AllocateStatelessEventPoolSurface;

    // create kernel
    MockProgram program(*pDevice->getExecutionEnvironment(), &context, false, pDevice);
    MockKernel *pKernel = new MockKernel(&program, *pKernelInfo, *pClDevice);

    // setup surface state heap
    char surfaceStateHeap[0x80];
    pKernelInfo->heapInfo.pSsh = surfaceStateHeap;
    pKernelInfo->heapInfo.SurfaceStateHeapSize = sizeof(surfaceStateHeap);

    // define stateful path
    pKernelInfo->usesSsh = true;
    pKernelInfo->requiresSshForBuffers = true;

    ASSERT_EQ(CL_SUCCESS, pKernel->initialize());

    EXPECT_NE(0u, pKernel->getSurfaceStateHeapSize());

    typedef typename FamilyType::RENDER_SURFACE_STATE RENDER_SURFACE_STATE;
    auto surfaceState = reinterpret_cast<const RENDER_SURFACE_STATE *>(
        ptrOffset(pKernel->getSurfaceStateHeap(),
                  pKernelInfo->patchInfo.pAllocateStatelessEventPoolSurface->SurfaceStateHeapOffset));
    auto surfaceAddress = surfaceState->getSurfaceBaseAddress();

    EXPECT_EQ(0u, surfaceAddress);
    auto surfaceType = surfaceState->getSurfaceType();
    EXPECT_EQ(RENDER_SURFACE_STATE::SURFACE_TYPE_SURFTYPE_NULL, surfaceType);

    delete pKernel;
}

HWCMDTEST_F(IGFX_GEN8_CORE, KernelEventPoolSurfaceTest, givenStatefulKernelWhenEventPoolIsPatchedThenEventPoolSurfaceStateIsProgrammed) {
    // define kernel info
    auto pKernelInfo = std::make_unique<KernelInfo>();

    SPatchExecutionEnvironment tokenEE = {};
    tokenEE.CompiledSIMD8 = false;
    tokenEE.CompiledSIMD16 = false;
    tokenEE.CompiledSIMD32 = true;
    pKernelInfo->patchInfo.executionEnvironment = &tokenEE;

    // setup event pool surface
    SPatchAllocateStatelessEventPoolSurface AllocateStatelessEventPoolSurface;
    AllocateStatelessEventPoolSurface.SurfaceStateHeapOffset = 0;
    AllocateStatelessEventPoolSurface.DataParamOffset = 0;
    AllocateStatelessEventPoolSurface.DataParamSize = 8;

    pKernelInfo->patchInfo.pAllocateStatelessEventPoolSurface = &AllocateStatelessEventPoolSurface;

    // create kernel
    MockProgram program(*pDevice->getExecutionEnvironment(), &context, false, pDevice);
    MockKernel *pKernel = new MockKernel(&program, *pKernelInfo, *pClDevice);

    // setup surface state heap
    char surfaceStateHeap[0x80];
    pKernelInfo->heapInfo.pSsh = surfaceStateHeap;
    pKernelInfo->heapInfo.SurfaceStateHeapSize = sizeof(surfaceStateHeap);

    // define stateful path
    pKernelInfo->usesSsh = true;
    pKernelInfo->requiresSshForBuffers = true;

    ASSERT_EQ(CL_SUCCESS, pKernel->initialize());

    pKernel->patchEventPool(pDevQueue);

    typedef typename FamilyType::RENDER_SURFACE_STATE RENDER_SURFACE_STATE;
    auto surfaceState = reinterpret_cast<const RENDER_SURFACE_STATE *>(
        ptrOffset(pKernel->getSurfaceStateHeap(),
                  pKernelInfo->patchInfo.pAllocateStatelessEventPoolSurface->SurfaceStateHeapOffset));
    auto surfaceAddress = surfaceState->getSurfaceBaseAddress();

    EXPECT_EQ(pDevQueue->getEventPoolBuffer()->getGpuAddress(), surfaceAddress);
    auto surfaceType = surfaceState->getSurfaceType();
    EXPECT_EQ(RENDER_SURFACE_STATE::SURFACE_TYPE_SURFTYPE_BUFFER, surfaceType);

    delete pKernel;
}

HWCMDTEST_F(IGFX_GEN8_CORE, KernelEventPoolSurfaceTest, givenKernelWithNullEventPoolInKernelInfoWhenEventPoolIsPatchedThenAddressIsNotPatched) {
    // define kernel info
    auto pKernelInfo = std::make_unique<KernelInfo>();

    SPatchExecutionEnvironment tokenEE = {};
    tokenEE.CompiledSIMD8 = false;
    tokenEE.CompiledSIMD16 = false;
    tokenEE.CompiledSIMD32 = true;
    pKernelInfo->patchInfo.executionEnvironment = &tokenEE;

    pKernelInfo->patchInfo.pAllocateStatelessEventPoolSurface = nullptr;

    // create kernel
    MockProgram program(*pDevice->getExecutionEnvironment());
    MockKernel *pKernel = new MockKernel(&program, *pKernelInfo, *pClDevice);

    // define stateless path
    pKernelInfo->usesSsh = false;
    pKernelInfo->requiresSshForBuffers = false;

    uint64_t crossThreadData = 123;

    pKernel->setCrossThreadData(&crossThreadData, sizeof(uint64_t));

    pKernel->patchEventPool(pDevQueue);

    EXPECT_EQ(123u, *(uint64_t *)pKernel->getCrossThreadData());

    delete pKernel;
}

HWCMDTEST_F(IGFX_GEN8_CORE, KernelEventPoolSurfaceTest, givenStatelessKernelWhenKernelIsCreatedThenEventPoolSurfaceStateIsNotPatched) {
    // define kernel info
    auto pKernelInfo = std::make_unique<KernelInfo>();

    SPatchExecutionEnvironment tokenEE = {};
    tokenEE.CompiledSIMD8 = false;
    tokenEE.CompiledSIMD16 = false;
    tokenEE.CompiledSIMD32 = true;
    pKernelInfo->patchInfo.executionEnvironment = &tokenEE;

    // setup event pool surface
    SPatchAllocateStatelessEventPoolSurface AllocateStatelessEventPoolSurface;
    AllocateStatelessEventPoolSurface.SurfaceStateHeapOffset = 0;
    AllocateStatelessEventPoolSurface.DataParamOffset = 0;
    AllocateStatelessEventPoolSurface.DataParamSize = 8;

    pKernelInfo->patchInfo.pAllocateStatelessEventPoolSurface = &AllocateStatelessEventPoolSurface;

    // create kernel
    MockProgram program(*pDevice->getExecutionEnvironment());
    MockKernel *pKernel = new MockKernel(&program, *pKernelInfo, *pClDevice);

    // define stateless path
    pKernelInfo->usesSsh = false;
    pKernelInfo->requiresSshForBuffers = false;

    ASSERT_EQ(CL_SUCCESS, pKernel->initialize());
    if (pClDevice->areOcl21FeaturesSupported() == false) {
        EXPECT_EQ(0u, pKernel->getSurfaceStateHeapSize());
    } else {
    }

    delete pKernel;
}

HWCMDTEST_F(IGFX_GEN8_CORE, KernelEventPoolSurfaceTest, givenStatelessKernelWhenEventPoolIsPatchedThenCrossThreadDataIsPatched) {
    // define kernel info
    auto pKernelInfo = std::make_unique<KernelInfo>();

    SPatchExecutionEnvironment tokenEE = {};
    tokenEE.CompiledSIMD8 = false;
    tokenEE.CompiledSIMD16 = false;
    tokenEE.CompiledSIMD32 = true;
    pKernelInfo->patchInfo.executionEnvironment = &tokenEE;

    // setup event pool surface
    SPatchAllocateStatelessEventPoolSurface AllocateStatelessEventPoolSurface;
    AllocateStatelessEventPoolSurface.SurfaceStateHeapOffset = 0;
    AllocateStatelessEventPoolSurface.DataParamOffset = 0;
    AllocateStatelessEventPoolSurface.DataParamSize = 8;

    pKernelInfo->patchInfo.pAllocateStatelessEventPoolSurface = &AllocateStatelessEventPoolSurface;

    // create kernel
    MockProgram program(*pDevice->getExecutionEnvironment());
    MockKernel *pKernel = new MockKernel(&program, *pKernelInfo, *pClDevice);

    // define stateless path
    pKernelInfo->usesSsh = false;
    pKernelInfo->requiresSshForBuffers = false;

    uint64_t crossThreadData = 0;

    pKernel->setCrossThreadData(&crossThreadData, sizeof(uint64_t));

    pKernel->patchEventPool(pDevQueue);

    EXPECT_EQ(pDevQueue->getEventPoolBuffer()->getGpuAddressToPatch(), *(uint64_t *)pKernel->getCrossThreadData());

    delete pKernel;
}

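// Default device queue surface: same stateful/stateless split as the event pool tests
// above, but here the surface state (or the cross-thread slot) must point at the queue buffer.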
HWCMDTEST_F(IGFX_GEN8_CORE, KernelDefaultDeviceQueueSurfaceTest, givenStatefulKernelWhenKernelIsCreatedThenDefaultDeviceQueueSurfaceStateIsPatchedWithNullSurface) {
    // define kernel info
    auto pKernelInfo = std::make_unique<KernelInfo>();

    SPatchExecutionEnvironment tokenEE = {};
    tokenEE.CompiledSIMD8 = false;
    tokenEE.CompiledSIMD16 = false;
    tokenEE.CompiledSIMD32 = true;
    pKernelInfo->patchInfo.executionEnvironment = &tokenEE;

    // setup default device queue surface
    SPatchAllocateStatelessDefaultDeviceQueueSurface AllocateStatelessDefaultDeviceQueueSurface;
    AllocateStatelessDefaultDeviceQueueSurface.SurfaceStateHeapOffset = 0;
    AllocateStatelessDefaultDeviceQueueSurface.DataParamOffset = 0;
    AllocateStatelessDefaultDeviceQueueSurface.DataParamSize = 8;

    pKernelInfo->patchInfo.pAllocateStatelessDefaultDeviceQueueSurface = &AllocateStatelessDefaultDeviceQueueSurface;

    // create kernel
    MockProgram program(*pDevice->getExecutionEnvironment(), &context, false, pDevice);
    MockKernel *pKernel = new MockKernel(&program, *pKernelInfo, *pClDevice);

    // setup surface state heap
    char surfaceStateHeap[0x80];
    pKernelInfo->heapInfo.pSsh = surfaceStateHeap;
    pKernelInfo->heapInfo.SurfaceStateHeapSize = sizeof(surfaceStateHeap);

    // define stateful path
    pKernelInfo->usesSsh = true;
    pKernelInfo->requiresSshForBuffers = true;

    ASSERT_EQ(CL_SUCCESS, pKernel->initialize());

    EXPECT_NE(0u, pKernel->getSurfaceStateHeapSize());

    typedef typename FamilyType::RENDER_SURFACE_STATE RENDER_SURFACE_STATE;
    auto surfaceState = reinterpret_cast<const RENDER_SURFACE_STATE *>(
        ptrOffset(pKernel->getSurfaceStateHeap(),
                  pKernelInfo->patchInfo.pAllocateStatelessDefaultDeviceQueueSurface->SurfaceStateHeapOffset));
    auto surfaceAddress = surfaceState->getSurfaceBaseAddress();

    EXPECT_EQ(0u, surfaceAddress);
    auto surfaceType = surfaceState->getSurfaceType();
    EXPECT_EQ(RENDER_SURFACE_STATE::SURFACE_TYPE_SURFTYPE_NULL, surfaceType);

    delete pKernel;
}

HWCMDTEST_F(IGFX_GEN8_CORE, KernelDefaultDeviceQueueSurfaceTest, givenStatefulKernelWhenDefaultDeviceQueueIsPatchedThenSurfaceStateIsCorrectlyProgrammed) {
    // define kernel info
    auto pKernelInfo = std::make_unique<KernelInfo>();

    SPatchExecutionEnvironment tokenEE = {};
    tokenEE.CompiledSIMD8 = false;
    tokenEE.CompiledSIMD16 = false;
    tokenEE.CompiledSIMD32 = true;
    pKernelInfo->patchInfo.executionEnvironment = &tokenEE;

    // setup default device queue surface
    SPatchAllocateStatelessDefaultDeviceQueueSurface AllocateStatelessDefaultDeviceQueueSurface;
    AllocateStatelessDefaultDeviceQueueSurface.SurfaceStateHeapOffset = 0;
    AllocateStatelessDefaultDeviceQueueSurface.DataParamOffset = 0;
    AllocateStatelessDefaultDeviceQueueSurface.DataParamSize = 8;

    pKernelInfo->patchInfo.pAllocateStatelessDefaultDeviceQueueSurface = &AllocateStatelessDefaultDeviceQueueSurface;

    // create kernel
    MockProgram program(*pDevice->getExecutionEnvironment(), &context, false, pDevice);
    MockKernel *pKernel = new MockKernel(&program, *pKernelInfo, *pClDevice);

    // setup surface state heap
    char surfaceStateHeap[0x80];
    pKernelInfo->heapInfo.pSsh = surfaceStateHeap;
    pKernelInfo->heapInfo.SurfaceStateHeapSize = sizeof(surfaceStateHeap);

    // define stateful path
    pKernelInfo->usesSsh = true;
    pKernelInfo->requiresSshForBuffers = true;

    ASSERT_EQ(CL_SUCCESS, pKernel->initialize());

    pKernel->patchDefaultDeviceQueue(pDevQueue);

    EXPECT_NE(0u, pKernel->getSurfaceStateHeapSize());

    typedef typename FamilyType::RENDER_SURFACE_STATE RENDER_SURFACE_STATE;
    auto surfaceState = reinterpret_cast<const RENDER_SURFACE_STATE *>(
        ptrOffset(pKernel->getSurfaceStateHeap(),
                  pKernelInfo->patchInfo.pAllocateStatelessDefaultDeviceQueueSurface->SurfaceStateHeapOffset));
    auto surfaceAddress = surfaceState->getSurfaceBaseAddress();

    EXPECT_EQ(pDevQueue->getQueueBuffer()->getGpuAddress(), surfaceAddress);
    auto surfaceType = surfaceState->getSurfaceType();
    EXPECT_EQ(RENDER_SURFACE_STATE::SURFACE_TYPE_SURFTYPE_BUFFER, surfaceType);

    delete pKernel;
}

HWCMDTEST_F(IGFX_GEN8_CORE, KernelDefaultDeviceQueueSurfaceTest, givenStatelessKernelWhenKernelIsCreatedThenDefaultDeviceQueueSurfaceStateIsNotPatched) {
    // define kernel info
    auto pKernelInfo = std::make_unique<KernelInfo>();

    SPatchExecutionEnvironment tokenEE = {};
    tokenEE.CompiledSIMD8 = false;
    tokenEE.CompiledSIMD16 = false;
    tokenEE.CompiledSIMD32 = true;
    pKernelInfo->patchInfo.executionEnvironment = &tokenEE;

    // setup default device queue surface
    SPatchAllocateStatelessDefaultDeviceQueueSurface AllocateStatelessDefaultDeviceQueueSurface;
    AllocateStatelessDefaultDeviceQueueSurface.SurfaceStateHeapOffset = 0;
    AllocateStatelessDefaultDeviceQueueSurface.DataParamOffset = 0;
    AllocateStatelessDefaultDeviceQueueSurface.DataParamSize = 8;

    pKernelInfo->patchInfo.pAllocateStatelessDefaultDeviceQueueSurface = &AllocateStatelessDefaultDeviceQueueSurface;

    // create kernel
    MockProgram program(*pDevice->getExecutionEnvironment());
    MockKernel *pKernel = new MockKernel(&program, *pKernelInfo, *pClDevice);

    // define stateless path
    pKernelInfo->usesSsh = false;
    pKernelInfo->requiresSshForBuffers = false;

    ASSERT_EQ(CL_SUCCESS, pKernel->initialize());

    EXPECT_EQ(0u, pKernel->getSurfaceStateHeapSize());

    delete pKernel;
}

HWCMDTEST_F(IGFX_GEN8_CORE, KernelDefaultDeviceQueueSurfaceTest, givenKernelWithNullDeviceQueueKernelInfoWhenDefaultDeviceQueueIsPatchedThenAddressIsNotPatched) {
    // define kernel info
    auto pKernelInfo = std::make_unique<KernelInfo>();

    SPatchExecutionEnvironment tokenEE = {};
    tokenEE.CompiledSIMD8 = false;
    tokenEE.CompiledSIMD16 = false;
    tokenEE.CompiledSIMD32 = true;
    pKernelInfo->patchInfo.executionEnvironment = &tokenEE;

    pKernelInfo->patchInfo.pAllocateStatelessDefaultDeviceQueueSurface = nullptr;

    // create kernel
    MockProgram program(*pDevice->getExecutionEnvironment());
    MockKernel *pKernel = new MockKernel(&program, *pKernelInfo, *pClDevice);

    // define stateless path
    pKernelInfo->usesSsh = false;
    pKernelInfo->requiresSshForBuffers = false;

    uint64_t crossThreadData = 123;

    pKernel->setCrossThreadData(&crossThreadData, sizeof(uint64_t));

    pKernel->patchDefaultDeviceQueue(pDevQueue);

    EXPECT_EQ(123u, *(uint64_t *)pKernel->getCrossThreadData());

    delete pKernel;
}

HWCMDTEST_F(IGFX_GEN8_CORE, KernelDefaultDeviceQueueSurfaceTest, givenStatelessKernelWhenDefaultDeviceQueueIsPatchedThenCrossThreadDataIsPatched) {
    // define kernel info
    auto pKernelInfo = std::make_unique<KernelInfo>();

    SPatchExecutionEnvironment tokenEE = {};
    tokenEE.CompiledSIMD8 = false;
    tokenEE.CompiledSIMD16 = false;
    tokenEE.CompiledSIMD32 = true;
    pKernelInfo->patchInfo.executionEnvironment = &tokenEE;

    // setup default device queue surface
    SPatchAllocateStatelessDefaultDeviceQueueSurface AllocateStatelessDefaultDeviceQueueSurface;
    AllocateStatelessDefaultDeviceQueueSurface.SurfaceStateHeapOffset = 0;
    AllocateStatelessDefaultDeviceQueueSurface.DataParamOffset = 0;
    AllocateStatelessDefaultDeviceQueueSurface.DataParamSize = 8;

    pKernelInfo->patchInfo.pAllocateStatelessDefaultDeviceQueueSurface = &AllocateStatelessDefaultDeviceQueueSurface;

    // create kernel
    MockProgram program(*pDevice->getExecutionEnvironment());
    MockKernel *pKernel = new MockKernel(&program, *pKernelInfo, *pClDevice);

    // define stateless path
    pKernelInfo->usesSsh = false;
    pKernelInfo->requiresSshForBuffers = false;

    uint64_t crossThreadData = 0;

    pKernel->setCrossThreadData(&crossThreadData, sizeof(uint64_t));

    pKernel->patchDefaultDeviceQueue(pDevQueue);

    EXPECT_EQ(pDevQueue->getQueueBuffer()->getGpuAddressToPatch(), *(uint64_t *)pKernel->getCrossThreadData());

    delete pKernel;
}

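// KernelResidencyTest checks which allocations Kernel::makeResident hands to the command
// stream receiver: kernel ISA, exported functions surface, global surface and unified memory.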
typedef Test<ClDeviceFixture> KernelResidencyTest;

HWTEST_F(KernelResidencyTest, givenKernelWhenMakeResidentIsCalledThenKernelIsaIsMadeResident) {
    ASSERT_NE(nullptr, pDevice);
    char pCrossThreadData[64];

    // define kernel info
    auto pKernelInfo = std::make_unique<KernelInfo>();
    auto &commandStreamReceiver = pDevice->getUltCommandStreamReceiver<FamilyType>();
    commandStreamReceiver.storeMakeResidentAllocations = true;

    auto memoryManager = commandStreamReceiver.getMemoryManager();
    pKernelInfo->kernelAllocation = memoryManager->allocateGraphicsMemoryWithProperties(MockAllocationProperties{pDevice->getRootDeviceIndex(), MemoryConstants::pageSize});

    // setup kernel arg offsets
    KernelArgPatchInfo kernelArgPatchInfo;

    pKernelInfo->kernelArgInfo.resize(3);
    pKernelInfo->kernelArgInfo[2].kernelArgPatchInfoVector.push_back(kernelArgPatchInfo);
    pKernelInfo->kernelArgInfo[1].kernelArgPatchInfoVector.push_back(kernelArgPatchInfo);
    pKernelInfo->kernelArgInfo[0].kernelArgPatchInfoVector.push_back(kernelArgPatchInfo);

    pKernelInfo->kernelArgInfo[2].kernelArgPatchInfoVector[0].crossthreadOffset = 0x10;
    pKernelInfo->kernelArgInfo[1].kernelArgPatchInfoVector[0].crossthreadOffset = 0x20;
    pKernelInfo->kernelArgInfo[0].kernelArgPatchInfoVector[0].crossthreadOffset = 0x30;

    MockProgram program(*pDevice->getExecutionEnvironment());
    MockContext ctx;
    program.setContext(&ctx);
    std::unique_ptr<MockKernel> pKernel(new MockKernel(&program, *pKernelInfo, *pClDevice));
    ASSERT_EQ(CL_SUCCESS, pKernel->initialize());
    pKernel->setCrossThreadData(pCrossThreadData, sizeof(pCrossThreadData));

    EXPECT_EQ(0u, commandStreamReceiver.makeResidentAllocations.size());
    pKernel->makeResident(pDevice->getGpgpuCommandStreamReceiver());
    EXPECT_EQ(1u, commandStreamReceiver.makeResidentAllocations.size());
    EXPECT_TRUE(commandStreamReceiver.isMadeResident(pKernel->getKernelInfo().getGraphicsAllocation()));

    memoryManager->freeGraphicsMemory(pKernelInfo->kernelAllocation);
}

HWTEST_F(KernelResidencyTest, givenKernelWhenMakeResidentIsCalledThenExportedFunctionsIsaAllocationIsMadeResident) {
    auto pKernelInfo = std::make_unique<KernelInfo>();
    auto &commandStreamReceiver = pDevice->getUltCommandStreamReceiver<FamilyType>();
    commandStreamReceiver.storeMakeResidentAllocations = true;

    auto memoryManager = commandStreamReceiver.getMemoryManager();
    pKernelInfo->kernelAllocation = memoryManager->allocateGraphicsMemoryWithProperties(MockAllocationProperties{pDevice->getRootDeviceIndex(), MemoryConstants::pageSize});

    MockProgram program(*pDevice->getExecutionEnvironment());
    auto exportedFunctionsSurface = std::make_unique<MockGraphicsAllocation>();
    program.exportedFunctionsSurface = exportedFunctionsSurface.get();
    MockContext ctx;
    program.setContext(&ctx);
    std::unique_ptr<MockKernel> pKernel(new MockKernel(&program, *pKernelInfo, *pClDevice));
    ASSERT_EQ(CL_SUCCESS, pKernel->initialize());

    EXPECT_EQ(0u, commandStreamReceiver.makeResidentAllocations.size());
    pKernel->makeResident(pDevice->getGpgpuCommandStreamReceiver());
    EXPECT_TRUE(commandStreamReceiver.isMadeResident(program.exportedFunctionsSurface));

    // check getResidency as well
    std::vector<NEO::Surface *> residencySurfaces;
    pKernel->getResidency(residencySurfaces);
    std::unique_ptr<NEO::ExecutionEnvironment> mockCsrExecEnv;
    {
        CommandStreamReceiverMock csrMock;
        csrMock.passResidencyCallToBaseClass = false;
        for (const auto &s : residencySurfaces) {
            s->makeResident(csrMock);
            delete s;
        }
        EXPECT_EQ(1U, csrMock.residency.count(exportedFunctionsSurface->getUnderlyingBuffer()));
        mockCsrExecEnv = std::move(csrMock.mockExecutionEnvironment);
    }

    memoryManager->freeGraphicsMemory(pKernelInfo->kernelAllocation);
}

HWTEST_F(KernelResidencyTest, givenKernelWhenMakeResidentIsCalledThenGlobalBufferIsMadeResident) {
    auto pKernelInfo = std::make_unique<KernelInfo>();
    auto &commandStreamReceiver = pDevice->getUltCommandStreamReceiver<FamilyType>();
    commandStreamReceiver.storeMakeResidentAllocations = true;

    auto memoryManager = commandStreamReceiver.getMemoryManager();
    pKernelInfo->kernelAllocation = memoryManager->allocateGraphicsMemoryWithProperties(MockAllocationProperties{pDevice->getRootDeviceIndex(), MemoryConstants::pageSize});

    MockProgram program(*pDevice->getExecutionEnvironment());
    MockContext ctx;
    program.setContext(&ctx);
    program.globalSurface = new MockGraphicsAllocation();
    std::unique_ptr<MockKernel> pKernel(new MockKernel(&program, *pKernelInfo, *pClDevice));
    ASSERT_EQ(CL_SUCCESS, pKernel->initialize());

    EXPECT_EQ(0u, commandStreamReceiver.makeResidentAllocations.size());
    pKernel->makeResident(pDevice->getGpgpuCommandStreamReceiver());
    EXPECT_TRUE(commandStreamReceiver.isMadeResident(program.globalSurface));

    std::vector<NEO::Surface *> residencySurfaces;
    pKernel->getResidency(residencySurfaces);
    std::unique_ptr<NEO::ExecutionEnvironment> mockCsrExecEnv;
    {
        CommandStreamReceiverMock csrMock;
        csrMock.passResidencyCallToBaseClass = false;
        for (const auto &s : residencySurfaces) {
            s->makeResident(csrMock);
            delete s;
        }
        EXPECT_EQ(1U, csrMock.residency.count(program.globalSurface->getUnderlyingBuffer()));
        mockCsrExecEnv = std::move(csrMock.mockExecutionEnvironment);
    }

    memoryManager->freeGraphicsMemory(pKernelInfo->kernelAllocation);
}

HWTEST_F(KernelResidencyTest, givenKernelWhenItUsesIndirectUnifiedMemoryDeviceAllocationThenTheyAreMadeResident) {
    MockKernelWithInternals mockKernel(*this->pClDevice);
    auto &commandStreamReceiver = this->pDevice->getUltCommandStreamReceiver<FamilyType>();

    auto svmAllocationsManager = mockKernel.mockContext->getSVMAllocsManager();
    auto properties = SVMAllocsManager::UnifiedMemoryProperties(InternalMemoryType::DEVICE_UNIFIED_MEMORY);
    properties.subdeviceBitfield = pDevice->getDeviceBitfield();
    auto unifiedMemoryAllocation = svmAllocationsManager->createUnifiedMemoryAllocation(pDevice->getRootDeviceIndex(), 4096u, properties);

    mockKernel.mockKernel->makeResident(this->pDevice->getGpgpuCommandStreamReceiver());

    EXPECT_EQ(0u, commandStreamReceiver.getResidencyAllocations().size());

    mockKernel.mockKernel->setUnifiedMemoryProperty(CL_KERNEL_EXEC_INFO_INDIRECT_DEVICE_ACCESS_INTEL, true);

    mockKernel.mockKernel->makeResident(this->pDevice->getGpgpuCommandStreamReceiver());

    EXPECT_EQ(1u, commandStreamReceiver.getResidencyAllocations().size());

    EXPECT_EQ(commandStreamReceiver.getResidencyAllocations()[0]->getGpuAddress(), castToUint64(unifiedMemoryAllocation));

    mockKernel.mockKernel->setUnifiedMemoryProperty(CL_KERNEL_EXEC_INFO_SVM_PTRS, true);

    svmAllocationsManager->freeSVMAlloc(unifiedMemoryAllocation);
}

HWTEST_F(KernelResidencyTest, givenKernelUsingIndirectHostMemoryWhenMakeResidentIsCalledThenOnlyHostAllocationsAreMadeResident) {
    MockKernelWithInternals mockKernel(*this->pClDevice);
    auto &commandStreamReceiver = this->pDevice->getUltCommandStreamReceiver<FamilyType>();

    auto svmAllocationsManager = mockKernel.mockContext->getSVMAllocsManager();
    auto deviceProperties = SVMAllocsManager::UnifiedMemoryProperties(InternalMemoryType::DEVICE_UNIFIED_MEMORY);
    deviceProperties.subdeviceBitfield = pDevice->getDeviceBitfield();
    auto hostProperties = SVMAllocsManager::UnifiedMemoryProperties(InternalMemoryType::HOST_UNIFIED_MEMORY);
    hostProperties.subdeviceBitfield = pDevice->getDeviceBitfield();
    auto unifiedDeviceMemoryAllocation = svmAllocationsManager->createUnifiedMemoryAllocation(pDevice->getRootDeviceIndex(), 4096u, deviceProperties);
    auto unifiedHostMemoryAllocation = svmAllocationsManager->createUnifiedMemoryAllocation(pDevice->getRootDeviceIndex(), 4096u, hostProperties);

    mockKernel.mockKernel->makeResident(this->pDevice->getGpgpuCommandStreamReceiver());
    EXPECT_EQ(0u, commandStreamReceiver.getResidencyAllocations().size());
    mockKernel.mockKernel->setUnifiedMemoryProperty(CL_KERNEL_EXEC_INFO_INDIRECT_HOST_ACCESS_INTEL, true);

    mockKernel.mockKernel->makeResident(this->pDevice->getGpgpuCommandStreamReceiver());
    EXPECT_EQ(1u, commandStreamReceiver.getResidencyAllocations().size());
    EXPECT_EQ(commandStreamReceiver.getResidencyAllocations()[0]->getGpuAddress(), castToUint64(unifiedHostMemoryAllocation));

    svmAllocationsManager->freeSVMAlloc(unifiedDeviceMemoryAllocation);
    svmAllocationsManager->freeSVMAlloc(unifiedHostMemoryAllocation);
}

HWTEST_F(KernelResidencyTest, givenKernelUsingIndirectSharedMemoryWhenMakeResidentIsCalledThenOnlySharedAllocationsAreMadeResident) {
    MockKernelWithInternals mockKernel(*this->pClDevice);
    auto &commandStreamReceiver = this->pDevice->getUltCommandStreamReceiver<FamilyType>();

    auto svmAllocationsManager = mockKernel.mockContext->getSVMAllocsManager();
    auto sharedProperties = SVMAllocsManager::UnifiedMemoryProperties(InternalMemoryType::SHARED_UNIFIED_MEMORY);
    sharedProperties.subdeviceBitfield = pDevice->getDeviceBitfield();
    auto hostProperties = SVMAllocsManager::UnifiedMemoryProperties(InternalMemoryType::HOST_UNIFIED_MEMORY);
    hostProperties.subdeviceBitfield = pDevice->getDeviceBitfield();
    auto unifiedSharedMemoryAllocation = svmAllocationsManager->createSharedUnifiedMemoryAllocation(pDevice->getRootDeviceIndex(), 4096u, sharedProperties, mockKernel.mockContext->getSpecialQueue());
    auto unifiedHostMemoryAllocation = svmAllocationsManager->createUnifiedMemoryAllocation(pDevice->getRootDeviceIndex(), 4096u, hostProperties);

    mockKernel.mockKernel->makeResident(this->pDevice->getGpgpuCommandStreamReceiver());
    EXPECT_EQ(0u, commandStreamReceiver.getResidencyAllocations().size());
    mockKernel.mockKernel->setUnifiedMemoryProperty(CL_KERNEL_EXEC_INFO_INDIRECT_SHARED_ACCESS_INTEL, true);

    mockKernel.mockKernel->makeResident(this->pDevice->getGpgpuCommandStreamReceiver());
    EXPECT_EQ(1u, commandStreamReceiver.getResidencyAllocations().size());
    EXPECT_EQ(commandStreamReceiver.getResidencyAllocations()[0]->getGpuAddress(), castToUint64(unifiedSharedMemoryAllocation));

    svmAllocationsManager->freeSVMAlloc(unifiedSharedMemoryAllocation);
    svmAllocationsManager->freeSVMAlloc(unifiedHostMemoryAllocation);
}

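// The page-fault-manager tests below cover CPU/GPU migration of shared USM allocations:
// an allocation migrated to the CPU must be protected and transferred back to the GPU on
// makeResident, unless the kernel does not require memory synchronization.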
HWTEST_F(KernelResidencyTest, givenDeviceUnifiedMemoryAndPageFaultManagerWhenMakeResidentIsCalledThenAllocationIsNotDecommited) {
    auto mockPageFaultManager = new MockPageFaultManager();
    static_cast<MockMemoryManager *>(this->pDevice->getExecutionEnvironment()->memoryManager.get())->pageFaultManager.reset(mockPageFaultManager);
    MockKernelWithInternals mockKernel(*this->pClDevice);
    auto &commandStreamReceiver = this->pDevice->getUltCommandStreamReceiver<FamilyType>();

    auto svmAllocationsManager = mockKernel.mockContext->getSVMAllocsManager();
    auto deviceProperties = SVMAllocsManager::UnifiedMemoryProperties(InternalMemoryType::DEVICE_UNIFIED_MEMORY);
    deviceProperties.subdeviceBitfield = pDevice->getDeviceBitfield();
    auto unifiedMemoryAllocation = svmAllocationsManager->createUnifiedMemoryAllocation(pDevice->getRootDeviceIndex(), 4096u, deviceProperties);
    auto unifiedMemoryGraphicsAllocation = svmAllocationsManager->getSVMAlloc(unifiedMemoryAllocation);

    EXPECT_EQ(0u, mockKernel.mockKernel->kernelUnifiedMemoryGfxAllocations.size());
    mockKernel.mockKernel->setUnifiedMemoryExecInfo(unifiedMemoryGraphicsAllocation->gpuAllocations.getGraphicsAllocation(pDevice->getRootDeviceIndex()));
    EXPECT_EQ(1u, mockKernel.mockKernel->kernelUnifiedMemoryGfxAllocations.size());

    mockKernel.mockKernel->makeResident(commandStreamReceiver);

    EXPECT_EQ(mockPageFaultManager->allowMemoryAccessCalled, 0);
    EXPECT_EQ(mockPageFaultManager->protectMemoryCalled, 0);
    EXPECT_EQ(mockPageFaultManager->transferToCpuCalled, 0);
    EXPECT_EQ(mockPageFaultManager->transferToGpuCalled, 0);

    mockKernel.mockKernel->clearUnifiedMemoryExecInfo();
    EXPECT_EQ(0u, mockKernel.mockKernel->kernelUnifiedMemoryGfxAllocations.size());
    svmAllocationsManager->freeSVMAlloc(unifiedMemoryAllocation);
}

HWTEST_F(KernelResidencyTest, givenSharedUnifiedMemoryAndPageFaultManagerWhenMakeResidentIsCalledThenAllocationIsDecommited) {
    auto mockPageFaultManager = new MockPageFaultManager();
    static_cast<MockMemoryManager *>(this->pDevice->getExecutionEnvironment()->memoryManager.get())->pageFaultManager.reset(mockPageFaultManager);
    MockKernelWithInternals mockKernel(*this->pClDevice);
    auto &commandStreamReceiver = this->pDevice->getUltCommandStreamReceiver<FamilyType>();

    auto svmAllocationsManager = mockKernel.mockContext->getSVMAllocsManager();
    auto sharedProperties = SVMAllocsManager::UnifiedMemoryProperties(InternalMemoryType::SHARED_UNIFIED_MEMORY);
    sharedProperties.subdeviceBitfield = pDevice->getDeviceBitfield();
    auto unifiedMemoryAllocation = svmAllocationsManager->createSharedUnifiedMemoryAllocation(pDevice->getRootDeviceIndex(), 4096u, sharedProperties, mockKernel.mockContext->getSpecialQueue());
    auto unifiedMemoryGraphicsAllocation = svmAllocationsManager->getSVMAlloc(unifiedMemoryAllocation);
    mockPageFaultManager->insertAllocation(unifiedMemoryAllocation, 4096u, svmAllocationsManager, mockKernel.mockContext->getSpecialQueue());

    EXPECT_EQ(mockPageFaultManager->transferToCpuCalled, 1);

    EXPECT_EQ(0u, mockKernel.mockKernel->kernelUnifiedMemoryGfxAllocations.size());
    mockKernel.mockKernel->setUnifiedMemoryExecInfo(unifiedMemoryGraphicsAllocation->gpuAllocations.getGraphicsAllocation(pDevice->getRootDeviceIndex()));
    EXPECT_EQ(1u, mockKernel.mockKernel->kernelUnifiedMemoryGfxAllocations.size());

    mockKernel.mockKernel->makeResident(commandStreamReceiver);

    EXPECT_EQ(mockPageFaultManager->allowMemoryAccessCalled, 0);
    EXPECT_EQ(mockPageFaultManager->protectMemoryCalled, 1);
    EXPECT_EQ(mockPageFaultManager->transferToCpuCalled, 1);
    EXPECT_EQ(mockPageFaultManager->transferToGpuCalled, 1);

    EXPECT_EQ(mockPageFaultManager->protectedMemoryAccessAddress, unifiedMemoryAllocation);
    EXPECT_EQ(mockPageFaultManager->protectedSize, 4096u);
    EXPECT_EQ(mockPageFaultManager->transferToGpuAddress, unifiedMemoryAllocation);

    mockKernel.mockKernel->clearUnifiedMemoryExecInfo();
    EXPECT_EQ(0u, mockKernel.mockKernel->kernelUnifiedMemoryGfxAllocations.size());
    svmAllocationsManager->freeSVMAlloc(unifiedMemoryAllocation);
}

HWTEST_F(KernelResidencyTest, givenSharedUnifiedMemoryAndNotRequiredMemSyncWhenMakeResidentIsCalledThenAllocationIsNotDecommited) {
    auto mockPageFaultManager = new MockPageFaultManager();
    static_cast<MockMemoryManager *>(this->pDevice->getExecutionEnvironment()->memoryManager.get())->pageFaultManager.reset(mockPageFaultManager);
    MockKernelWithInternals mockKernel(*this->pClDevice, nullptr, true);
    auto &commandStreamReceiver = this->pDevice->getUltCommandStreamReceiver<FamilyType>();

    auto svmAllocationsManager = mockKernel.mockContext->getSVMAllocsManager();
    auto sharedProperties = SVMAllocsManager::UnifiedMemoryProperties(InternalMemoryType::SHARED_UNIFIED_MEMORY);
    sharedProperties.subdeviceBitfield = pDevice->getDeviceBitfield();
    auto unifiedMemoryAllocation = svmAllocationsManager->createSharedUnifiedMemoryAllocation(pDevice->getRootDeviceIndex(), 4096u, sharedProperties, mockKernel.mockContext->getSpecialQueue());
    auto unifiedMemoryGraphicsAllocation = svmAllocationsManager->getSVMAlloc(unifiedMemoryAllocation);
    mockPageFaultManager->insertAllocation(unifiedMemoryAllocation, 4096u, svmAllocationsManager, mockKernel.mockContext->getSpecialQueue());

    EXPECT_EQ(mockPageFaultManager->transferToCpuCalled, 1);
    auto gpuAllocation = unifiedMemoryGraphicsAllocation->gpuAllocations.getGraphicsAllocation(pDevice->getRootDeviceIndex());
    mockKernel.mockKernel->kernelArguments[0] = {Kernel::kernelArgType::SVM_ALLOC_OBJ, gpuAllocation, unifiedMemoryAllocation, 4096u, gpuAllocation, sizeof(uintptr_t)};
    mockKernel.mockKernel->setUnifiedMemorySyncRequirement(false);

    mockKernel.mockKernel->makeResident(commandStreamReceiver);

    EXPECT_EQ(mockPageFaultManager->allowMemoryAccessCalled, 0);
    EXPECT_EQ(mockPageFaultManager->protectMemoryCalled, 0);
    EXPECT_EQ(mockPageFaultManager->transferToCpuCalled, 1);
    EXPECT_EQ(mockPageFaultManager->transferToGpuCalled, 0);

    EXPECT_EQ(0u, mockKernel.mockKernel->kernelUnifiedMemoryGfxAllocations.size());
    svmAllocationsManager->freeSVMAlloc(unifiedMemoryAllocation);
}

HWTEST_F(KernelResidencyTest, givenSharedUnifiedMemoryRequiredMemSyncWhenMakeResidentIsCalledThenAllocationIsDecommited) {
    auto mockPageFaultManager = new MockPageFaultManager();
    static_cast<MockMemoryManager *>(this->pDevice->getExecutionEnvironment()->memoryManager.get())->pageFaultManager.reset(mockPageFaultManager);
    MockKernelWithInternals mockKernel(*this->pClDevice, nullptr, true);
    auto &commandStreamReceiver = this->pDevice->getUltCommandStreamReceiver<FamilyType>();

    auto svmAllocationsManager = mockKernel.mockContext->getSVMAllocsManager();
    auto sharedProperties = SVMAllocsManager::UnifiedMemoryProperties(InternalMemoryType::SHARED_UNIFIED_MEMORY);
    sharedProperties.subdeviceBitfield = pDevice->getDeviceBitfield();
    auto unifiedMemoryAllocation = svmAllocationsManager->createSharedUnifiedMemoryAllocation(pDevice->getRootDeviceIndex(), 4096u, sharedProperties, mockKernel.mockContext->getSpecialQueue());
    auto unifiedMemoryGraphicsAllocation = svmAllocationsManager->getSVMAlloc(unifiedMemoryAllocation);
    mockPageFaultManager->insertAllocation(unifiedMemoryAllocation, 4096u, svmAllocationsManager, mockKernel.mockContext->getSpecialQueue());

    auto gpuAllocation = unifiedMemoryGraphicsAllocation->gpuAllocations.getGraphicsAllocation(pDevice->getRootDeviceIndex());
    EXPECT_EQ(mockPageFaultManager->transferToCpuCalled, 1);
    mockKernel.mockKernel->kernelArguments[0] = {Kernel::kernelArgType::SVM_ALLOC_OBJ, gpuAllocation, unifiedMemoryAllocation, 4096u, gpuAllocation, sizeof(uintptr_t)};
    mockKernel.mockKernel->setUnifiedMemorySyncRequirement(true);

    mockKernel.mockKernel->makeResident(commandStreamReceiver);

    EXPECT_EQ(mockPageFaultManager->allowMemoryAccessCalled, 0);
    EXPECT_EQ(mockPageFaultManager->protectMemoryCalled, 1);
    EXPECT_EQ(mockPageFaultManager->transferToCpuCalled, 1);
    EXPECT_EQ(mockPageFaultManager->transferToGpuCalled, 1);

    EXPECT_EQ(0u, mockKernel.mockKernel->kernelUnifiedMemoryGfxAllocations.size());
    svmAllocationsManager->freeSVMAlloc(unifiedMemoryAllocation);
}

HWTEST_F(KernelResidencyTest, givenSharedUnifiedMemoryAllocPageFaultManagerAndIndirectAllocsAllowedWhenMakeResidentIsCalledThenAllocationIsDecommited) {
    auto mockPageFaultManager = new MockPageFaultManager();
    static_cast<MockMemoryManager *>(this->pDevice->getExecutionEnvironment()->memoryManager.get())->pageFaultManager.reset(mockPageFaultManager);
    MockKernelWithInternals mockKernel(*this->pClDevice);
    auto &commandStreamReceiver = this->pDevice->getUltCommandStreamReceiver<FamilyType>();

    auto svmAllocationsManager = mockKernel.mockContext->getSVMAllocsManager();
    auto sharedProperties = SVMAllocsManager::UnifiedMemoryProperties(InternalMemoryType::SHARED_UNIFIED_MEMORY);
    sharedProperties.subdeviceBitfield = pDevice->getDeviceBitfield();
    auto unifiedMemoryAllocation = svmAllocationsManager->createSharedUnifiedMemoryAllocation(pDevice->getRootDeviceIndex(), 4096u, sharedProperties, mockKernel.mockContext->getSpecialQueue());
    mockPageFaultManager->insertAllocation(unifiedMemoryAllocation, 4096u, svmAllocationsManager, mockKernel.mockContext->getSpecialQueue());

    EXPECT_EQ(mockPageFaultManager->transferToCpuCalled, 1);
    mockKernel.mockKernel->unifiedMemoryControls.indirectSharedAllocationsAllowed = true;

    mockKernel.mockKernel->makeResident(commandStreamReceiver);

    EXPECT_EQ(mockPageFaultManager->allowMemoryAccessCalled, 0);
    EXPECT_EQ(mockPageFaultManager->protectMemoryCalled, 1);
    EXPECT_EQ(mockPageFaultManager->transferToCpuCalled, 1);
    EXPECT_EQ(mockPageFaultManager->transferToGpuCalled, 1);

    EXPECT_EQ(mockPageFaultManager->protectedMemoryAccessAddress, unifiedMemoryAllocation);
    EXPECT_EQ(mockPageFaultManager->protectedSize, 4096u);
    EXPECT_EQ(mockPageFaultManager->transferToGpuAddress, unifiedMemoryAllocation);

    mockKernel.mockKernel->clearUnifiedMemoryExecInfo();
    EXPECT_EQ(0u, mockKernel.mockKernel->kernelUnifiedMemoryGfxAllocations.size());
    svmAllocationsManager->freeSVMAlloc(unifiedMemoryAllocation);
}

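// clSetKernelExecInfo(CL_KERNEL_EXEC_INFO_USM_PTRS_INTEL, ...) replaces any previously
// stored USM allocations on the kernel; the indirect-access properties further below only
// toggle the corresponding unifiedMemoryControls flags.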
HWTEST_F(KernelResidencyTest, givenKernelWhenSetKernelExecInfoWithUnifiedMemoryIsCalledThenAllocationIsStoredWithinKernel) {
    MockKernelWithInternals mockKernel(*this->pClDevice);
    auto &commandStreamReceiver = this->pDevice->getUltCommandStreamReceiver<FamilyType>();

    auto svmAllocationsManager = mockKernel.mockContext->getSVMAllocsManager();
    auto deviceProperties = SVMAllocsManager::UnifiedMemoryProperties(InternalMemoryType::DEVICE_UNIFIED_MEMORY);
    deviceProperties.subdeviceBitfield = pDevice->getDeviceBitfield();
    auto unifiedMemoryAllocation = svmAllocationsManager->createUnifiedMemoryAllocation(pDevice->getRootDeviceIndex(), 4096u, deviceProperties);
    auto unifiedMemoryGraphicsAllocation = svmAllocationsManager->getSVMAlloc(unifiedMemoryAllocation);

    EXPECT_EQ(0u, mockKernel.mockKernel->kernelUnifiedMemoryGfxAllocations.size());

    mockKernel.mockKernel->setUnifiedMemoryExecInfo(unifiedMemoryGraphicsAllocation->gpuAllocations.getGraphicsAllocation(pDevice->getRootDeviceIndex()));

    EXPECT_EQ(1u, mockKernel.mockKernel->kernelUnifiedMemoryGfxAllocations.size());
    EXPECT_EQ(mockKernel.mockKernel->kernelUnifiedMemoryGfxAllocations[0]->getGpuAddress(), castToUint64(unifiedMemoryAllocation));

    mockKernel.mockKernel->makeResident(this->pDevice->getGpgpuCommandStreamReceiver());
    EXPECT_EQ(1u, commandStreamReceiver.getResidencyAllocations().size());
    EXPECT_EQ(commandStreamReceiver.getResidencyAllocations()[0]->getGpuAddress(), castToUint64(unifiedMemoryAllocation));

    mockKernel.mockKernel->clearUnifiedMemoryExecInfo();
    EXPECT_EQ(0u, mockKernel.mockKernel->kernelUnifiedMemoryGfxAllocations.size());
    svmAllocationsManager->freeSVMAlloc(unifiedMemoryAllocation);
}

HWTEST_F(KernelResidencyTest, givenKernelWhenclSetKernelExecInfoWithUnifiedMemoryIsCalledThenAllocationIsStoredWithinKernel) {
    REQUIRE_SVM_OR_SKIP(pClDevice);
    MockKernelWithInternals mockKernel(*this->pClDevice);

    auto svmAllocationsManager = mockKernel.mockContext->getSVMAllocsManager();
    auto deviceProperties = SVMAllocsManager::UnifiedMemoryProperties(InternalMemoryType::DEVICE_UNIFIED_MEMORY);
    deviceProperties.subdeviceBitfield = pDevice->getDeviceBitfield();
    auto unifiedMemoryAllocation = svmAllocationsManager->createUnifiedMemoryAllocation(pDevice->getRootDeviceIndex(), 4096u, deviceProperties);

    auto unifiedMemoryAllocation2 = svmAllocationsManager->createUnifiedMemoryAllocation(pDevice->getRootDeviceIndex(), 4096u, deviceProperties);

    auto status = clSetKernelExecInfo(mockKernel.mockKernel, CL_KERNEL_EXEC_INFO_USM_PTRS_INTEL, sizeof(unifiedMemoryAllocation), &unifiedMemoryAllocation);
    EXPECT_EQ(CL_SUCCESS, status);

    EXPECT_EQ(1u, mockKernel.mockKernel->kernelUnifiedMemoryGfxAllocations.size());
    EXPECT_EQ(mockKernel.mockKernel->kernelUnifiedMemoryGfxAllocations[0]->getGpuAddress(), castToUint64(unifiedMemoryAllocation));

    status = clSetKernelExecInfo(mockKernel.mockKernel, CL_KERNEL_EXEC_INFO_USM_PTRS_INTEL, sizeof(unifiedMemoryAllocation), &unifiedMemoryAllocation2);
    EXPECT_EQ(CL_SUCCESS, status);
    EXPECT_EQ(1u, mockKernel.mockKernel->kernelUnifiedMemoryGfxAllocations.size());
    EXPECT_EQ(mockKernel.mockKernel->kernelUnifiedMemoryGfxAllocations[0]->getGpuAddress(), castToUint64(unifiedMemoryAllocation2));

    svmAllocationsManager->freeSVMAlloc(unifiedMemoryAllocation);
    svmAllocationsManager->freeSVMAlloc(unifiedMemoryAllocation2);
}

HWTEST_F(KernelResidencyTest, givenKernelWhenclSetKernelExecInfoWithUnifiedMemoryDevicePropertyIsCalledThenKernelControlIsChanged) {
    REQUIRE_SVM_OR_SKIP(pClDevice);
    MockKernelWithInternals mockKernel(*this->pClDevice);
    cl_bool enableIndirectDeviceAccess = CL_TRUE;
    auto status = clSetKernelExecInfo(mockKernel.mockKernel, CL_KERNEL_EXEC_INFO_INDIRECT_DEVICE_ACCESS_INTEL, sizeof(cl_bool), &enableIndirectDeviceAccess);
    EXPECT_EQ(CL_SUCCESS, status);
    EXPECT_TRUE(mockKernel.mockKernel->unifiedMemoryControls.indirectDeviceAllocationsAllowed);
    enableIndirectDeviceAccess = CL_FALSE;
    status = clSetKernelExecInfo(mockKernel.mockKernel, CL_KERNEL_EXEC_INFO_INDIRECT_DEVICE_ACCESS_INTEL, sizeof(cl_bool), &enableIndirectDeviceAccess);
    EXPECT_EQ(CL_SUCCESS, status);
    EXPECT_FALSE(mockKernel.mockKernel->unifiedMemoryControls.indirectDeviceAllocationsAllowed);
}

HWTEST_F(KernelResidencyTest, givenKernelWhenclSetKernelExecInfoWithUnifiedMemoryHostPropertyIsCalledThenKernelControlIsChanged) {
    REQUIRE_SVM_OR_SKIP(pClDevice);
    MockKernelWithInternals mockKernel(*this->pClDevice);
    cl_bool enableIndirectHostAccess = CL_TRUE;
    auto status = clSetKernelExecInfo(mockKernel.mockKernel, CL_KERNEL_EXEC_INFO_INDIRECT_HOST_ACCESS_INTEL, sizeof(cl_bool), &enableIndirectHostAccess);
    EXPECT_EQ(CL_SUCCESS, status);
    EXPECT_TRUE(mockKernel.mockKernel->unifiedMemoryControls.indirectHostAllocationsAllowed);
    enableIndirectHostAccess = CL_FALSE;
    status = clSetKernelExecInfo(mockKernel.mockKernel, CL_KERNEL_EXEC_INFO_INDIRECT_HOST_ACCESS_INTEL, sizeof(cl_bool), &enableIndirectHostAccess);
    EXPECT_EQ(CL_SUCCESS, status);
    EXPECT_FALSE(mockKernel.mockKernel->unifiedMemoryControls.indirectHostAllocationsAllowed);
}

HWTEST_F(KernelResidencyTest, givenKernelWhenclSetKernelExecInfoWithUnifiedMemorySharedPropertyIsCalledThenKernelControlIsChanged) {
    REQUIRE_SVM_OR_SKIP(pClDevice);
    MockKernelWithInternals mockKernel(*this->pClDevice);
    cl_bool enableIndirectSharedAccess = CL_TRUE;
    auto status = clSetKernelExecInfo(mockKernel.mockKernel, CL_KERNEL_EXEC_INFO_INDIRECT_SHARED_ACCESS_INTEL, sizeof(cl_bool), &enableIndirectSharedAccess);
    EXPECT_EQ(CL_SUCCESS, status);
    EXPECT_TRUE(mockKernel.mockKernel->unifiedMemoryControls.indirectSharedAllocationsAllowed);
    enableIndirectSharedAccess = CL_FALSE;
    status = clSetKernelExecInfo(mockKernel.mockKernel, CL_KERNEL_EXEC_INFO_INDIRECT_SHARED_ACCESS_INTEL, sizeof(cl_bool), &enableIndirectSharedAccess);
    EXPECT_EQ(CL_SUCCESS, status);
    EXPECT_FALSE(mockKernel.mockKernel->unifiedMemoryControls.indirectSharedAllocationsAllowed);
}

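// usesOnlyImages() is only meaningful after Kernel::initialize(); before that it returns
// false even for image-only kernels, as the first detection test below demonstrates.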
TEST(KernelImageDetectionTests, givenKernelWithImagesOnlyWhenItIsAskedIfItHasImagesOnlyThenTrueIsReturned) {
    auto pKernelInfo = std::make_unique<KernelInfo>();
    pKernelInfo->kernelArgInfo.resize(3);
    pKernelInfo->kernelArgInfo[2].isImage = true;
    pKernelInfo->kernelArgInfo[1].isMediaBlockImage = true;
    pKernelInfo->kernelArgInfo[0].isMediaImage = true;

    auto device = std::make_unique<MockClDevice>(MockDevice::createWithNewExecutionEnvironment<MockDevice>(defaultHwInfo.get()));
    auto context = clUniquePtr(new MockContext(device.get()));
    auto program = clUniquePtr(new MockProgram(*device->getExecutionEnvironment(), context.get(), false, &device->getDevice()));
    auto kernel = clUniquePtr(new MockKernel(program.get(), *pKernelInfo, *device));
    EXPECT_FALSE(kernel->usesOnlyImages());
    kernel->initialize();
    EXPECT_TRUE(kernel->usesOnlyImages());
}

TEST(KernelImageDetectionTests, givenKernelWithImagesAndBuffersWhenItIsAskedIfItHasImagesOnlyThenFalseIsReturned) {
|
2018-08-16 14:28:58 +02:00
|
|
|
auto pKernelInfo = std::make_unique<KernelInfo>();
|
2018-08-07 09:49:47 +02:00
|
|
|
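// arg 1 is a buffer, so the kernel must not report an images-only signature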
pKernelInfo->kernelArgInfo.resize(3);
|
|
|
|
pKernelInfo->kernelArgInfo[2].isImage = true;
|
|
|
|
pKernelInfo->kernelArgInfo[1].isBuffer = true;
|
|
|
|
pKernelInfo->kernelArgInfo[0].isMediaImage = true;
|
|
|
|
|
2020-03-24 18:00:34 +01:00
|
|
|
auto device = std::make_unique<MockClDevice>(MockDevice::createWithNewExecutionEnvironment<MockDevice>(defaultHwInfo.get()));
|
2019-01-28 15:27:15 +01:00
|
|
|
auto context = clUniquePtr(new MockContext(device.get()));
|
2020-02-20 08:12:44 +01:00
|
|
|
auto program = clUniquePtr(new MockProgram(*device->getExecutionEnvironment(), context.get(), false, &device->getDevice()));
|
2019-01-28 15:27:15 +01:00
|
|
|
auto kernel = clUniquePtr(new MockKernel(program.get(), *pKernelInfo, *device));
|
2018-08-07 09:49:47 +02:00
|
|
|
EXPECT_FALSE(kernel->usesOnlyImages());
|
|
|
|
kernel->initialize();
|
|
|
|
EXPECT_FALSE(kernel->usesOnlyImages());
|
|
|
|
}
|
|
|
|
|
|
|
|
TEST(KernelImageDetectionTests, givenKernelWithNoImagesWhenItIsAskedIfItHasImagesOnlyThenFalseIsReturned) {
|
2018-08-16 14:28:58 +02:00
|
|
|
auto pKernelInfo = std::make_unique<KernelInfo>();
|
2018-08-07 09:49:47 +02:00
|
|
|
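// a single buffer argument - usesOnlyImages() should stay false before and after initialize()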
pKernelInfo->kernelArgInfo.resize(1);
|
|
|
|
pKernelInfo->kernelArgInfo[0].isBuffer = true;
|
|
|
|
|
2020-03-24 18:00:34 +01:00
|
|
|
auto device = std::make_unique<MockClDevice>(MockDevice::createWithNewExecutionEnvironment<MockDevice>(defaultHwInfo.get()));
|
2019-01-28 15:27:15 +01:00
|
|
|
auto context = clUniquePtr(new MockContext(device.get()));
|
2020-02-20 08:12:44 +01:00
|
|
|
auto program = clUniquePtr(new MockProgram(*device->getExecutionEnvironment(), context.get(), false, &device->getDevice()));
|
2019-01-28 15:27:15 +01:00
|
|
|
auto kernel = clUniquePtr(new MockKernel(program.get(), *pKernelInfo, *device));
|
2018-08-07 09:49:47 +02:00
|
|
|
EXPECT_FALSE(kernel->usesOnlyImages());
|
|
|
|
kernel->initialize();
|
|
|
|
EXPECT_FALSE(kernel->usesOnlyImages());
|
|
|
|
}
|
|
|
|
|
2020-05-06 19:02:35 +02:00
|
|
|
HWTEST_F(KernelResidencyTest, WhenMakingArgsResidentThenImageFromImageCheckIsCorrect) {
|
2018-01-10 14:05:34 +01:00
|
|
|
ASSERT_NE(nullptr, pDevice);
|
|
|
|
|
|
|
|
// create NV12 image
|
|
|
|
cl_mem_flags flags = CL_MEM_READ_ONLY | CL_MEM_HOST_NO_ACCESS;
|
|
|
|
cl_image_format imageFormat;
|
|
|
|
imageFormat.image_channel_data_type = CL_UNORM_INT8;
|
|
|
|
imageFormat.image_channel_order = CL_NV12_INTEL;
|
2020-06-05 00:16:55 +02:00
|
|
|
auto surfaceFormat = Image::getSurfaceFormatFromTable(
|
|
|
|
flags, &imageFormat, pClDevice->getHardwareInfo().capabilityTable.supportsOcl21Features);
|
2018-01-10 14:05:34 +01:00
|
|
|
|
|
|
|
cl_image_desc imageDesc = {};
|
|
|
|
imageDesc.image_type = CL_MEM_OBJECT_IMAGE2D;
|
|
|
|
imageDesc.image_width = 16;
|
|
|
|
imageDesc.image_height = 16;
|
|
|
|
imageDesc.image_depth = 1;
|
|
|
|
|
|
|
|
cl_int retVal = CL_SUCCESS;
|
|
|
|
MockContext context;
|
2020-06-05 00:16:55 +02:00
|
|
|
std::unique_ptr<NEO::Image> imageNV12(
|
|
|
|
Image::create(&context, MemoryPropertiesHelper::createMemoryProperties(flags, 0, 0, &context.getDevice(0)->getDevice()),
|
|
|
|
flags, 0, surfaceFormat, &imageDesc, nullptr, retVal));
|
2018-03-28 18:06:51 +02:00
|
|
|
EXPECT_EQ(imageNV12->getMediaPlaneType(), 0u);
|
2018-01-10 14:05:34 +01:00
|
|
|
|
|
|
|
// create Y plane
|
|
|
|
imageFormat.image_channel_order = CL_R;
|
|
|
|
flags = CL_MEM_READ_ONLY;
|
2020-06-05 00:16:55 +02:00
|
|
|
surfaceFormat = Image::getSurfaceFormatFromTable(
|
|
|
|
flags, &imageFormat, context.getDevice(0)->getHardwareInfo().capabilityTable.supportsOcl21Features);
|
2018-01-10 14:05:34 +01:00
|
|
|
|
|
|
|
imageDesc.image_width = 0;
|
|
|
|
imageDesc.image_height = 0;
|
|
|
|
imageDesc.image_depth = 0;
|
|
|
|
imageDesc.mem_object = imageNV12.get();
|
|
|
|
|
2020-06-05 00:16:55 +02:00
|
|
|
std::unique_ptr<NEO::Image> imageY(
|
|
|
|
Image::create(&context, MemoryPropertiesHelper::createMemoryProperties(flags, 0, 0, &context.getDevice(0)->getDevice()),
|
|
|
|
flags, 0, surfaceFormat, &imageDesc, nullptr, retVal));
|
2018-03-28 18:06:51 +02:00
|
|
|
EXPECT_EQ(imageY->getMediaPlaneType(), 0u);
|
|
|
|
|
2018-08-16 14:28:58 +02:00
|
|
|
auto pKernelInfo = std::make_unique<KernelInfo>();
|
2018-01-10 14:05:34 +01:00
|
|
|
KernelArgInfo kernelArgInfo;
|
|
|
|
kernelArgInfo.isImage = true;
|
|
|
|
|
2020-01-11 18:25:26 +01:00
|
|
|
pKernelInfo->kernelArgInfo.push_back(std::move(kernelArgInfo));
|
2018-01-10 14:05:34 +01:00
|
|
|
|
2018-08-06 09:46:57 +02:00
|
|
|
auto program = std::make_unique<MockProgram>(*pDevice->getExecutionEnvironment());
|
2019-07-04 12:17:42 +02:00
|
|
|
program->setContext(&context);
|
2020-01-14 14:32:11 +01:00
|
|
|
std::unique_ptr<MockKernel> pKernel(new MockKernel(program.get(), *pKernelInfo, *pClDevice));
|
2018-01-10 14:05:34 +01:00
|
|
|
|
|
|
|
ASSERT_EQ(CL_SUCCESS, pKernel->initialize());
|
|
|
|
pKernel->storeKernelArg(0, Kernel::IMAGE_OBJ, (cl_mem)imageY.get(), nullptr, 0);
|
2019-07-15 14:28:09 +02:00
|
|
|
pKernel->makeResident(pDevice->getGpgpuCommandStreamReceiver());
|
2018-01-10 14:05:34 +01:00
|
|
|
|
|
|
|
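// only the Y plane was created from the NV12 parent image, so only it should report isImageFromImage()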
EXPECT_FALSE(imageNV12->isImageFromImage());
|
|
|
|
EXPECT_TRUE(imageY->isImageFromImage());
|
|
|
|
|
|
|
|
auto &commandStreamReceiver = pDevice->getUltCommandStreamReceiver<FamilyType>();
|
2018-11-16 09:11:52 +01:00
|
|
|
EXPECT_EQ(CommandStreamReceiver::SamplerCacheFlushState::samplerCacheFlushBefore, commandStreamReceiver.samplerCacheFlushRequired);
|
2018-01-10 14:05:34 +01:00
|
|
|
}
|
|
|
|
|
2020-05-28 14:05:12 +02:00
|
|
|
struct KernelExecutionEnvironmentTest : public Test<ClDeviceFixture> {
|
2017-12-21 00:45:38 +01:00
|
|
|
void SetUp() override {
|
2020-05-28 14:05:12 +02:00
|
|
|
ClDeviceFixture::SetUp();
|
2018-08-16 14:28:58 +02:00
|
|
|
|
2018-08-06 09:46:57 +02:00
|
|
|
program = std::make_unique<MockProgram>(*pDevice->getExecutionEnvironment());
|
2018-08-16 14:28:58 +02:00
|
|
|
pKernelInfo = std::make_unique<KernelInfo>();
|
2020-02-13 09:38:54 +01:00
|
|
|
executionEnvironment.CompiledSIMD32 = 1;
|
2017-12-21 00:45:38 +01:00
|
|
|
pKernelInfo->patchInfo.executionEnvironment = &executionEnvironment;
|
|
|
|
|
2020-01-14 14:32:11 +01:00
|
|
|
pKernel = new MockKernel(program.get(), *pKernelInfo, *pClDevice);
|
2017-12-21 00:45:38 +01:00
|
|
|
ASSERT_EQ(CL_SUCCESS, pKernel->initialize());
|
|
|
|
}
|
|
|
|
|
|
|
|
void TearDown() override {
|
|
|
|
delete pKernel;
|
2018-08-16 14:28:58 +02:00
|
|
|
|
2020-05-28 14:05:12 +02:00
|
|
|
ClDeviceFixture::TearDown();
|
2017-12-21 00:45:38 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
MockKernel *pKernel;
|
2018-08-09 11:34:50 +02:00
|
|
|
std::unique_ptr<MockProgram> program;
|
2018-08-16 14:28:58 +02:00
|
|
|
std::unique_ptr<KernelInfo> pKernelInfo;
|
2018-10-05 11:28:50 +02:00
|
|
|
SPatchExecutionEnvironment executionEnvironment = {};
|
2017-12-21 00:45:38 +01:00
|
|
|
};
|
|
|
|
|
2020-05-06 19:02:35 +02:00
|
|
|
TEST_F(KernelExecutionEnvironmentTest, GivenCompiledSimd32TrueWhenGettingMaxSimdSizeThen32IsReturned) {
|
2017-12-21 00:45:38 +01:00
|
|
|
|
|
|
|
executionEnvironment.CompiledSIMD32 = true;
|
|
|
|
executionEnvironment.CompiledSIMD16 = true;
|
|
|
|
executionEnvironment.CompiledSIMD8 = true;
|
|
|
|
|
|
|
|
EXPECT_EQ(32u, this->pKernelInfo->getMaxSimdSize());
|
|
|
|
}
|
|
|
|
|
2020-05-06 19:02:35 +02:00
|
|
|
TEST_F(KernelExecutionEnvironmentTest, GivenCompiledSimd32FalseAndCompiledSimd16TrueWhenGettingMaxSimdSizeThen16IsReturned) {
|
2017-12-21 00:45:38 +01:00
|
|
|
|
|
|
|
executionEnvironment.CompiledSIMD32 = false;
|
|
|
|
executionEnvironment.CompiledSIMD16 = true;
|
|
|
|
executionEnvironment.CompiledSIMD8 = true;
|
|
|
|
|
|
|
|
EXPECT_EQ(16u, this->pKernelInfo->getMaxSimdSize());
|
|
|
|
}
|
|
|
|
|
2020-05-06 19:02:35 +02:00
|
|
|
TEST_F(KernelExecutionEnvironmentTest, GivenOnlyCompiledSimd8TrueWhenGettingMaxSimdSizeThen8IsReturned) {
|
2017-12-21 00:45:38 +01:00
|
|
|
|
|
|
|
executionEnvironment.CompiledSIMD32 = false;
|
|
|
|
executionEnvironment.CompiledSIMD16 = false;
|
|
|
|
executionEnvironment.CompiledSIMD8 = true;
|
|
|
|
|
|
|
|
EXPECT_EQ(8u, this->pKernelInfo->getMaxSimdSize());
|
|
|
|
}
|
|
|
|
|
2020-05-06 19:02:35 +02:00
|
|
|
TEST_F(KernelExecutionEnvironmentTest, GivenAllCompiledSimdFalseWhenGettingMaxSimdSizeThen8IsReturned) {
|
2017-12-21 00:45:38 +01:00
|
|
|
|
|
|
|
executionEnvironment.CompiledSIMD32 = false;
|
|
|
|
executionEnvironment.CompiledSIMD16 = false;
|
|
|
|
executionEnvironment.CompiledSIMD8 = false;
|
|
|
|
|
|
|
|
EXPECT_EQ(8u, this->pKernelInfo->getMaxSimdSize());
|
|
|
|
}
|
|
|
|
|
2020-05-06 19:02:35 +02:00
|
|
|
TEST_F(KernelExecutionEnvironmentTest, GivenExecutionEnvironmentNotAvailableWhenGettingMaxSimdSizeThen1IsReturned) {
|
2017-12-21 00:45:38 +01:00
|
|
|
|
|
|
|
executionEnvironment.CompiledSIMD32 = false;
|
|
|
|
executionEnvironment.CompiledSIMD16 = false;
|
|
|
|
executionEnvironment.CompiledSIMD8 = false;
|
|
|
|
|
|
|
|
auto oldExcEnv = this->pKernelInfo->patchInfo.executionEnvironment;
|
|
|
|
|
|
|
|
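// with no execution environment token available, getMaxSimdSize() is expected to fall back to 1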
this->pKernelInfo->patchInfo.executionEnvironment = nullptr;
|
|
|
|
EXPECT_EQ(1U, this->pKernelInfo->getMaxSimdSize());
|
|
|
|
this->pKernelInfo->patchInfo.executionEnvironment = oldExcEnv;
|
|
|
|
}
|
|
|
|
|
2020-05-06 19:02:35 +02:00
|
|
|
TEST_F(KernelExecutionEnvironmentTest, GivenLargestCompiledSimdSizeEqualOneWhenGettingMaxSimdSizeThen1IsReturned) {
|
2019-10-23 09:36:37 +02:00
|
|
|
|
|
|
|
executionEnvironment.LargestCompiledSIMDSize = 1;
|
|
|
|
|
|
|
|
auto oldExcEnv = this->pKernelInfo->patchInfo.executionEnvironment;
|
|
|
|
|
|
|
|
EXPECT_EQ(1U, this->pKernelInfo->getMaxSimdSize());
|
|
|
|
this->pKernelInfo->patchInfo.executionEnvironment = oldExcEnv;
|
|
|
|
}
|
|
|
|
|
2020-05-06 19:02:35 +02:00
|
|
|
TEST_F(KernelExecutionEnvironmentTest, GivenCompiledWorkGroupSizeIsZeroWhenGettingMaxRequiredWorkGroupSizeThenMaxWorkGroupSizeIsCorrect) {
|
2020-03-05 18:13:32 +01:00
|
|
|
auto maxWorkGroupSize = static_cast<size_t>(pDevice->getDeviceInfo().maxWorkGroupSize);
|
2017-12-21 00:45:38 +01:00
|
|
|
auto oldRequiredWorkGroupSizeX = this->pKernelInfo->patchInfo.executionEnvironment->RequiredWorkGroupSizeX;
|
|
|
|
auto oldRequiredWorkGroupSizeY = this->pKernelInfo->patchInfo.executionEnvironment->RequiredWorkGroupSizeY;
|
|
|
|
auto oldRequiredWorkGroupSizeZ = this->pKernelInfo->patchInfo.executionEnvironment->RequiredWorkGroupSizeZ;
|
|
|
|
|
|
|
|
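// a required work group size of zero means no compile-time restriction, so the device maximum is expected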
const_cast<SPatchExecutionEnvironment *>(this->pKernelInfo->patchInfo.executionEnvironment)->RequiredWorkGroupSizeX = 0;
|
|
|
|
const_cast<SPatchExecutionEnvironment *>(this->pKernelInfo->patchInfo.executionEnvironment)->RequiredWorkGroupSizeY = 0;
|
|
|
|
const_cast<SPatchExecutionEnvironment *>(this->pKernelInfo->patchInfo.executionEnvironment)->RequiredWorkGroupSizeZ = 0;
|
|
|
|
|
|
|
|
EXPECT_EQ(maxWorkGroupSize, this->pKernelInfo->getMaxRequiredWorkGroupSize(maxWorkGroupSize));
|
|
|
|
|
|
|
|
const_cast<SPatchExecutionEnvironment *>(this->pKernelInfo->patchInfo.executionEnvironment)->RequiredWorkGroupSizeX = oldRequiredWorkGroupSizeX;
|
|
|
|
const_cast<SPatchExecutionEnvironment *>(this->pKernelInfo->patchInfo.executionEnvironment)->RequiredWorkGroupSizeY = oldRequiredWorkGroupSizeY;
|
|
|
|
const_cast<SPatchExecutionEnvironment *>(this->pKernelInfo->patchInfo.executionEnvironment)->RequiredWorkGroupSizeZ = oldRequiredWorkGroupSizeZ;
|
|
|
|
}
|
|
|
|
|
2020-05-06 19:02:35 +02:00
|
|
|
TEST_F(KernelExecutionEnvironmentTest, GivenCompiledWorkGroupSizeLowerThanMaxWorkGroupSizeWhenGettingMaxRequiredWorkGroupSizeThenMaxWorkGroupSizeIsCorrect) {
|
2020-03-05 18:13:32 +01:00
|
|
|
auto maxWorkGroupSize = static_cast<size_t>(pDevice->getDeviceInfo().maxWorkGroupSize);
|
2017-12-21 00:45:38 +01:00
|
|
|
auto oldRequiredWorkGroupSizeX = this->pKernelInfo->patchInfo.executionEnvironment->RequiredWorkGroupSizeX;
|
|
|
|
auto oldRequiredWorkGroupSizeY = this->pKernelInfo->patchInfo.executionEnvironment->RequiredWorkGroupSizeY;
|
|
|
|
auto oldRequiredWorkGroupSizeZ = this->pKernelInfo->patchInfo.executionEnvironment->RequiredWorkGroupSizeZ;
|
|
|
|
|
|
|
|
const_cast<SPatchExecutionEnvironment *>(this->pKernelInfo->patchInfo.executionEnvironment)->RequiredWorkGroupSizeX = static_cast<uint32_t>(maxWorkGroupSize / 2);
|
|
|
|
const_cast<SPatchExecutionEnvironment *>(this->pKernelInfo->patchInfo.executionEnvironment)->RequiredWorkGroupSizeY = 1;
|
|
|
|
const_cast<SPatchExecutionEnvironment *>(this->pKernelInfo->patchInfo.executionEnvironment)->RequiredWorkGroupSizeZ = 1;
|
|
|
|
|
|
|
|
EXPECT_EQ(maxWorkGroupSize / 2, this->pKernelInfo->getMaxRequiredWorkGroupSize(maxWorkGroupSize));
|
|
|
|
|
|
|
|
const_cast<SPatchExecutionEnvironment *>(this->pKernelInfo->patchInfo.executionEnvironment)->RequiredWorkGroupSizeX = oldRequiredWorkGroupSizeX;
|
|
|
|
const_cast<SPatchExecutionEnvironment *>(this->pKernelInfo->patchInfo.executionEnvironment)->RequiredWorkGroupSizeY = oldRequiredWorkGroupSizeY;
|
|
|
|
const_cast<SPatchExecutionEnvironment *>(this->pKernelInfo->patchInfo.executionEnvironment)->RequiredWorkGroupSizeZ = oldRequiredWorkGroupSizeZ;
|
|
|
|
}
|
|
|
|
|
2020-05-06 19:02:35 +02:00
|
|
|
TEST_F(KernelExecutionEnvironmentTest, GivenCompiledWorkGroupSizeIsGreaterThanMaxWorkGroupSizeWhenGettingMaxRequiredWorkGroupSizeThenMaxWorkGroupSizeIsCorrect) {
|
2020-03-05 18:13:32 +01:00
|
|
|
auto maxWorkGroupSize = static_cast<size_t>(pDevice->getDeviceInfo().maxWorkGroupSize);
|
2017-12-21 00:45:38 +01:00
|
|
|
auto oldRequiredWorkGroupSizeX = this->pKernelInfo->patchInfo.executionEnvironment->RequiredWorkGroupSizeX;
|
|
|
|
auto oldRequiredWorkGroupSizeY = this->pKernelInfo->patchInfo.executionEnvironment->RequiredWorkGroupSizeY;
|
|
|
|
auto oldRequiredWorkGroupSizeZ = this->pKernelInfo->patchInfo.executionEnvironment->RequiredWorkGroupSizeZ;
|
|
|
|
|
|
|
|
const_cast<SPatchExecutionEnvironment *>(this->pKernelInfo->patchInfo.executionEnvironment)->RequiredWorkGroupSizeX = static_cast<uint32_t>(maxWorkGroupSize);
|
|
|
|
const_cast<SPatchExecutionEnvironment *>(this->pKernelInfo->patchInfo.executionEnvironment)->RequiredWorkGroupSizeY = static_cast<uint32_t>(maxWorkGroupSize);
|
|
|
|
const_cast<SPatchExecutionEnvironment *>(this->pKernelInfo->patchInfo.executionEnvironment)->RequiredWorkGroupSizeZ = static_cast<uint32_t>(maxWorkGroupSize);
|
|
|
|
|
|
|
|
EXPECT_EQ(maxWorkGroupSize, this->pKernelInfo->getMaxRequiredWorkGroupSize(maxWorkGroupSize));
|
|
|
|
|
|
|
|
const_cast<SPatchExecutionEnvironment *>(this->pKernelInfo->patchInfo.executionEnvironment)->RequiredWorkGroupSizeX = oldRequiredWorkGroupSizeX;
|
|
|
|
const_cast<SPatchExecutionEnvironment *>(this->pKernelInfo->patchInfo.executionEnvironment)->RequiredWorkGroupSizeY = oldRequiredWorkGroupSizeY;
|
|
|
|
const_cast<SPatchExecutionEnvironment *>(this->pKernelInfo->patchInfo.executionEnvironment)->RequiredWorkGroupSizeZ = oldRequiredWorkGroupSizeZ;
|
|
|
|
}
|
|
|
|
|
2020-05-28 14:05:12 +02:00
|
|
|
struct KernelCrossThreadTests : Test<ClDeviceFixture> {
|
2017-12-21 00:45:38 +01:00
|
|
|
KernelCrossThreadTests() {
|
|
|
|
}
|
|
|
|
|
|
|
|
void SetUp() override {
|
2020-05-28 14:05:12 +02:00
|
|
|
ClDeviceFixture::SetUp();
|
2018-08-06 09:46:57 +02:00
|
|
|
program = std::make_unique<MockProgram>(*pDevice->getExecutionEnvironment());
|
2017-12-21 00:45:38 +01:00
|
|
|
patchDataParameterStream.DataParameterStreamSize = 64 * sizeof(uint8_t);
|
|
|
|
|
2018-08-16 14:28:58 +02:00
|
|
|
pKernelInfo = std::make_unique<KernelInfo>();
|
2017-12-21 00:45:38 +01:00
|
|
|
ASSERT_NE(nullptr, pKernelInfo);
|
|
|
|
pKernelInfo->patchInfo.dataParameterStream = &patchDataParameterStream;
|
2020-02-13 09:38:54 +01:00
|
|
|
executionEnvironment.CompiledSIMD32 = 1;
|
2017-12-21 00:45:38 +01:00
|
|
|
pKernelInfo->patchInfo.executionEnvironment = &executionEnvironment;
|
|
|
|
}
|
|
|
|
|
|
|
|
void TearDown() override {
|
2018-08-16 14:28:58 +02:00
|
|
|
|
2020-05-28 14:05:12 +02:00
|
|
|
ClDeviceFixture::TearDown();
|
2017-12-21 00:45:38 +01:00
|
|
|
}
|
|
|
|
|
2018-08-09 11:34:50 +02:00
|
|
|
std::unique_ptr<MockProgram> program;
|
2018-08-16 14:28:58 +02:00
|
|
|
std::unique_ptr<KernelInfo> pKernelInfo;
|
2017-12-21 00:45:38 +01:00
|
|
|
SPatchDataParameterStream patchDataParameterStream;
|
2018-10-05 11:28:50 +02:00
|
|
|
SPatchExecutionEnvironment executionEnvironment = {};
|
2017-12-21 00:45:38 +01:00
|
|
|
};
|
|
|
|
|
2020-05-06 19:02:35 +02:00
|
|
|
TEST_F(KernelCrossThreadTests, WhenKernelIsInitializedThenGlobalWorkOffsetIsCorrect) {
|
2017-12-21 00:45:38 +01:00
|
|
|
|
|
|
|
pKernelInfo->workloadInfo.globalWorkOffsetOffsets[1] = 4;
|
|
|
|
|
2020-01-14 14:32:11 +01:00
|
|
|
MockKernel kernel(program.get(), *pKernelInfo, *pClDevice);
|
2017-12-21 00:45:38 +01:00
|
|
|
ASSERT_EQ(CL_SUCCESS, kernel.initialize());
|
|
|
|
|
|
|
|
EXPECT_EQ(&Kernel::dummyPatchLocation, kernel.globalWorkOffsetX);
|
|
|
|
EXPECT_NE(nullptr, kernel.globalWorkOffsetY);
|
|
|
|
EXPECT_NE(&Kernel::dummyPatchLocation, kernel.globalWorkOffsetY);
|
|
|
|
EXPECT_EQ(&Kernel::dummyPatchLocation, kernel.globalWorkOffsetZ);
|
|
|
|
}
|
|
|
|
|
2020-05-06 19:02:35 +02:00
|
|
|
TEST_F(KernelCrossThreadTests, WhenKernelIsInitializedThenLocalWorkSizeIsCorrect) {
|
2017-12-21 00:45:38 +01:00
|
|
|
|
|
|
|
pKernelInfo->workloadInfo.localWorkSizeOffsets[0] = 0xc;
|
|
|
|
|
2020-01-14 14:32:11 +01:00
|
|
|
MockKernel kernel(program.get(), *pKernelInfo, *pClDevice);
|
2017-12-21 00:45:38 +01:00
|
|
|
ASSERT_EQ(CL_SUCCESS, kernel.initialize());
|
|
|
|
|
|
|
|
EXPECT_NE(nullptr, kernel.localWorkSizeX);
|
|
|
|
EXPECT_NE(&Kernel::dummyPatchLocation, kernel.localWorkSizeX);
|
|
|
|
EXPECT_EQ(&Kernel::dummyPatchLocation, kernel.localWorkSizeY);
|
|
|
|
EXPECT_EQ(&Kernel::dummyPatchLocation, kernel.localWorkSizeZ);
|
|
|
|
}
|
|
|
|
|
2020-05-06 19:02:35 +02:00
|
|
|
TEST_F(KernelCrossThreadTests, WhenKernelIsInitializedThenLocalWorkSize2IsCorrect) {
|
2017-12-21 00:45:38 +01:00
|
|
|
|
|
|
|
pKernelInfo->workloadInfo.localWorkSizeOffsets2[1] = 0xd;
|
|
|
|
|
2020-01-14 14:32:11 +01:00
|
|
|
MockKernel kernel(program.get(), *pKernelInfo, *pClDevice);
|
2017-12-21 00:45:38 +01:00
|
|
|
ASSERT_EQ(CL_SUCCESS, kernel.initialize());
|
|
|
|
|
|
|
|
EXPECT_EQ(&Kernel::dummyPatchLocation, kernel.localWorkSizeX2);
|
|
|
|
EXPECT_NE(nullptr, kernel.localWorkSizeY2);
|
|
|
|
EXPECT_NE(&Kernel::dummyPatchLocation, kernel.localWorkSizeY2);
|
|
|
|
EXPECT_EQ(&Kernel::dummyPatchLocation, kernel.localWorkSizeZ2);
|
|
|
|
}
|
|
|
|
|
2020-05-06 19:02:35 +02:00
|
|
|
TEST_F(KernelCrossThreadTests, WhenKernelIsInitializedThenGlobalWorkSizeIsCorrect) {
|
2017-12-21 00:45:38 +01:00
|
|
|
|
|
|
|
pKernelInfo->workloadInfo.globalWorkSizeOffsets[2] = 8;
|
|
|
|
|
2020-01-14 14:32:11 +01:00
|
|
|
MockKernel kernel(program.get(), *pKernelInfo, *pClDevice);
|
2017-12-21 00:45:38 +01:00
|
|
|
ASSERT_EQ(CL_SUCCESS, kernel.initialize());
|
|
|
|
|
|
|
|
EXPECT_EQ(&Kernel::dummyPatchLocation, kernel.globalWorkSizeX);
|
|
|
|
EXPECT_EQ(&Kernel::dummyPatchLocation, kernel.globalWorkSizeY);
|
|
|
|
EXPECT_NE(nullptr, kernel.globalWorkSizeZ);
|
|
|
|
EXPECT_NE(&Kernel::dummyPatchLocation, kernel.globalWorkSizeZ);
|
|
|
|
}
|
|
|
|
|
2020-05-06 19:02:35 +02:00
|
|
|
TEST_F(KernelCrossThreadTests, WhenKernelIsInitializedThenLocalWorkDimIsCorrect) {
|
2017-12-21 00:45:38 +01:00
|
|
|
|
|
|
|
pKernelInfo->workloadInfo.workDimOffset = 12;
|
|
|
|
|
2020-01-14 14:32:11 +01:00
|
|
|
MockKernel kernel(program.get(), *pKernelInfo, *pClDevice);
|
2017-12-21 00:45:38 +01:00
|
|
|
ASSERT_EQ(CL_SUCCESS, kernel.initialize());
|
|
|
|
|
|
|
|
EXPECT_NE(nullptr, kernel.workDim);
|
|
|
|
EXPECT_NE(&Kernel::dummyPatchLocation, kernel.workDim);
|
|
|
|
}
|
|
|
|
|
2020-05-06 19:02:35 +02:00
|
|
|
TEST_F(KernelCrossThreadTests, WhenKernelIsInitializedThenNumWorkGroupsIsCorrect) {
|
2017-12-21 00:45:38 +01:00
|
|
|
|
|
|
|
pKernelInfo->workloadInfo.numWorkGroupsOffset[0] = 0 * sizeof(uint32_t);
|
|
|
|
pKernelInfo->workloadInfo.numWorkGroupsOffset[1] = 1 * sizeof(uint32_t);
|
|
|
|
pKernelInfo->workloadInfo.numWorkGroupsOffset[2] = 2 * sizeof(uint32_t);
|
|
|
|
|
2020-01-14 14:32:11 +01:00
|
|
|
MockKernel kernel(program.get(), *pKernelInfo, *pClDevice);
|
2017-12-21 00:45:38 +01:00
|
|
|
ASSERT_EQ(CL_SUCCESS, kernel.initialize());
|
|
|
|
|
|
|
|
EXPECT_NE(nullptr, kernel.numWorkGroupsX);
|
|
|
|
EXPECT_NE(nullptr, kernel.numWorkGroupsY);
|
|
|
|
EXPECT_NE(nullptr, kernel.numWorkGroupsZ);
|
|
|
|
EXPECT_NE(&Kernel::dummyPatchLocation, kernel.numWorkGroupsX);
|
|
|
|
EXPECT_NE(&Kernel::dummyPatchLocation, kernel.numWorkGroupsY);
|
|
|
|
EXPECT_NE(&Kernel::dummyPatchLocation, kernel.numWorkGroupsZ);
|
|
|
|
}
|
|
|
|
|
2020-05-06 19:02:35 +02:00
|
|
|
TEST_F(KernelCrossThreadTests, WhenKernelIsInitializedThenEnqueuedLocalWorkSizeIsCorrect) {
|
2017-12-21 00:45:38 +01:00
|
|
|
|
|
|
|
pKernelInfo->workloadInfo.enqueuedLocalWorkSizeOffsets[0] = 0;
|
|
|
|
|
2020-01-14 14:32:11 +01:00
|
|
|
MockKernel kernel(program.get(), *pKernelInfo, *pClDevice);
|
2017-12-21 00:45:38 +01:00
|
|
|
ASSERT_EQ(CL_SUCCESS, kernel.initialize());
|
|
|
|
|
|
|
|
EXPECT_NE(nullptr, kernel.enqueuedLocalWorkSizeX);
|
|
|
|
EXPECT_NE(&Kernel::dummyPatchLocation, kernel.enqueuedLocalWorkSizeX);
|
|
|
|
EXPECT_EQ(&Kernel::dummyPatchLocation, kernel.enqueuedLocalWorkSizeY);
|
|
|
|
EXPECT_EQ(&Kernel::dummyPatchLocation, kernel.enqueuedLocalWorkSizeZ);
|
|
|
|
}
|
|
|
|
|
2020-05-06 19:02:35 +02:00
|
|
|
TEST_F(KernelCrossThreadTests, WhenKernelIsInitializedThenEnqueuedMaxWorkGroupSizeIsCorrect) {
|
2017-12-21 00:45:38 +01:00
|
|
|
|
|
|
|
pKernelInfo->workloadInfo.maxWorkGroupSizeOffset = 12;
|
|
|
|
|
2020-01-14 14:32:11 +01:00
|
|
|
MockKernel kernel(program.get(), *pKernelInfo, *pClDevice);
|
2017-12-21 00:45:38 +01:00
|
|
|
ASSERT_EQ(CL_SUCCESS, kernel.initialize());
|
|
|
|
|
2019-09-13 14:09:49 +02:00
|
|
|
EXPECT_NE(nullptr, kernel.maxWorkGroupSizeForCrossThreadData);
|
|
|
|
EXPECT_NE(&Kernel::dummyPatchLocation, kernel.maxWorkGroupSizeForCrossThreadData);
|
|
|
|
EXPECT_EQ(static_cast<void *>(kernel.getCrossThreadData() + pKernelInfo->workloadInfo.maxWorkGroupSizeOffset), static_cast<void *>(kernel.maxWorkGroupSizeForCrossThreadData));
|
|
|
|
EXPECT_EQ(pDevice->getDeviceInfo().maxWorkGroupSize, *kernel.maxWorkGroupSizeForCrossThreadData);
|
|
|
|
EXPECT_EQ(pDevice->getDeviceInfo().maxWorkGroupSize, kernel.maxKernelWorkGroupSize);
|
2017-12-21 00:45:38 +01:00
|
|
|
}
|
|
|
|
|
2020-05-06 19:02:35 +02:00
|
|
|
TEST_F(KernelCrossThreadTests, WhenKernelIsInitializedThenDataParameterSimdSizeIsCorrect) {
|
2017-12-21 00:45:38 +01:00
|
|
|
|
|
|
|
pKernelInfo->workloadInfo.simdSizeOffset = 16;
|
2020-01-14 14:32:11 +01:00
|
|
|
MockKernel kernel(program.get(), *pKernelInfo, *pClDevice);
|
2017-12-21 00:45:38 +01:00
|
|
|
executionEnvironment.CompiledSIMD32 = false;
|
2020-02-13 09:38:54 +01:00
|
|
|
executionEnvironment.CompiledSIMD16 = true;
|
2017-12-21 00:45:38 +01:00
|
|
|
executionEnvironment.CompiledSIMD8 = true;
|
|
|
|
ASSERT_EQ(CL_SUCCESS, kernel.initialize());
|
|
|
|
|
|
|
|
EXPECT_NE(nullptr, kernel.dataParameterSimdSize);
|
|
|
|
EXPECT_NE(&Kernel::dummyPatchLocation, kernel.dataParameterSimdSize);
|
|
|
|
EXPECT_EQ(static_cast<void *>(kernel.getCrossThreadData() + pKernelInfo->workloadInfo.simdSizeOffset), static_cast<void *>(kernel.dataParameterSimdSize));
|
|
|
|
EXPECT_EQ_VAL(pKernelInfo->getMaxSimdSize(), *kernel.dataParameterSimdSize);
|
|
|
|
}
|
|
|
|
|
2020-05-06 19:02:35 +02:00
|
|
|
TEST_F(KernelCrossThreadTests, GivenParentEventOffsetWhenKernelIsInitializedThenParentEventIsInitializedWithInvalidValue) {
|
2017-12-21 00:45:38 +01:00
|
|
|
pKernelInfo->workloadInfo.parentEventOffset = 16;
|
2020-01-14 14:32:11 +01:00
|
|
|
MockKernel kernel(program.get(), *pKernelInfo, *pClDevice);
|
2017-12-21 00:45:38 +01:00
|
|
|
ASSERT_EQ(CL_SUCCESS, kernel.initialize());
|
|
|
|
|
|
|
|
EXPECT_NE(nullptr, kernel.parentEventOffset);
|
|
|
|
EXPECT_NE(&Kernel::dummyPatchLocation, kernel.parentEventOffset);
|
|
|
|
EXPECT_EQ(static_cast<void *>(kernel.getCrossThreadData() + pKernelInfo->workloadInfo.parentEventOffset), static_cast<void *>(kernel.parentEventOffset));
|
|
|
|
EXPECT_EQ(WorkloadInfo::invalidParentEvent, *kernel.parentEventOffset);
|
|
|
|
}
|
|
|
|
|
2020-05-06 19:02:35 +02:00
|
|
|
TEST_F(KernelCrossThreadTests, WhenAddingKernelThenProgramRefCountIsIncremented) {
|
2017-12-21 00:45:38 +01:00
|
|
|
|
2018-08-09 11:34:50 +02:00
|
|
|
auto refCount = program->getReference();
|
2020-01-14 14:32:11 +01:00
|
|
|
MockKernel *kernel = new MockKernel(program.get(), *pKernelInfo, *pClDevice);
|
2018-08-09 11:34:50 +02:00
|
|
|
auto refCount2 = program->getReference();
|
2017-12-21 00:45:38 +01:00
|
|
|
EXPECT_EQ(refCount2, refCount + 1);
|
|
|
|
|
|
|
|
delete kernel;
|
2018-08-09 11:34:50 +02:00
|
|
|
auto refCount3 = program->getReference();
|
2017-12-21 00:45:38 +01:00
|
|
|
EXPECT_EQ(refCount, refCount3);
|
|
|
|
}
|
|
|
|
|
2020-05-06 19:02:35 +02:00
|
|
|
TEST_F(KernelCrossThreadTests, GivenSlmStaticSizeWhenCreatingKernelThenSlmTotalSizeIsSet) {
|
2017-12-21 00:45:38 +01:00
|
|
|
|
|
|
|
pKernelInfo->workloadInfo.slmStaticSize = 1024;
|
|
|
|
|
2020-01-14 14:32:11 +01:00
|
|
|
MockKernel *kernel = new MockKernel(program.get(), *pKernelInfo, *pClDevice);
|
2017-12-21 00:45:38 +01:00
|
|
|
|
|
|
|
EXPECT_EQ(1024u, kernel->slmTotalSize);
|
|
|
|
|
|
|
|
delete kernel;
|
|
|
|
}
|
|
|
|
TEST_F(KernelCrossThreadTests, givenKernelWithPrivateMemoryWhenItIsCreatedThenCurbeIsPatchedProperly) {
|
|
|
|
|
|
|
|
SPatchAllocateStatelessPrivateSurface allocatePrivate = {};
|
|
|
|
allocatePrivate.DataParamSize = 8;
|
|
|
|
allocatePrivate.DataParamOffset = 0;
|
|
|
|
allocatePrivate.PerThreadPrivateMemorySize = 1;
|
|
|
|
pKernelInfo->patchInfo.pAllocateStatelessPrivateSurface = &allocatePrivate;
|
|
|
|
|
2020-01-14 14:32:11 +01:00
|
|
|
MockKernel *kernel = new MockKernel(program.get(), *pKernelInfo, *pClDevice);
|
2017-12-21 00:45:38 +01:00
|
|
|
|
|
|
|
kernel->initialize();
|
|
|
|
|
|
|
|
auto privateSurface = kernel->getPrivateSurface();
|
|
|
|
|
|
|
|
auto constantBuffer = kernel->getCrossThreadData();
|
|
|
|
auto privateAddress = (uintptr_t)privateSurface->getGpuAddressToPatch();
|
|
|
|
auto ptrCurbe = (uint64_t *)constantBuffer;
|
|
|
|
auto privateAddressFromCurbe = (uintptr_t)*ptrCurbe;
|
|
|
|
|
|
|
|
EXPECT_EQ(privateAddressFromCurbe, privateAddress);
|
|
|
|
|
|
|
|
delete kernel;
|
|
|
|
}
|
|
|
|
|
2019-07-17 17:45:52 +02:00
|
|
|
TEST_F(KernelCrossThreadTests, givenKernelWithPreferredWkgMultipleWhenItIsCreatedThenCurbeIsPatchedProperly) {
|
2017-12-21 00:45:38 +01:00
|
|
|
|
2019-07-17 17:45:52 +02:00
|
|
|
pKernelInfo->workloadInfo.preferredWkgMultipleOffset = 8;
|
2020-01-14 14:32:11 +01:00
|
|
|
MockKernel *kernel = new MockKernel(program.get(), *pKernelInfo, *pClDevice);
|
2017-12-21 00:45:38 +01:00
|
|
|
|
|
|
|
kernel->initialize();
|
|
|
|
|
|
|
|
auto *crossThread = kernel->getCrossThreadData();
|
|
|
|
|
2019-07-17 17:45:52 +02:00
|
|
|
uint32_t *preferredWkgMultipleOffset = (uint32_t *)ptrOffset(crossThread, 8);
|
2017-12-21 00:45:38 +01:00
|
|
|
|
2019-07-17 17:45:52 +02:00
|
|
|
EXPECT_EQ(pKernelInfo->getMaxSimdSize(), *preferredWkgMultipleOffset);
|
2017-12-21 00:45:38 +01:00
|
|
|
|
|
|
|
delete kernel;
|
|
|
|
}
|
|
|
|
|
2020-05-06 19:02:35 +02:00
|
|
|
TEST_F(KernelCrossThreadTests, WhenPatchingBlocksSimdSizeThenSimdSizeIsPatchedCorrectly) {
|
2020-01-14 14:32:11 +01:00
|
|
|
MockKernelWithInternals *kernel = new MockKernelWithInternals(*pClDevice);
|
2017-12-21 00:45:38 +01:00
|
|
|
|
|
|
|
// store offset to child's simd size in kernel info
|
|
|
|
uint32_t crossThreadOffset = 0; // offset of simd size
|
|
|
|
kernel->kernelInfo.childrenKernelsIdOffset.push_back({0, crossThreadOffset});
|
|
|
|
|
|
|
|
// add a new block kernel to program
|
2018-08-16 14:28:58 +02:00
|
|
|
auto infoBlock = new KernelInfo();
|
2017-12-21 00:45:38 +01:00
|
|
|
kernel->executionEnvironmentBlock.CompiledSIMD8 = 0;
|
|
|
|
kernel->executionEnvironmentBlock.CompiledSIMD16 = 1;
|
|
|
|
kernel->executionEnvironmentBlock.CompiledSIMD32 = 0;
|
|
|
|
infoBlock->patchInfo.executionEnvironment = &kernel->executionEnvironmentBlock;
|
2019-10-27 19:48:26 +01:00
|
|
|
kernel->mockProgram->blockKernelManager->addBlockKernelInfo(infoBlock);
|
2017-12-21 00:45:38 +01:00
|
|
|
|
|
|
|
// patch block's simd size
|
|
|
|
kernel->mockKernel->patchBlocksSimdSize();
|
|
|
|
|
|
|
|
// obtain block's simd size from cross thread data
|
|
|
|
void *blockSimdSize = ptrOffset(kernel->mockKernel->getCrossThreadData(), kernel->kernelInfo.childrenKernelsIdOffset[0].second);
|
|
|
|
uint32_t *simdSize = reinterpret_cast<uint32_t *>(blockSimdSize);
|
|
|
|
|
|
|
|
// check that block's simd size has been patched correctly
|
2019-10-27 19:48:26 +01:00
|
|
|
EXPECT_EQ(kernel->mockProgram->blockKernelManager->getBlockKernelInfo(0)->getMaxSimdSize(), *simdSize);
|
2017-12-21 00:45:38 +01:00
|
|
|
|
|
|
|
delete kernel;
|
|
|
|
}
|
|
|
|
|
2020-05-06 19:02:35 +02:00
|
|
|
TEST(KernelInfoTest, WhenPatchingBorderColorOffsetThenPatchIsAppliedCorrectly) {
|
2017-12-21 00:45:38 +01:00
|
|
|
KernelInfo info;
|
|
|
|
SPatchSamplerStateArray samplerState = {};
|
|
|
|
samplerState.BorderColorOffset = 3;
|
|
|
|
|
|
|
|
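// without a sampler state array the border color offset defaults to 0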
info.patchInfo.samplerStateArray = nullptr;
|
|
|
|
|
|
|
|
EXPECT_EQ(0u, info.getBorderColorOffset());
|
|
|
|
|
|
|
|
info.patchInfo.samplerStateArray = &samplerState;
|
|
|
|
|
|
|
|
EXPECT_EQ(3u, info.getBorderColorOffset());
|
|
|
|
}
|
|
|
|
|
2020-05-06 19:02:35 +02:00
|
|
|
TEST(KernelInfoTest, GivenArgNameWhenGettingArgNumberByNameThenCorrectValueIsReturned) {
|
2017-12-21 00:45:38 +01:00
|
|
|
KernelInfo info;
|
|
|
|
EXPECT_EQ(-1, info.getArgNumByName(""));
|
|
|
|
|
|
|
|
KernelArgInfo kai;
|
2020-01-11 18:25:26 +01:00
|
|
|
kai.metadataExtended = std::make_unique<ArgTypeMetadataExtended>();
|
|
|
|
kai.metadataExtended->argName = "arg1";
|
|
|
|
info.kernelArgInfo.push_back(std::move(kai));
|
2017-12-21 00:45:38 +01:00
|
|
|
|
|
|
|
EXPECT_EQ(-1, info.getArgNumByName(""));
|
|
|
|
EXPECT_EQ(-1, info.getArgNumByName("arg2"));
|
|
|
|
|
|
|
|
EXPECT_EQ(0, info.getArgNumByName("arg1"));
|
|
|
|
|
2020-01-11 18:25:26 +01:00
|
|
|
kai = {};
|
|
|
|
kai.metadataExtended = std::make_unique<ArgTypeMetadataExtended>();
|
|
|
|
kai.metadataExtended->argName = "arg2";
|
|
|
|
info.kernelArgInfo.push_back(std::move(kai));
|
2017-12-21 00:45:38 +01:00
|
|
|
|
|
|
|
EXPECT_EQ(0, info.getArgNumByName("arg1"));
|
|
|
|
EXPECT_EQ(1, info.getArgNumByName("arg2"));
|
2020-01-11 18:25:26 +01:00
|
|
|
|
|
|
|
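// once the extended metadata is dropped, the argument can no longer be resolved by name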
info.kernelArgInfo[0].metadataExtended.reset();
|
|
|
|
EXPECT_EQ(-1, info.getArgNumByName("arg1"));
|
2017-12-21 00:45:38 +01:00
|
|
|
}
|
|
|
|
|
2020-05-06 19:02:35 +02:00
|
|
|
TEST(KernelTest, GivenNormalKernelWhenGettingInstructionHeapSizeForExecutionModelThenZeroIsReturned) {
|
2020-03-24 18:00:34 +01:00
|
|
|
auto device = std::make_unique<MockClDevice>(MockDevice::createWithNewExecutionEnvironment<MockDevice>(defaultHwInfo.get()));
|
2017-12-21 00:45:38 +01:00
|
|
|
MockKernelWithInternals kernel(*device);
|
|
|
|
|
|
|
|
EXPECT_EQ(0u, kernel.mockKernel->getInstructionHeapSizeForExecutionModel());
|
|
|
|
}
|
|
|
|
|
2020-05-06 19:02:35 +02:00
|
|
|
TEST(KernelTest, WhenSettingKernelArgThenBuiltinDispatchInfoBuilderIsUsed) {
|
2017-12-21 00:45:38 +01:00
|
|
|
struct MockBuiltinDispatchBuilder : BuiltinDispatchInfoBuilder {
|
|
|
|
MockBuiltinDispatchBuilder(BuiltIns &builtins)
|
|
|
|
: BuiltinDispatchInfoBuilder(builtins) {
|
|
|
|
}
|
|
|
|
|
|
|
|
bool setExplicitArg(uint32_t argIndex, size_t argSize, const void *argVal, cl_int &err) const override {
|
|
|
|
receivedArgs.push_back(std::make_tuple(argIndex, argSize, argVal));
|
|
|
|
err = errToReturn;
|
|
|
|
return valueToReturn;
|
|
|
|
}
|
|
|
|
|
|
|
|
bool valueToReturn = false;
|
|
|
|
cl_int errToReturn = CL_SUCCESS;
|
|
|
|
mutable std::vector<std::tuple<uint32_t, size_t, const void *>> receivedArgs;
|
|
|
|
};
|
|
|
|
|
2020-03-24 18:00:34 +01:00
|
|
|
auto device = std::make_unique<MockClDevice>(MockDevice::createWithNewExecutionEnvironment<MockDevice>(defaultHwInfo.get()));
|
2017-12-21 00:45:38 +01:00
|
|
|
MockKernelWithInternals kernel(*device);
|
|
|
|
kernel.kernelInfo.resizeKernelArgInfoAndRegisterParameter(1);
|
|
|
|
kernel.mockKernel->initialize();
|
|
|
|
|
2020-02-27 15:32:57 +01:00
|
|
|
MockBuiltinDispatchBuilder mockBuilder(*device->getBuiltIns());
|
2017-12-21 00:45:38 +01:00
|
|
|
kernel.kernelInfo.builtinDispatchBuilder = &mockBuilder;
|
|
|
|
|
|
|
|
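// per the expectations below: when the builder returns false the kernel propagates the builder's error code, when it returns true the call resolves to CL_INVALID_ARG_INDEX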
mockBuilder.valueToReturn = false;
|
|
|
|
mockBuilder.errToReturn = CL_SUCCESS;
|
|
|
|
EXPECT_EQ(0u, kernel.mockKernel->getPatchedArgumentsNum());
|
|
|
|
auto ret = kernel.mockKernel->setArg(1, 3, reinterpret_cast<const void *>(5));
|
|
|
|
EXPECT_EQ(CL_SUCCESS, ret);
|
|
|
|
EXPECT_EQ(1u, kernel.mockKernel->getPatchedArgumentsNum());
|
|
|
|
|
|
|
|
mockBuilder.valueToReturn = false;
|
|
|
|
mockBuilder.errToReturn = CL_INVALID_ARG_SIZE;
|
|
|
|
ret = kernel.mockKernel->setArg(7, 11, reinterpret_cast<const void *>(13));
|
|
|
|
EXPECT_EQ(CL_INVALID_ARG_SIZE, ret);
|
|
|
|
EXPECT_EQ(1u, kernel.mockKernel->getPatchedArgumentsNum());
|
|
|
|
|
|
|
|
mockBuilder.valueToReturn = true;
|
|
|
|
mockBuilder.errToReturn = CL_SUCCESS;
|
|
|
|
ret = kernel.mockKernel->setArg(17, 19, reinterpret_cast<const void *>(23));
|
|
|
|
EXPECT_EQ(CL_INVALID_ARG_INDEX, ret);
|
|
|
|
EXPECT_EQ(1u, kernel.mockKernel->getPatchedArgumentsNum());
|
|
|
|
|
|
|
|
mockBuilder.valueToReturn = true;
|
|
|
|
mockBuilder.errToReturn = CL_INVALID_ARG_SIZE;
|
|
|
|
ret = kernel.mockKernel->setArg(29, 31, reinterpret_cast<const void *>(37));
|
|
|
|
EXPECT_EQ(CL_INVALID_ARG_INDEX, ret);
|
|
|
|
EXPECT_EQ(1u, kernel.mockKernel->getPatchedArgumentsNum());
|
|
|
|
|
|
|
|
ASSERT_EQ(4U, mockBuilder.receivedArgs.size());
|
|
|
|
|
|
|
|
EXPECT_EQ(1U, std::get<0>(mockBuilder.receivedArgs[0]));
|
|
|
|
EXPECT_EQ(3U, std::get<1>(mockBuilder.receivedArgs[0]));
|
|
|
|
EXPECT_EQ(reinterpret_cast<const void *>(5), std::get<2>(mockBuilder.receivedArgs[0]));
|
|
|
|
|
|
|
|
EXPECT_EQ(7U, std::get<0>(mockBuilder.receivedArgs[1]));
|
|
|
|
EXPECT_EQ(11U, std::get<1>(mockBuilder.receivedArgs[1]));
|
|
|
|
EXPECT_EQ(reinterpret_cast<const void *>(13), std::get<2>(mockBuilder.receivedArgs[1]));
|
|
|
|
|
|
|
|
EXPECT_EQ(17U, std::get<0>(mockBuilder.receivedArgs[2]));
|
|
|
|
EXPECT_EQ(19U, std::get<1>(mockBuilder.receivedArgs[2]));
|
|
|
|
EXPECT_EQ(reinterpret_cast<const void *>(23), std::get<2>(mockBuilder.receivedArgs[2]));
|
|
|
|
|
|
|
|
EXPECT_EQ(29U, std::get<0>(mockBuilder.receivedArgs[3]));
|
|
|
|
EXPECT_EQ(31U, std::get<1>(mockBuilder.receivedArgs[3]));
|
|
|
|
EXPECT_EQ(reinterpret_cast<const void *>(37), std::get<2>(mockBuilder.receivedArgs[3]));
|
|
|
|
}
|
|
|
|
TEST(KernelTest, givenKernelWhenDebugFlagToUseMaxSimdForCalculationsIsUsedThenMaxWorkgroupSizeIsSimdSizeDependent) {
|
|
|
|
DebugManagerStateRestore dbgStateRestore;
|
|
|
|
DebugManager.flags.UseMaxSimdSizeToDeduceMaxWorkgroupSize.set(true);
|
|
|
|
|
2020-03-24 11:42:54 +01:00
|
|
|
HardwareInfo myHwInfo = *defaultHwInfo;
|
2019-05-08 16:00:24 +02:00
|
|
|
GT_SYSTEM_INFO &mySysInfo = myHwInfo.gtSystemInfo;
|
2017-12-21 00:45:38 +01:00
|
|
|
|
|
|
|
mySysInfo.EUCount = 24;
|
|
|
|
mySysInfo.SubSliceCount = 3;
|
|
|
|
mySysInfo.ThreadCount = 24 * 7;
|
2020-01-14 14:32:11 +01:00
|
|
|
auto device = std::make_unique<MockClDevice>(MockDevice::createWithNewExecutionEnvironment<MockDevice>(&myHwInfo));
|
2017-12-21 00:45:38 +01:00
|
|
|
|
|
|
|
MockKernelWithInternals kernel(*device);
|
2020-05-26 14:11:22 +02:00
|
|
|
kernel.executionEnvironment.LargestCompiledSIMDSize = CommonConstants::maximalSimdSize;
|
2017-12-21 00:45:38 +01:00
|
|
|
|
|
|
|
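// with the debug flag enabled, the reported max work group size scales with the largest compiled SIMD size: 32 -> 1024, 16 -> 512, 8 -> 256 for this EU/thread configuration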
size_t maxKernelWkgSize = 0;
|
|
|
|
kernel.mockKernel->getWorkGroupInfo(device.get(), CL_KERNEL_WORK_GROUP_SIZE, sizeof(size_t), &maxKernelWkgSize, nullptr);
|
|
|
|
EXPECT_EQ(1024u, maxKernelWkgSize);
|
|
|
|
kernel.executionEnvironment.LargestCompiledSIMDSize = 16;
|
|
|
|
kernel.mockKernel->getWorkGroupInfo(device.get(), CL_KERNEL_WORK_GROUP_SIZE, sizeof(size_t), &maxKernelWkgSize, nullptr);
|
|
|
|
EXPECT_EQ(512u, maxKernelWkgSize);
|
|
|
|
kernel.executionEnvironment.LargestCompiledSIMDSize = 8;
|
|
|
|
kernel.mockKernel->getWorkGroupInfo(device.get(), CL_KERNEL_WORK_GROUP_SIZE, sizeof(size_t), &maxKernelWkgSize, nullptr);
|
|
|
|
EXPECT_EQ(256u, maxKernelWkgSize);
|
|
|
|
}
|
2018-04-03 16:06:37 +02:00
|
|
|
|
|
|
|
TEST(KernelTest, givenKernelWithKernelInfoWith32bitPointerSizeThenReport32bit) {
|
|
|
|
KernelInfo info;
|
|
|
|
info.gpuPointerSize = 4;
|
|
|
|
|
|
|
|
MockContext context;
|
2020-01-14 14:32:11 +01:00
|
|
|
auto device = std::make_unique<MockClDevice>(MockDevice::createWithNewExecutionEnvironment<MockDevice>(nullptr));
|
2020-02-20 08:12:44 +01:00
|
|
|
MockProgram program(*device->getExecutionEnvironment(), &context, false, &device->getDevice());
|
2018-04-03 16:06:37 +02:00
|
|
|
std::unique_ptr<MockKernel> kernel(new MockKernel(&program, info, *device.get()));
|
|
|
|
|
|
|
|
EXPECT_TRUE(kernel->is32Bit());
|
|
|
|
}
|
|
|
|
|
|
|
|
TEST(KernelTest, givenKernelWithKernelInfoWith64bitPointerSizeThenReport64bit) {
|
|
|
|
KernelInfo info;
|
|
|
|
info.gpuPointerSize = 8;
|
|
|
|
|
|
|
|
MockContext context;
|
2020-01-14 14:32:11 +01:00
|
|
|
auto device = std::make_unique<MockClDevice>(MockDevice::createWithNewExecutionEnvironment<MockDevice>(nullptr));
|
2020-02-20 08:12:44 +01:00
|
|
|
MockProgram program(*device->getExecutionEnvironment(), &context, false, &device->getDevice());
|
2018-04-03 16:06:37 +02:00
|
|
|
std::unique_ptr<MockKernel> kernel(new MockKernel(&program, info, *device.get()));
|
|
|
|
|
|
|
|
EXPECT_FALSE(kernel->is32Bit());
|
|
|
|
}
|
2018-08-03 08:14:43 +02:00
|
|
|
|
|
|
|
TEST(KernelTest, givenFtrRenderCompressedBuffersWhenInitializingArgsWithNonStatefulAccessThenMarkKernelForAuxTranslation) {
|
2019-06-25 10:22:59 +02:00
|
|
|
DebugManagerStateRestore restore;
|
2020-07-02 12:53:22 +02:00
|
|
|
DebugManager.flags.ForceAuxTranslationEnabled.set(1);
|
2020-01-14 14:32:11 +01:00
|
|
|
auto device = std::make_unique<MockClDevice>(MockDevice::createWithNewExecutionEnvironment<MockDevice>(nullptr));
|
2020-03-04 08:51:02 +01:00
|
|
|
auto hwInfo = device->getRootDeviceEnvironment().getMutableHardwareInfo();
|
2019-05-06 12:33:44 +02:00
|
|
|
auto &capabilityTable = hwInfo->capabilityTable;
|
2019-01-28 15:27:15 +01:00
|
|
|
auto context = clUniquePtr(new MockContext(device.get()));
|
2019-10-16 10:59:10 +02:00
|
|
|
context->contextType = ContextType::CONTEXT_TYPE_UNRESTRICTIVE;
|
2019-01-28 15:27:15 +01:00
|
|
|
MockKernelWithInternals kernel(*device, context.get());
|
2018-08-03 08:14:43 +02:00
|
|
|
kernel.kernelInfo.kernelArgInfo.resize(1);
|
2020-01-11 18:25:26 +01:00
|
|
|
kernel.kernelInfo.kernelArgInfo[0].metadataExtended = std::make_unique<NEO::ArgTypeMetadataExtended>();
|
|
|
|
kernel.kernelInfo.kernelArgInfo[0].metadataExtended->type = "char *";
|
|
|
|
kernel.kernelInfo.kernelArgInfo[0].isBuffer = true;
|
2018-08-03 08:14:43 +02:00
|
|
|
|
2019-05-06 12:33:44 +02:00
|
|
|
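// with compressed buffers disabled, aux translation must not be required regardless of the buffer's statefulness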
capabilityTable.ftrRenderCompressedBuffers = false;
|
2020-01-11 18:25:26 +01:00
|
|
|
kernel.kernelInfo.kernelArgInfo[0].pureStatefulBufferAccess = true;
|
2018-08-03 08:14:43 +02:00
|
|
|
kernel.mockKernel->initialize();
|
|
|
|
EXPECT_FALSE(kernel.mockKernel->isAuxTranslationRequired());
|
|
|
|
|
2020-01-11 18:25:26 +01:00
|
|
|
kernel.kernelInfo.kernelArgInfo[0].pureStatefulBufferAccess = false;
|
2018-08-03 08:14:43 +02:00
|
|
|
kernel.mockKernel->initialize();
|
|
|
|
EXPECT_FALSE(kernel.mockKernel->isAuxTranslationRequired());
|
|
|
|
|
2019-05-06 12:33:44 +02:00
|
|
|
capabilityTable.ftrRenderCompressedBuffers = true;
|
2018-08-03 08:14:43 +02:00
|
|
|
kernel.mockKernel->initialize();
|
2020-07-02 12:53:22 +02:00
|
|
|
EXPECT_TRUE(kernel.mockKernel->isAuxTranslationRequired());
|
2019-09-05 14:07:52 +02:00
|
|
|
|
2020-07-02 12:53:22 +02:00
|
|
|
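// with the debug override removed (-1), the requirement falls back to the hw helper's requiresAuxResolves()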
DebugManager.flags.ForceAuxTranslationEnabled.set(-1);
|
|
|
|
kernel.mockKernel->initialize();
|
|
|
|
EXPECT_EQ(HwHelper::get(hwInfo->platform.eRenderCoreFamily).requiresAuxResolves(), kernel.mockKernel->isAuxTranslationRequired());
|
2019-06-25 10:22:59 +02:00
|
|
|
|
2020-07-02 12:53:22 +02:00
|
|
|
DebugManager.flags.ForceAuxTranslationEnabled.set(0);
|
2019-06-25 10:22:59 +02:00
|
|
|
kernel.mockKernel->initialize();
|
|
|
|
EXPECT_FALSE(kernel.mockKernel->isAuxTranslationRequired());
|
2018-08-03 08:14:43 +02:00
|
|
|
}
|
2018-12-14 16:00:43 +01:00
|
|
|
|
|
|
|
TEST(KernelTest, givenDebugVariableSetWhenKernelHasNonStatefulBufferAccessThenMarkKernelForAuxTranslation) {
|
|
|
|
DebugManagerStateRestore restore;
|
|
|
|
DebugManager.flags.RenderCompressedBuffersEnabled.set(1);
|
|
|
|
|
2020-03-24 11:42:54 +01:00
|
|
|
HardwareInfo localHwInfo = *defaultHwInfo;
|
2019-01-28 15:27:15 +01:00
|
|
|
|
2020-01-14 14:32:11 +01:00
|
|
|
auto device = std::make_unique<MockClDevice>(MockDevice::createWithNewExecutionEnvironment<MockDevice>(&localHwInfo));
|
2019-01-28 15:27:15 +01:00
|
|
|
auto context = clUniquePtr(new MockContext(device.get()));
|
|
|
|
MockKernelWithInternals kernel(*device, context.get());
|
2018-12-14 16:00:43 +01:00
|
|
|
kernel.kernelInfo.kernelArgInfo.resize(1);
|
2020-01-11 18:25:26 +01:00
|
|
|
kernel.kernelInfo.kernelArgInfo[0].metadataExtended = std::make_unique<NEO::ArgTypeMetadataExtended>();
|
|
|
|
kernel.kernelInfo.kernelArgInfo[0].metadataExtended->type = "char *";
|
|
|
|
kernel.kernelInfo.kernelArgInfo[0].isBuffer = true;
|
2018-12-14 16:00:43 +01:00
|
|
|
|
2020-01-11 18:25:26 +01:00
|
|
|
kernel.kernelInfo.kernelArgInfo[0].pureStatefulBufferAccess = false;
|
2018-12-14 16:00:43 +01:00
|
|
|
localHwInfo.capabilityTable.ftrRenderCompressedBuffers = false;
|
|
|
|
|
|
|
|
kernel.mockKernel->initialize();
|
2019-09-05 14:07:52 +02:00
|
|
|
|
|
|
|
if (HwHelper::get(localHwInfo.platform.eRenderCoreFamily).requiresAuxResolves()) {
|
|
|
|
EXPECT_TRUE(kernel.mockKernel->isAuxTranslationRequired());
|
|
|
|
} else {
|
|
|
|
EXPECT_FALSE(kernel.mockKernel->isAuxTranslationRequired());
|
|
|
|
}
|
2018-12-14 16:00:43 +01:00
|
|
|
}
|
2018-12-06 15:33:02 +01:00
|
|
|
|
2019-11-07 09:27:44 +01:00
|
|
|
TEST(KernelTest, givenKernelWithPairArgumentWhenItIsInitializedThenPatchImmediateIsUsedAsArgHandler) {
|
2020-03-24 11:42:54 +01:00
|
|
|
HardwareInfo localHwInfo = *defaultHwInfo;
|
2019-11-07 09:27:44 +01:00
|
|
|
|
2020-01-14 14:32:11 +01:00
|
|
|
auto device = std::make_unique<MockClDevice>(MockDevice::createWithNewExecutionEnvironment<MockDevice>(&localHwInfo));
|
2019-11-07 09:27:44 +01:00
|
|
|
auto context = clUniquePtr(new MockContext(device.get()));
|
|
|
|
MockKernelWithInternals kernel(*device, context.get());
|
|
|
|
kernel.kernelInfo.kernelArgInfo.resize(1);
|
2020-01-11 18:25:26 +01:00
|
|
|
kernel.kernelInfo.kernelArgInfo[0].metadataExtended = std::make_unique<NEO::ArgTypeMetadataExtended>();
|
|
|
|
kernel.kernelInfo.kernelArgInfo[0].metadataExtended->type = "pair<char*, int>";
|
2019-11-07 09:27:44 +01:00
|
|
|
|
|
|
|
kernel.mockKernel->initialize();
|
|
|
|
EXPECT_EQ(&Kernel::setArgImmediate, kernel.mockKernel->kernelArgHandlers[0]);
|
|
|
|
}
|
|
|
|
|
2018-12-06 15:33:02 +01:00
|
|
|
TEST(KernelTest, whenNullAllocationThenAssignNullPointerToCacheFlushVector) {
|
2020-03-24 18:00:34 +01:00
|
|
|
auto device = std::make_unique<MockClDevice>(MockDevice::createWithNewExecutionEnvironment<MockDevice>(defaultHwInfo.get()));
|
2018-12-06 15:33:02 +01:00
|
|
|
MockKernelWithInternals kernel(*device);
|
|
|
|
kernel.mockKernel->kernelArgRequiresCacheFlush.resize(1);
|
|
|
|
kernel.mockKernel->kernelArgRequiresCacheFlush[0] = reinterpret_cast<GraphicsAllocation *>(0x1);
|
|
|
|
|
|
|
|
kernel.mockKernel->addAllocationToCacheFlushVector(0, nullptr);
|
|
|
|
EXPECT_EQ(nullptr, kernel.mockKernel->kernelArgRequiresCacheFlush[0]);
|
|
|
|
}
|
|
|
|
|
2020-02-13 11:37:05 +01:00
|
|
|
TEST(KernelTest, givenKernelCompiledWithSimdSizeLowerThanExpectedWhenInitializingThenReturnError) {
|
2020-03-24 18:00:34 +01:00
|
|
|
auto device = std::make_unique<MockClDevice>(MockDevice::createWithNewExecutionEnvironment<MockDevice>(defaultHwInfo.get()));
|
2020-02-13 11:37:05 +01:00
|
|
|
auto minSimd = HwHelper::get(device->getHardwareInfo().platform.eRenderCoreFamily).getMinimalSIMDSize();
|
|
|
|
MockKernelWithInternals kernel(*device);
|
|
|
|
kernel.executionEnvironment.CompiledSIMD32 = 0;
|
|
|
|
kernel.executionEnvironment.CompiledSIMD16 = 0;
|
|
|
|
kernel.executionEnvironment.CompiledSIMD8 = 1;
|
|
|
|
|
|
|
|
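// a SIMD8-only binary is expected to be rejected on platforms whose minimal SIMD size is larger than 8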
cl_int retVal = kernel.mockKernel->initialize();
|
|
|
|
|
|
|
|
if (minSimd > 8) {
|
|
|
|
EXPECT_EQ(CL_INVALID_KERNEL, retVal);
|
|
|
|
} else {
|
|
|
|
EXPECT_EQ(CL_SUCCESS, retVal);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
TEST(KernelTest, givenKernelCompiledWithSimdOneWhenInitializingThenReturnError) {
|
2020-03-24 18:00:34 +01:00
|
|
|
auto device = std::make_unique<MockClDevice>(MockDevice::createWithNewExecutionEnvironment<MockDevice>(defaultHwInfo.get()));
|
2020-02-13 11:37:05 +01:00
|
|
|
MockKernelWithInternals kernel(*device);
|
|
|
|
kernel.executionEnvironment.CompiledSIMD32 = 0;
|
|
|
|
kernel.executionEnvironment.CompiledSIMD16 = 0;
|
|
|
|
kernel.executionEnvironment.CompiledSIMD8 = 0;
|
|
|
|
kernel.executionEnvironment.LargestCompiledSIMDSize = 1;
|
|
|
|
|
|
|
|
cl_int retVal = kernel.mockKernel->initialize();
|
|
|
|
|
|
|
|
EXPECT_EQ(CL_SUCCESS, retVal);
|
|
|
|
}
|
|
|
|
|
2018-12-06 15:33:02 +01:00
|
|
|
TEST(KernelTest, whenAllocationRequiringCacheFlushThenAssignAllocationPointerToCacheFlushVector) {
|
|
|
|
MockGraphicsAllocation mockAllocation;
|
2020-03-24 18:00:34 +01:00
|
|
|
auto device = std::make_unique<MockClDevice>(MockDevice::createWithNewExecutionEnvironment<MockDevice>(defaultHwInfo.get()));
|
2018-12-06 15:33:02 +01:00
|
|
|
MockKernelWithInternals kernel(*device);
|
|
|
|
kernel.mockKernel->kernelArgRequiresCacheFlush.resize(1);
|
|
|
|
|
|
|
|
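// a non-writable allocation that still requires an L3 flush should be tracked in the cache flush vector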
mockAllocation.setMemObjectsAllocationWithWritableFlags(false);
|
2019-02-27 14:59:46 +01:00
|
|
|
mockAllocation.setFlushL3Required(true);
|
2018-12-06 15:33:02 +01:00
|
|
|
|
|
|
|
kernel.mockKernel->addAllocationToCacheFlushVector(0, &mockAllocation);
|
|
|
|
EXPECT_EQ(&mockAllocation, kernel.mockKernel->kernelArgRequiresCacheFlush[0]);
|
|
|
|
}
|
|
|
|
|
2019-04-26 13:05:48 +02:00
|
|
|
TEST(KernelTest, whenKernelRequiresCacheFlushAfterWalkerThenCacheFlushCommandIsRequired) {
|
2019-02-10 17:50:54 +01:00
|
|
|
MockGraphicsAllocation mockAllocation;
|
2020-03-24 18:00:34 +01:00
|
|
|
auto device = std::make_unique<MockClDevice>(MockDevice::createWithNewExecutionEnvironment<MockDevice>(defaultHwInfo.get()));
|
2019-02-10 17:50:54 +01:00
|
|
|
MockKernelWithInternals kernel(*device);
|
|
|
|
kernel.mockKernel->svmAllocationsRequireCacheFlush = true;
|
|
|
|
|
|
|
|
MockCommandQueue queue;
|
|
|
|
|
|
|
|
DebugManagerStateRestore debugRestore;
|
|
|
|
DebugManager.flags.EnableCacheFlushAfterWalker.set(true);
|
|
|
|
|
|
|
|
queue.requiresCacheFlushAfterWalker = true;
|
|
|
|
EXPECT_TRUE(kernel.mockKernel->requiresCacheFlushCommand(queue));
|
|
|
|
|
|
|
|
queue.requiresCacheFlushAfterWalker = false;
|
|
|
|
EXPECT_TRUE(kernel.mockKernel->requiresCacheFlushCommand(queue));
|
|
|
|
}
|
|
|
|
|
2019-04-03 11:22:04 +02:00
|
|
|
TEST(KernelTest, whenAllocationWriteableThenDoNotAssignAllocationPointerToCacheFlushVector) {
|
2018-12-06 15:33:02 +01:00
|
|
|
MockGraphicsAllocation mockAllocation;
|
2020-03-24 18:00:34 +01:00
|
|
|
auto device = std::make_unique<MockClDevice>(MockDevice::createWithNewExecutionEnvironment<MockDevice>(defaultHwInfo.get()));
|
2018-12-06 15:33:02 +01:00
|
|
|
MockKernelWithInternals kernel(*device);
|
|
|
|
kernel.mockKernel->kernelArgRequiresCacheFlush.resize(1);
|
|
|
|
|
|
|
|
mockAllocation.setMemObjectsAllocationWithWritableFlags(true);
|
2019-02-27 14:59:46 +01:00
|
|
|
mockAllocation.setFlushL3Required(false);
|
2018-12-06 15:33:02 +01:00
|
|
|
|
|
|
|
kernel.mockKernel->addAllocationToCacheFlushVector(0, &mockAllocation);
|
2019-04-03 11:22:04 +02:00
|
|
|
EXPECT_EQ(nullptr, kernel.mockKernel->kernelArgRequiresCacheFlush[0]);
|
2018-12-06 15:33:02 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
TEST(KernelTest, whenAllocationReadOnlyNonFlushRequiredThenAssignNullPointerToCacheFlushVector) {
|
|
|
|
MockGraphicsAllocation mockAllocation;
|
2020-03-24 18:00:34 +01:00
|
|
|
auto device = std::make_unique<MockClDevice>(MockDevice::createWithNewExecutionEnvironment<MockDevice>(defaultHwInfo.get()));
|
2018-12-06 15:33:02 +01:00
|
|
|
MockKernelWithInternals kernel(*device);
|
|
|
|
kernel.mockKernel->kernelArgRequiresCacheFlush.resize(1);
|
|
|
|
kernel.mockKernel->kernelArgRequiresCacheFlush[0] = reinterpret_cast<GraphicsAllocation *>(0x1);
|
|
|
|
|
|
|
|
mockAllocation.setMemObjectsAllocationWithWritableFlags(false);
|
2019-02-27 14:59:46 +01:00
|
|
|
mockAllocation.setFlushL3Required(false);
|
2018-12-06 15:33:02 +01:00
|
|
|
|
|
|
|
kernel.mockKernel->addAllocationToCacheFlushVector(0, &mockAllocation);
|
|
|
|
EXPECT_EQ(nullptr, kernel.mockKernel->kernelArgRequiresCacheFlush[0]);
|
|
|
|
}
|
|
|
|
|
2019-02-06 14:30:32 +01:00
|
|
|
TEST(KernelTest, givenKernelUsesPrivateMemoryWhenDeviceReleasedBeforeKernelThenKernelUsesMemoryManagerFromEnvironment) {
|
2020-03-24 18:00:34 +01:00
|
|
|
auto device = std::make_unique<MockClDevice>(MockDevice::createWithNewExecutionEnvironment<MockDevice>(defaultHwInfo.get()));
|
2019-02-06 14:30:32 +01:00
|
|
|
auto executionEnvironment = device->getExecutionEnvironment();
|
|
|
|
|
|
|
|
auto mockKernel = std::make_unique<MockKernelWithInternals>(*device);
|
2020-03-25 15:15:03 +01:00
|
|
|
GraphicsAllocation *privateSurface = device->getExecutionEnvironment()->memoryManager->allocateGraphicsMemoryWithProperties(MockAllocationProperties{device->getRootDeviceIndex(), MemoryConstants::pageSize});
|
2019-02-06 14:30:32 +01:00
|
|
|
mockKernel->mockKernel->setPrivateSurface(privateSurface, 10);
|
|
|
|
|
|
|
|
executionEnvironment->incRefInternal();
|
|
|
|
device.reset(nullptr);
|
|
|
|
mockKernel.reset(nullptr);
|
|
|
|
executionEnvironment->decRefInternal();
|
|
|
|
}
|
2019-03-21 12:51:20 +01:00
|
|
|
|
|
|
|
TEST(KernelTest, givenAllArgumentsAreStatefulBuffersWhenInitializingThenAllBufferArgsStatefulIsTrue) {
|
2020-03-24 18:00:34 +01:00
|
|
|
auto device = std::make_unique<MockClDevice>(MockDevice::createWithNewExecutionEnvironment<MockDevice>(defaultHwInfo.get()));
|
2019-03-21 12:51:20 +01:00
|
|
|
|
|
|
|
std::vector<KernelArgInfo> kernelArgInfo(2);
|
|
|
|
kernelArgInfo[0].isBuffer = true;
|
|
|
|
kernelArgInfo[1].isBuffer = true;
|
|
|
|
kernelArgInfo[0].pureStatefulBufferAccess = true;
|
|
|
|
kernelArgInfo[1].pureStatefulBufferAccess = true;
|
|
|
|
|
|
|
|
MockKernelWithInternals kernel{*device};
|
2020-01-11 18:25:26 +01:00
|
|
|
kernel.kernelInfo.kernelArgInfo.swap(kernelArgInfo);
|
2019-03-21 12:51:20 +01:00
|
|
|
|
|
|
|
kernel.mockKernel->initialize();
|
|
|
|
EXPECT_TRUE(kernel.mockKernel->allBufferArgsStateful);
|
|
|
|
}
|
|
|
|
|
|
|
|
TEST(KernelTest, givenAllArgumentsAreBuffersButNotAllAreStatefulWhenInitializingThenAllBufferArgsStatefulIsFalse) {
|
2020-03-24 18:00:34 +01:00
|
|
|
auto device = std::make_unique<MockClDevice>(MockDevice::createWithNewExecutionEnvironment<MockDevice>(defaultHwInfo.get()));
|
2019-03-21 12:51:20 +01:00
|
|
|
|
|
|
|
std::vector<KernelArgInfo> kernelArgInfo(2);
|
|
|
|
kernelArgInfo[0].isBuffer = true;
|
|
|
|
kernelArgInfo[1].isBuffer = true;
|
|
|
|
kernelArgInfo[0].pureStatefulBufferAccess = true;
|
|
|
|
kernelArgInfo[1].pureStatefulBufferAccess = false;
|
|
|
|
|
|
|
|
MockKernelWithInternals kernel{*device};
|
2020-01-11 18:25:26 +01:00
|
|
|
kernel.kernelInfo.kernelArgInfo.swap(kernelArgInfo);
|
2019-03-21 12:51:20 +01:00
|
|
|
|
|
|
|
kernel.mockKernel->initialize();
|
|
|
|
EXPECT_FALSE(kernel.mockKernel->allBufferArgsStateful);
|
|
|
|
}
|
|
|
|
|
|
|
|
TEST(KernelTest, givenNotAllArgumentsAreBuffersButAllBuffersAreStatefulWhenInitializingThenAllBufferArgsStatefulIsTrue) {
|
2020-03-24 18:00:34 +01:00
|
|
|
auto device = std::make_unique<MockClDevice>(MockDevice::createWithNewExecutionEnvironment<MockDevice>(defaultHwInfo.get()));
|
2019-03-21 12:51:20 +01:00
|
|
|
|
|
|
|
std::vector<KernelArgInfo> kernelArgInfo(2);
|
|
|
|
kernelArgInfo[0].isBuffer = true;
|
|
|
|
kernelArgInfo[1].isBuffer = false;
|
|
|
|
kernelArgInfo[0].pureStatefulBufferAccess = true;
|
|
|
|
kernelArgInfo[1].pureStatefulBufferAccess = false;
|
|
|
|
|
|
|
|
MockKernelWithInternals kernel{*device};
|
2020-01-11 18:25:26 +01:00
|
|
|
kernel.kernelInfo.kernelArgInfo.swap(kernelArgInfo);
|
2019-03-21 12:51:20 +01:00
|
|
|
|
|
|
|
kernel.mockKernel->initialize();
|
|
|
|
EXPECT_TRUE(kernel.mockKernel->allBufferArgsStateful);
|
|
|
|
}
|
2019-06-27 14:06:19 +02:00
|
|
|
|
|
|
|
TEST(KernelTest, givenKernelRequiringPrivateScratchSpaceWhenGettingSizeForPrivateScratchSpaceThenCorrectSizeIsReturned) {
|
2020-03-24 18:00:34 +01:00
|
|
|
auto device = std::make_unique<MockClDevice>(MockDevice::createWithNewExecutionEnvironment<MockDevice>(defaultHwInfo.get()));
|
2019-06-27 14:06:19 +02:00
|
|
|
|
|
|
|
MockKernelWithInternals mockKernel(*device);
|
|
|
|
SPatchMediaVFEState mediaVFEstate = {};
|
|
|
|
SPatchMediaVFEState mediaVFEstateSlot1 = {};
|
|
|
|
mediaVFEstateSlot1.PerThreadScratchSpace = 1024u;
|
|
|
|
mediaVFEstate.PerThreadScratchSpace = 512u;
|
|
|
|
mockKernel.kernelInfo.patchInfo.mediavfestate = &mediaVFEstate;
|
|
|
|
mockKernel.kernelInfo.patchInfo.mediaVfeStateSlot1 = &mediaVFEstateSlot1;
|
|
|
|
|
|
|
|
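// the private scratch size is expected to come from the slot 1 VFE state (1024), not slot 0 (512)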
EXPECT_EQ(1024u, mockKernel.mockKernel->getPrivateScratchSize());
|
|
|
|
}
|
|
|
|
|
|
|
|
TEST(KernelTest, givenKernelWithoutMediaVfeStateSlot1WhenGettingSizeForPrivateScratchSpaceThenCorrectSizeIsReturned) {
|
2020-03-24 18:00:34 +01:00
|
|
|
auto device = std::make_unique<MockClDevice>(MockDevice::createWithNewExecutionEnvironment<MockDevice>(defaultHwInfo.get()));
|
2019-06-27 14:06:19 +02:00
|
|
|
|
|
|
|
MockKernelWithInternals mockKernel(*device);
|
|
|
|
mockKernel.kernelInfo.patchInfo.mediaVfeStateSlot1 = nullptr;
|
|
|
|
|
|
|
|
EXPECT_EQ(0u, mockKernel.mockKernel->getPrivateScratchSize());
|
|
|
|
}
|
2019-07-15 15:08:42 +02:00
|
|
|
|
2019-08-13 11:34:56 +02:00
|
|
|
TEST(KernelTest, givenKernelWithPatchInfoCollectionEnabledWhenPatchWithImplicitSurfaceCalledThenPatchInfoDataIsCollected) {
|
|
|
|
DebugManagerStateRestore restore;
|
|
|
|
DebugManager.flags.AddPatchInfoCommentsForAUBDump.set(true);
|
|
|
|
|
2020-03-24 18:00:34 +01:00
|
|
|
auto device = std::make_unique<MockClDevice>(MockDevice::createWithNewExecutionEnvironment<MockDevice>(defaultHwInfo.get()));
|
2019-08-13 11:34:56 +02:00
|
|
|
MockKernelWithInternals kernel(*device);
|
|
|
|
MockGraphicsAllocation mockAllocation;
|
|
|
|
SPatchAllocateStatelessGlobalMemorySurfaceWithInitialization patchToken{};
|
|
|
|
uint64_t crossThreadData = 0;
|
|
|
|
EXPECT_EQ(0u, kernel.mockKernel->getPatchInfoDataList().size());
|
|
|
|
kernel.mockKernel->patchWithImplicitSurface(&crossThreadData, mockAllocation, patchToken);
|
|
|
|
EXPECT_EQ(1u, kernel.mockKernel->getPatchInfoDataList().size());
|
|
|
|
}
|
|
|
|
|
|
|
|
TEST(KernelTest, givenKernelWithPatchInfoCollectionDisabledWhenPatchWithImplicitSurfaceCalledThenPatchInfoDataIsNotCollected) {
|
2020-03-24 18:00:34 +01:00
|
|
|
auto device = std::make_unique<MockClDevice>(MockDevice::createWithNewExecutionEnvironment<MockDevice>(defaultHwInfo.get()));
|
2019-08-13 11:34:56 +02:00
|
|
|
MockKernelWithInternals kernel(*device);
|
|
|
|
MockGraphicsAllocation mockAllocation;
|
|
|
|
SPatchAllocateStatelessGlobalMemorySurfaceWithInitialization patchToken{};
|
|
|
|
uint64_t crossThreadData = 0;
|
|
|
|
EXPECT_EQ(0u, kernel.mockKernel->getPatchInfoDataList().size());
|
|
|
|
kernel.mockKernel->patchWithImplicitSurface(&crossThreadData, mockAllocation, patchToken);
|
|
|
|
EXPECT_EQ(0u, kernel.mockKernel->getPatchInfoDataList().size());
|
|
|
|
}
|
|
|
|
|
2019-08-23 10:35:57 +02:00
|
|
|
TEST(KernelTest, givenDefaultKernelWhenItIsCreatedThenItReportsStatelessWrites) {
|
2020-03-24 18:00:34 +01:00
|
|
|
auto device = std::make_unique<MockClDevice>(MockDevice::createWithNewExecutionEnvironment<MockDevice>(defaultHwInfo.get()));
|
2019-08-23 10:35:57 +02:00
|
|
|
MockKernelWithInternals kernel(*device);
|
|
|
|
EXPECT_TRUE(kernel.mockKernel->areStatelessWritesUsed());
|
|
|
|
}
|
|
|
|
|
2019-12-17 17:04:57 +01:00
|
|
|
TEST(KernelTest, givenPolicyWhenSetKernelThreadArbitrationPolicyIsCalledThenExpectedClValueIsReturned) {
|
2020-03-24 18:00:34 +01:00
|
|
|
auto device = std::make_unique<MockClDevice>(MockDevice::createWithNewExecutionEnvironment<MockDevice>(defaultHwInfo.get()));
|
2019-12-17 17:04:57 +01:00
|
|
|
MockKernelWithInternals kernel(*device);
|
|
|
|
EXPECT_EQ(CL_SUCCESS, kernel.mockKernel->setKernelThreadArbitrationPolicy(CL_KERNEL_EXEC_INFO_THREAD_ARBITRATION_POLICY_ROUND_ROBIN_INTEL));
|
|
|
|
EXPECT_EQ(CL_SUCCESS, kernel.mockKernel->setKernelThreadArbitrationPolicy(CL_KERNEL_EXEC_INFO_THREAD_ARBITRATION_POLICY_OLDEST_FIRST_INTEL));
|
|
|
|
EXPECT_EQ(CL_SUCCESS, kernel.mockKernel->setKernelThreadArbitrationPolicy(CL_KERNEL_EXEC_INFO_THREAD_ARBITRATION_POLICY_AFTER_DEPENDENCY_ROUND_ROBIN_INTEL));
|
|
|
|
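// a value that does not match any supported thread arbitration policy must return CL_INVALID_VALUE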
uint32_t notExistPolicy = 0;
|
|
|
|
EXPECT_EQ(CL_INVALID_VALUE, kernel.mockKernel->setKernelThreadArbitrationPolicy(notExistPolicy));
|
|
|
|
}
|
|
|
|
|
2020-01-21 19:02:36 +01:00
|
|
|
TEST(KernelTest, GivenDifferentValuesWhenSetKernelExecutionTypeIsCalledThenCorrectValueIsSet) {
|
2020-03-24 18:00:34 +01:00
|
|
|
auto device = std::make_unique<MockClDevice>(MockDevice::createWithNewExecutionEnvironment<MockDevice>(defaultHwInfo.get()));
|
2020-01-21 19:02:36 +01:00
|
|
|
MockKernelWithInternals mockKernelWithInternals(*device);
|
|
|
|
auto &kernel = *mockKernelWithInternals.mockKernel;
|
|
|
|
cl_int retVal;
|
|
|
|
|
|
|
|
EXPECT_EQ(KernelExecutionType::Default, kernel.executionType);
|
|
|
|
|
|
|
|
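// an invalid execution type value is rejected and the execution type stays at its current value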
retVal = kernel.setKernelExecutionType(-1);
|
|
|
|
EXPECT_EQ(CL_INVALID_VALUE, retVal);
|
|
|
|
EXPECT_EQ(KernelExecutionType::Default, kernel.executionType);
|
|
|
|
|
|
|
|
retVal = kernel.setKernelExecutionType(CL_KERNEL_EXEC_INFO_CONCURRENT_TYPE_INTEL);
|
|
|
|
EXPECT_EQ(CL_SUCCESS, retVal);
|
|
|
|
EXPECT_EQ(KernelExecutionType::Concurrent, kernel.executionType);
|
|
|
|
|
|
|
|
retVal = kernel.setKernelExecutionType(-1);
|
|
|
|
EXPECT_EQ(CL_INVALID_VALUE, retVal);
|
|
|
|
EXPECT_EQ(KernelExecutionType::Concurrent, kernel.executionType);
|
|
|
|
|
|
|
|
retVal = kernel.setKernelExecutionType(CL_KERNEL_EXEC_INFO_DEFAULT_TYPE_INTEL);
|
|
|
|
EXPECT_EQ(CL_SUCCESS, retVal);
|
|
|
|
EXPECT_EQ(KernelExecutionType::Default, kernel.executionType);
|
|
|
|
}
|
|
|
|
|
2020-01-23 15:52:49 +01:00
|
|
|
TEST(KernelTest, givenKernelLocalIdGenerationByRuntimeFalseWhenGettingStartOffsetThenOffsetToSkipPerThreadDataLoadIsAdded) {
|
2020-03-24 18:00:34 +01:00
|
|
|
auto device = std::make_unique<MockClDevice>(MockDevice::createWithNewExecutionEnvironment<MockDevice>(defaultHwInfo.get()));
|
2020-01-23 15:52:49 +01:00
|
|
|
|
|
|
|
MockKernelWithInternals mockKernel(*device);
|
|
|
|
SPatchThreadPayload threadPayload = {};
|
|
|
|
|
|
|
|
threadPayload.OffsetToSkipPerThreadDataLoad = 128u;
|
|
|
|
mockKernel.kernelInfo.patchInfo.threadPayload = &threadPayload;
|
|
|
|
|
2020-07-03 17:09:37 +02:00
|
|
|
mockKernel.kernelInfo.createKernelAllocation(device->getDevice());
|
2020-01-23 15:52:49 +01:00
|
|
|
auto allocationOffset = mockKernel.kernelInfo.getGraphicsAllocation()->getGpuAddressToPatch();
|
|
|
|
|
|
|
|
mockKernel.mockKernel->setStartOffset(128);
|
|
|
|
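// local IDs are not generated by the runtime but are used, so OffsetToSkipPerThreadDataLoad (128) is added to the 128-byte start offset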
auto offset = mockKernel.mockKernel->getKernelStartOffset(false, true, false);
|
|
|
|
EXPECT_EQ(allocationOffset + 256u, offset);
|
|
|
|
device->getMemoryManager()->freeGraphicsMemory(mockKernel.kernelInfo.getGraphicsAllocation());
|
|
|
|
}
|
|
|
|
|
|
|
|
TEST(KernelTest, givenKernelLocalIdGenerationByRuntimeTrueAndLocalIdsUsedWhenGettingStartOffsetThenOffsetToSkipPerThreadDataLoadIsNotAdded) {
|
2020-03-24 18:00:34 +01:00
|
|
|
auto device = std::make_unique<MockClDevice>(MockDevice::createWithNewExecutionEnvironment<MockDevice>(defaultHwInfo.get()));
|
2020-01-23 15:52:49 +01:00
|
|
|
|
|
|
|
MockKernelWithInternals mockKernel(*device);
|
|
|
|
SPatchThreadPayload threadPayload = {};
|
|
|
|
|
|
|
|
threadPayload.OffsetToSkipPerThreadDataLoad = 128u;
|
|
|
|
mockKernel.kernelInfo.patchInfo.threadPayload = &threadPayload;
|
|
|
|
|
2020-07-03 17:09:37 +02:00
|
|
|
mockKernel.kernelInfo.createKernelAllocation(device->getDevice());
|
2020-01-23 15:52:49 +01:00
|
|
|
auto allocationOffset = mockKernel.kernelInfo.getGraphicsAllocation()->getGpuAddressToPatch();
|
|
|
|
|
|
|
|
mockKernel.mockKernel->setStartOffset(128);
|
|
|
|
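// local IDs generated by the runtime: only the 128-byte start offset applies, OffsetToSkipPerThreadDataLoad is not added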
auto offset = mockKernel.mockKernel->getKernelStartOffset(true, true, false);
|
|
|
|
EXPECT_EQ(allocationOffset + 128u, offset);
|
|
|
|
device->getMemoryManager()->freeGraphicsMemory(mockKernel.kernelInfo.getGraphicsAllocation());
|
|
|
|
}
|
|
|
|
|
|
|
|
TEST(KernelTest, givenKernelLocalIdGenerationByRuntimeFalseAndLocalIdsNotUsedWhenGettingStartOffsetThenOffsetToSkipPerThreadDataLoadIsNotAdded) {
|
2020-03-24 18:00:34 +01:00
|
|
|
auto device = std::make_unique<MockClDevice>(MockDevice::createWithNewExecutionEnvironment<MockDevice>(defaultHwInfo.get()));
|
2020-01-23 15:52:49 +01:00
|
|
|
|
|
|
|
MockKernelWithInternals mockKernel(*device);
|
|
|
|
SPatchThreadPayload threadPayload = {};
|
|
|
|
|
|
|
|
threadPayload.OffsetToSkipPerThreadDataLoad = 128u;
|
|
|
|
mockKernel.kernelInfo.patchInfo.threadPayload = &threadPayload;
|
|
|
|
|
2020-07-03 17:09:37 +02:00
|
|
|
mockKernel.kernelInfo.createKernelAllocation(device->getDevice());
|
2020-01-23 15:52:49 +01:00
|
|
|
auto allocationOffset = mockKernel.kernelInfo.getGraphicsAllocation()->getGpuAddressToPatch();
|
|
|
|
|
|
|
|
mockKernel.mockKernel->setStartOffset(128);
|
|
|
|
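// local IDs not used by the kernel: OffsetToSkipPerThreadDataLoad is not added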
auto offset = mockKernel.mockKernel->getKernelStartOffset(false, false, false);
|
|
|
|
EXPECT_EQ(allocationOffset + 128u, offset);
|
|
|
|
device->getMemoryManager()->freeGraphicsMemory(mockKernel.kernelInfo.getGraphicsAllocation());
|
|
|
|
}
|
|
|
|
|
2020-01-29 14:15:10 +01:00
|
|
|
TEST(KernelTest, givenKernelWhenForcePerDssBackedBufferProgrammingIsSetThenKernelRequiresPerDssBackedBuffer) {
|
|
|
|
DebugManagerStateRestore restore;
|
|
|
|
DebugManager.flags.ForcePerDssBackedBufferProgramming.set(true);
|
|
|
|
|
2020-03-24 18:00:34 +01:00
|
|
|
auto device = std::make_unique<MockClDevice>(MockDevice::createWithNewExecutionEnvironment<MockDevice>(defaultHwInfo.get()));
|
2020-01-29 14:15:10 +01:00
|
|
|
MockKernelWithInternals kernel(*device);
|
|
|
|
|
|
|
|
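// the debug flag forces the kernel to report that it requires a per-DSS backed buffer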
EXPECT_TRUE(kernel.mockKernel->requiresPerDssBackedBuffer());
|
|
|
|
}
|
|
|
|
|
|
|
|
TEST(KernelTest, givenKernelWhenForcePerDssBackedBufferProgrammingIsNotSetThenKernelDoesntRequirePerDssBackedBuffer) {
|
2020-03-24 18:00:34 +01:00
|
|
|
auto device = std::make_unique<MockClDevice>(MockDevice::createWithNewExecutionEnvironment<MockDevice>(defaultHwInfo.get()));
|
2020-01-29 14:15:10 +01:00
|
|
|
MockKernelWithInternals kernel(*device);
|
|
|
|
|
|
|
|
EXPECT_FALSE(kernel.mockKernel->requiresPerDssBackedBuffer());
|
|
|
|
}
|
|
|
|
|
2019-07-15 15:08:42 +02:00
|
|
|
namespace NEO {
|
|
|
|
|
|
|
|
template <typename GfxFamily>
|
|
|
|
class DeviceQueueHwMock : public DeviceQueueHw<GfxFamily> {
|
|
|
|
using BaseClass = DeviceQueueHw<GfxFamily>;
|
|
|
|
|
|
|
|
public:
|
|
|
|
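// re-expose DeviceQueueHw internals so the tests can exercise them directly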
using BaseClass::buildSlbDummyCommands;
|
|
|
|
using BaseClass::getCSPrefetchSize;
|
|
|
|
using BaseClass::getExecutionModelCleanupSectionSize;
|
|
|
|
using BaseClass::getMediaStateClearCmdsSize;
|
|
|
|
using BaseClass::getMinimumSlbSize;
|
|
|
|
using BaseClass::getProfilingEndCmdsSize;
|
|
|
|
using BaseClass::getSlbCS;
|
|
|
|
using BaseClass::getWaCommandsSize;
|
|
|
|
using BaseClass::offsetDsh;
|
|
|
|
|
2020-01-14 14:32:11 +01:00
|
|
|
DeviceQueueHwMock(Context *context, ClDevice *device, cl_queue_properties &properties) : BaseClass(context, device, properties) {
|
2019-07-15 15:08:42 +02:00
|
|
|
auto slb = this->getSlbBuffer();
|
|
|
|
LinearStream *slbCS = getSlbCS();
|
|
|
|
slbCS->replaceBuffer(slb->getUnderlyingBuffer(), slb->getUnderlyingBufferSize()); // reset
|
|
|
|
}
|
|
|
|
};
|
|
|
|
} // namespace NEO
|
|
|
|
|
|
|
|
HWCMDTEST_F(IGFX_GEN8_CORE, DeviceQueueHwTest, whenSlbEndOffsetGreaterThanZeroThenOverwriteOneEnqueue) {
|
|
|
|
std::unique_ptr<DeviceQueueHwMock<FamilyType>> mockDeviceQueueHw(new DeviceQueueHwMock<FamilyType>(pContext, device, deviceQueueProperties::minimumProperties[0]));
|
|
|
|
|
|
|
|
auto slb = mockDeviceQueueHw->getSlbBuffer();
|
|
|
|
auto commandsSize = mockDeviceQueueHw->getMinimumSlbSize() + mockDeviceQueueHw->getWaCommandsSize();
|
|
|
|
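// fill the SLB with a known byte pattern and keep a copy to compare against after the reset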
auto slbCopy = malloc(slb->getUnderlyingBufferSize());
|
|
|
|
memset(slb->getUnderlyingBuffer(), 0xFE, slb->getUnderlyingBufferSize());
|
|
|
|
memcpy(slbCopy, slb->getUnderlyingBuffer(), slb->getUnderlyingBufferSize());
|
|
|
|
|
|
|
|
auto igilCmdQueue = reinterpret_cast<IGIL_CommandQueue *>(mockDeviceQueueHw->getQueueBuffer()->getUnderlyingBuffer());
|
|
|
|
|
|
|
|
// slbEndOffset < commandsSize * 128
|
|
|
|
// always fill only 1 enqueue (after offset)
|
|
|
|
auto offset = static_cast<int>(commandsSize) * 50;
|
|
|
|
igilCmdQueue->m_controls.m_SLBENDoffsetInBytes = offset;
|
|
|
|
mockDeviceQueueHw->resetDeviceQueue();
|
|
|
|
EXPECT_EQ(0, memcmp(slb->getUnderlyingBuffer(), slbCopy, offset)); // don't touch memory before offset
|
|
|
|
EXPECT_NE(0, memcmp(ptrOffset(slb->getUnderlyingBuffer(), offset),
|
|
|
|
slbCopy, commandsSize)); // change 1 enqueue
|
|
|
|
EXPECT_EQ(0, memcmp(ptrOffset(slb->getUnderlyingBuffer(), offset + commandsSize),
|
|
|
|
slbCopy, offset)); // don't touch memory after (offset + 1 enqueue)
|
|
|
|
|
|
|
|
// slbEndOffset == commandsSize * 128
|
|
|
|
// don't fill commands
|
|
|
|
memset(slb->getUnderlyingBuffer(), 0xFE, slb->getUnderlyingBufferSize());
|
|
|
|
offset = static_cast<int>(commandsSize) * 128;
|
|
|
|
igilCmdQueue->m_controls.m_SLBENDoffsetInBytes = offset;
|
|
|
|
mockDeviceQueueHw->resetDeviceQueue();
|
|
|
|
EXPECT_EQ(0, memcmp(slb->getUnderlyingBuffer(), slbCopy, commandsSize * 128)); // don't touch memory for enqueues
|
|
|
|
|
|
|
|
free(slbCopy);
|
|
|
|
}
|
2019-12-04 13:45:32 +01:00
|
|
|
|
|
|
|
using KernelMultiRootDeviceTest = MultiRootDeviceFixture;
|
|
|
|
|
2020-05-06 19:02:35 +02:00
|
|
|
TEST_F(KernelMultiRootDeviceTest, WhenGettingRootDeviceIndexThenCorrectRootDeviceIndexIsReturned) {
|
2019-12-04 13:45:32 +01:00
|
|
|
auto kernelInfo = std::make_unique<KernelInfo>();
|
|
|
|
|
|
|
|
// setup private memory
|
|
|
|
SPatchAllocateStatelessPrivateSurface tokenSPS = {};
|
|
|
|
tokenSPS.SurfaceStateHeapOffset = 64;
|
|
|
|
tokenSPS.DataParamOffset = 40;
|
|
|
|
tokenSPS.DataParamSize = 8;
|
|
|
|
tokenSPS.PerThreadPrivateMemorySize = 112;
|
|
|
|
kernelInfo->patchInfo.pAllocateStatelessPrivateSurface = &tokenSPS;
|
|
|
|
|
2020-02-20 08:12:44 +01:00
|
|
|
MockProgram program(*device->getExecutionEnvironment(), context.get(), false, &device->getDevice());
|
2019-12-04 13:45:32 +01:00
|
|
|
std::unique_ptr<MockKernel> kernel(new MockKernel(&program, *kernelInfo, *device.get()));
|
|
|
|
kernel->initialize();
|
|
|
|
|
|
|
|
auto privateSurface = kernel->getPrivateSurface();
|
|
|
|
ASSERT_NE(nullptr, privateSurface);
|
|
|
|
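// the private surface must be allocated on the expected root device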
EXPECT_EQ(expectedRootDeviceIndex, privateSurface->getRootDeviceIndex());
|
|
|
|
}
|
2020-01-11 18:25:26 +01:00
|
|
|
|
|
|
|
TEST(KernelCreateTest, whenInitFailedThenReturnNull) {
|
|
|
|
struct MockProgram {
|
2020-02-20 08:12:44 +01:00
|
|
|
Device &getDevice() { return mDevice.getDevice(); }
|
2020-01-11 18:25:26 +01:00
|
|
|
void getSource(std::string &) {}
|
2020-02-20 08:12:44 +01:00
|
|
|
MockClDevice mDevice{new MockDevice};
|
2020-01-11 18:25:26 +01:00
|
|
|
} mockProgram;
|
|
|
|
struct MockKernel {
|
2020-02-20 08:12:44 +01:00
|
|
|
MockKernel(MockProgram *, const KernelInfo &, ClDevice &) {}
|
2020-01-11 18:25:26 +01:00
|
|
|
int initialize() { return -1; };
|
|
|
|
};
|
|
|
|
|
|
|
|
KernelInfo info;
|
|
|
|
info.gpuPointerSize = 8;
|
|
|
|
|
|
|
|
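// Kernel::create should return nullptr when initialize() fails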
auto ret = Kernel::create<MockKernel>(&mockProgram, info, nullptr);
|
|
|
|
EXPECT_EQ(nullptr, ret);
|
|
|
|
}
|
2020-02-13 16:22:48 +01:00
|
|
|
|
2020-02-24 22:07:46 +01:00
|
|
|
TEST(ArgTypeTraits, GivenDefaultInitializedArgTypeMetadataThenAddressSpaceIsGlobal) {
|
|
|
|
ArgTypeTraits metadata;
|
|
|
|
EXPECT_EQ(NEO::KernelArgMetadata::AddrGlobal, metadata.addressQualifier);
|
2020-02-13 16:22:48 +01:00
|
|
|
}
|