fix: return error when debug surface size cannot be obtained

remove default max debug surface size
check state save area size only for debug scenarios
reduce state save area size in unit tests - rely on values from mock

Signed-off-by: Mateusz Jablonski <mateusz.jablonski@intel.com>
Author: Mateusz Jablonski <mateusz.jablonski@intel.com>
Date: 2025-05-22 09:43:25 +00:00
Committed-by: Compute-Runtime-Automation
Parent: efbf505744
Commit: 98bf872fdd
24 changed files with 108 additions and 198 deletions
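
To make the first bullet concrete, here is a minimal, self-contained sketch of the changed error path. It is illustrative only, not the actual NEO code: queryStateSaveAreaSize() and getDebugSurfaceSize() are hypothetical stand-ins for the driver's SIP state-save-area plumbing. The point it shows is that the failure is now propagated to the caller instead of being papered over with a default maximum size:

// Hedged sketch, not the actual compute-runtime implementation.
#include <cstddef>
#include <optional>

// Hypothetical query; the real driver reads this from the SIP kernel's
// state save area header.
std::optional<size_t> queryStateSaveAreaSize() {
    return std::nullopt; // e.g. SIP binary missing or header unreadable
}

// Returns false when the size cannot be obtained; no default is substituted.
bool getDebugSurfaceSize(size_t &sizeOut) {
    auto size = queryStateSaveAreaSize();
    if (!size.has_value() || *size == 0) {
        return false; // propagate the failure to the caller
    }
    sizeOut = *size;
    return true;
}

int main() {
    size_t debugSurfaceSize = 0;
    return getDebugSurfaceSize(debugSurfaceSize) ? 0 : 1;
}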

@@ -155,7 +155,6 @@ DECLARE_DEBUG_VARIABLE(int32_t, DebuggerForceSbaTrackingMode, -1, "-1: default,
 DECLARE_DEBUG_VARIABLE(bool, DisableSupportForL0Debugger, 0, "0: default setting for product, 1: disable l0 debugger")
 DECLARE_DEBUG_VARIABLE(int32_t, DebugApiUsed, 0, "0: default L0 Debug API not used, 1: L0 Debug API used")
 DECLARE_DEBUG_VARIABLE(int32_t, OverrideCsrAllocationSize, -1, "-1: default, >0: use value for size of CSR allocation")
-DECLARE_DEBUG_VARIABLE(int32_t, OverrideSipKernelMaxDbgSurfaceSize, -1, "-1: default, >0: use value as max debug surface size for sip kernel")
 DECLARE_DEBUG_VARIABLE(int32_t, ComputeOverdispatchDisable, -1, "Set Compute Overdispatch Disable field, -1: do not set.")
 DECLARE_DEBUG_VARIABLE(int32_t, CFEWeightedDispatchModeDisable, -1, "Set Weighted Dispatch Mode Disable field in CFE_STATE on XEHP, -1: do not set.")
 DECLARE_DEBUG_VARIABLE(int32_t, CFESingleSliceDispatchCCSMode, -1, "Set Single Slice Dispatch CCS Mode in CFE_STATE on XEHP, -1 - do not set")
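
For readers unfamiliar with this file: each DECLARE_DEBUG_VARIABLE entry expands into a typed flag with a declared default, which driver code can query at runtime, so deleting the OverrideSipKernelMaxDbgSurfaceSize line removes that override knob entirely. The following is a minimal self-contained sketch of the macro-table pattern, under the assumption that this simplified DebugFlag/DebugFlags shape stands in for NEO's more involved flag machinery:

#include <cstdint>
#include <iostream>

// Each flag carries a typed value initialized from the declared default.
template <typename T>
struct DebugFlag {
    T value;
    T get() const { return value; }
};

struct DebugFlags {
#define DECLARE_DEBUG_VARIABLE(type, name, defaultValue, description) \
    DebugFlag<type> name{defaultValue};
    // Re-declaring one entry from the hunk above; the description is unused here.
    DECLARE_DEBUG_VARIABLE(int32_t, OverrideCsrAllocationSize, -1, "-1: default, >0: use value for size of CSR allocation")
#undef DECLARE_DEBUG_VARIABLE
};

int main() {
    DebugFlags flags;
    // -1 means "use the product default"; a positive value overrides it.
    if (flags.OverrideCsrAllocationSize.get() > 0) {
        std::cout << "CSR allocation size override active\n";
    }
    return 0;
}

In the actual driver these flags can also be set externally (e.g. via environment variables), which is why removing the declaration also removes the user-visible override, consistent with the "remove default max debug surface size" bullet in the commit message.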