@@ -26,13 +26,19 @@ static const int kSpvImageSampledImageIdInIdx = 0;
 static const int kSpvLoadPtrIdInIdx = 0;
 static const int kSpvAccessChainBaseIdInIdx = 0;
 static const int kSpvAccessChainIndex0IdInIdx = 1;
-static const int kSpvTypePointerTypeIdInIdx = 1;
 static const int kSpvTypeArrayLengthIdInIdx = 1;
 static const int kSpvConstantValueInIdx = 0;
 static const int kSpvVariableStorageClassInIdx = 0;
 
 }  // anonymous namespace
 
+// Avoid unused variable warning/error on Linux
+#ifndef NDEBUG
+#define USE_ASSERT(x) assert(x)
+#else
+#define USE_ASSERT(x) ((void)(x))
+#endif
+
 namespace spvtools {
 namespace opt {
 
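// A standalone sketch (not from this diff) of why the USE_ASSERT macro added
// above is useful: when NDEBUG is defined, assert(x) expands to nothing, so a
// variable referenced only inside an assert draws an unused-variable warning
// or error on Linux builds; the ((void)(x)) form keeps the operand formally
// used while discarding its value. Lookup() below is a hypothetical helper.
#include <cassert>
#ifndef NDEBUG
#define USE_ASSERT(x) assert(x)
#else
#define USE_ASSERT(x) ((void)(x))
#endif

static bool Lookup() { return true; }

int main() {
  bool found = Lookup();
  USE_ASSERT(found && "not found");  // warning-free with or without NDEBUG
  return 0;
}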
@@ -48,14 +54,25 @@ uint32_t InstBindlessCheckPass::GenDebugReadLength(
 uint32_t InstBindlessCheckPass::GenDebugReadInit(uint32_t var_id,
                                                  uint32_t desc_idx_id,
                                                  InstructionBuilder* builder) {
-  uint32_t desc_set_base_id =
-      builder->GetUintConstantId(kDebugInputBindlessInitOffset);
-  uint32_t desc_set_idx_id = builder->GetUintConstantId(var2desc_set_[var_id]);
   uint32_t binding_idx_id = builder->GetUintConstantId(var2binding_[var_id]);
   uint32_t u_desc_idx_id = GenUintCastCode(desc_idx_id, builder);
-  return GenDebugDirectRead(
-      {desc_set_base_id, desc_set_idx_id, binding_idx_id, u_desc_idx_id},
-      builder);
+  // If desc index checking is not enabled, we know the offset of initialization
+  // entries is 1, so we can avoid loading this value and just add 1 to the
+  // descriptor set.
+  if (!desc_idx_enabled_) {
+    uint32_t desc_set_idx_id =
+        builder->GetUintConstantId(var2desc_set_[var_id] + 1);
+    return GenDebugDirectRead({desc_set_idx_id, binding_idx_id, u_desc_idx_id},
+                              builder);
+  } else {
+    uint32_t desc_set_base_id =
+        builder->GetUintConstantId(kDebugInputBindlessInitOffset);
+    uint32_t desc_set_idx_id =
+        builder->GetUintConstantId(var2desc_set_[var_id]);
+    return GenDebugDirectRead(
+        {desc_set_base_id, desc_set_idx_id, binding_idx_id, u_desc_idx_id},
+        builder);
+  }
 }
 
 uint32_t InstBindlessCheckPass::CloneOriginalReference(
@@ -156,13 +173,9 @@ uint32_t InstBindlessCheckPass::GetImageId(Instruction* inst) {
   return 0;
 }
 
-Instruction* InstBindlessCheckPass::GetDescriptorTypeInst(
-    Instruction* var_inst) {
-  uint32_t var_type_id = var_inst->type_id();
-  Instruction* var_type_inst = get_def_use_mgr()->GetDef(var_type_id);
-  uint32_t desc_type_id =
-      var_type_inst->GetSingleWordInOperand(kSpvTypePointerTypeIdInIdx);
-  return get_def_use_mgr()->GetDef(desc_type_id);
+Instruction* InstBindlessCheckPass::GetPointeeTypeInst(Instruction* ptr_inst) {
+  uint32_t pte_ty_id = GetPointeeTypeId(ptr_inst);
+  return get_def_use_mgr()->GetDef(pte_ty_id);
 }
 
 bool InstBindlessCheckPass::AnalyzeDescriptorReference(Instruction* ref_inst,
@@ -187,7 +200,7 @@ bool InstBindlessCheckPass::AnalyzeDescriptorReference(Instruction* ref_inst,
         return false;
         break;
     }
-    Instruction* desc_type_inst = GetDescriptorTypeInst(var_inst);
+    Instruction* desc_type_inst = GetPointeeTypeInst(var_inst);
     switch (desc_type_inst->opcode()) {
       case SpvOpTypeArray:
       case SpvOpTypeRuntimeArray:
@@ -195,11 +208,11 @@ bool InstBindlessCheckPass::AnalyzeDescriptorReference(Instruction* ref_inst,
         // do not want to instrument loads of descriptors here which are part of
         // an image-based reference.
         if (ptr_inst->NumInOperands() < 3) return false;
-        ref->index_id =
+        ref->desc_idx_id =
             ptr_inst->GetSingleWordInOperand(kSpvAccessChainIndex0IdInIdx);
         break;
       default:
-        ref->index_id = 0;
+        ref->desc_idx_id = 0;
         break;
     }
     return true;
@@ -229,14 +242,14 @@ bool InstBindlessCheckPass::AnalyzeDescriptorReference(Instruction* ref_inst,
   ref->ptr_id = desc_load_inst->GetSingleWordInOperand(kSpvLoadPtrIdInIdx);
   Instruction* ptr_inst = get_def_use_mgr()->GetDef(ref->ptr_id);
   if (ptr_inst->opcode() == SpvOp::SpvOpVariable) {
-    ref->index_id = 0;
+    ref->desc_idx_id = 0;
     ref->var_id = ref->ptr_id;
   } else if (ptr_inst->opcode() == SpvOp::SpvOpAccessChain) {
     if (ptr_inst->NumInOperands() != 2) {
       assert(false && "unexpected bindless index number");
       return false;
     }
-    ref->index_id =
+    ref->desc_idx_id =
         ptr_inst->GetSingleWordInOperand(kSpvAccessChainIndex0IdInIdx);
     ref->var_id = ptr_inst->GetSingleWordInOperand(kSpvAccessChainBaseIdInIdx);
     Instruction* var_inst = get_def_use_mgr()->GetDef(ref->var_id);
@@ -251,9 +264,150 @@ bool InstBindlessCheckPass::AnalyzeDescriptorReference(Instruction* ref_inst,
   return true;
 }
 
+uint32_t InstBindlessCheckPass::FindStride(uint32_t ty_id,
+                                           uint32_t stride_deco) {
+  uint32_t stride = 0xdeadbeef;
+  bool found = !get_decoration_mgr()->WhileEachDecoration(
+      ty_id, stride_deco, [&stride](const Instruction& deco_inst) {
+        stride = deco_inst.GetSingleWordInOperand(2u);
+        return false;
+      });
+  USE_ASSERT(found && "stride not found");
+  return stride;
+}
+
+uint32_t InstBindlessCheckPass::ByteSize(uint32_t ty_id) {
+  analysis::TypeManager* type_mgr = context()->get_type_mgr();
+  const analysis::Type* sz_ty = type_mgr->GetType(ty_id);
+  if (sz_ty->kind() == analysis::Type::kPointer) {
+    // Assuming PhysicalStorageBuffer pointer
+    return 8;
+  }
+  uint32_t size = 1;
+  if (sz_ty->kind() == analysis::Type::kMatrix) {
+    const analysis::Matrix* m_ty = sz_ty->AsMatrix();
+    size = m_ty->element_count() * size;
+    uint32_t stride = FindStride(ty_id, SpvDecorationMatrixStride);
+    if (stride != 0) return size * stride;
+    sz_ty = m_ty->element_type();
+  }
+  if (sz_ty->kind() == analysis::Type::kVector) {
+    const analysis::Vector* v_ty = sz_ty->AsVector();
+    size = v_ty->element_count() * size;
+    sz_ty = v_ty->element_type();
+  }
+  switch (sz_ty->kind()) {
+    case analysis::Type::kFloat: {
+      const analysis::Float* f_ty = sz_ty->AsFloat();
+      size *= f_ty->width();
+    } break;
+    case analysis::Type::kInteger: {
+      const analysis::Integer* i_ty = sz_ty->AsInteger();
+      size *= i_ty->width();
+    } break;
+    default: { assert(false && "unexpected type"); } break;
+  }
+  size /= 8;
+  return size;
+}
+
+uint32_t InstBindlessCheckPass::GenLastByteIdx(ref_analysis* ref,
+                                               InstructionBuilder* builder) {
+  // Find outermost buffer type and its access chain index
+  Instruction* var_inst = get_def_use_mgr()->GetDef(ref->var_id);
+  Instruction* desc_ty_inst = GetPointeeTypeInst(var_inst);
+  uint32_t buff_ty_id;
+  uint32_t ac_in_idx = 1;
+  switch (desc_ty_inst->opcode()) {
+    case SpvOpTypeArray:
+    case SpvOpTypeRuntimeArray:
+      buff_ty_id = desc_ty_inst->GetSingleWordInOperand(0);
+      ++ac_in_idx;
+      break;
+    default:
+      assert(desc_ty_inst->opcode() == SpvOpTypeStruct &&
+             "unexpected descriptor type");
+      buff_ty_id = desc_ty_inst->result_id();
+      break;
+  }
+  // Process remaining access chain indices
+  Instruction* ac_inst = get_def_use_mgr()->GetDef(ref->ptr_id);
+  uint32_t curr_ty_id = buff_ty_id;
+  uint32_t sum_id = 0;
+  while (ac_in_idx < ac_inst->NumInOperands()) {
+    uint32_t curr_idx_id = ac_inst->GetSingleWordInOperand(ac_in_idx);
+    Instruction* curr_idx_inst = get_def_use_mgr()->GetDef(curr_idx_id);
+    Instruction* curr_ty_inst = get_def_use_mgr()->GetDef(curr_ty_id);
+    uint32_t curr_offset_id = 0;
+    switch (curr_ty_inst->opcode()) {
+      case SpvOpTypeArray:
+      case SpvOpTypeRuntimeArray:
+      case SpvOpTypeMatrix: {
+        // Get array/matrix stride and multiply by current index
+        uint32_t stride_deco = (curr_ty_inst->opcode() == SpvOpTypeMatrix)
+                                   ? SpvDecorationMatrixStride
+                                   : SpvDecorationArrayStride;
+        uint32_t arr_stride = FindStride(curr_ty_id, stride_deco);
+        uint32_t arr_stride_id = builder->GetUintConstantId(arr_stride);
+        Instruction* curr_offset_inst = builder->AddBinaryOp(
+            GetUintId(), SpvOpIMul, arr_stride_id, curr_idx_id);
+        curr_offset_id = curr_offset_inst->result_id();
+        // Get element type for next step
+        curr_ty_id = curr_ty_inst->GetSingleWordInOperand(0);
+      } break;
+      case SpvOpTypeVector: {
+        // Stride is size of component type
+        uint32_t comp_ty_id = curr_ty_inst->GetSingleWordInOperand(0u);
+        uint32_t vec_stride = ByteSize(comp_ty_id);
+        uint32_t vec_stride_id = builder->GetUintConstantId(vec_stride);
+        Instruction* curr_offset_inst = builder->AddBinaryOp(
+            GetUintId(), SpvOpIMul, vec_stride_id, curr_idx_id);
+        curr_offset_id = curr_offset_inst->result_id();
+        // Get element type for next step
+        curr_ty_id = comp_ty_id;
+      } break;
+      case SpvOpTypeStruct: {
+        // Get buffer byte offset for the referenced member
+        assert(curr_idx_inst->opcode() == SpvOpConstant &&
+               "unexpected struct index");
+        uint32_t member_idx = curr_idx_inst->GetSingleWordInOperand(0);
+        uint32_t member_offset = 0xdeadbeef;
+        bool found = !get_decoration_mgr()->WhileEachDecoration(
+            curr_ty_id, SpvDecorationOffset,
+            [&member_idx, &member_offset](const Instruction& deco_inst) {
+              if (deco_inst.GetSingleWordInOperand(1u) != member_idx)
+                return true;
+              member_offset = deco_inst.GetSingleWordInOperand(3u);
+              return false;
+            });
+        USE_ASSERT(found && "member offset not found");
+        curr_offset_id = builder->GetUintConstantId(member_offset);
+        // Get element type for next step
+        curr_ty_id = curr_ty_inst->GetSingleWordInOperand(member_idx);
+      } break;
+      default: { assert(false && "unexpected non-composite type"); } break;
+    }
+    if (sum_id == 0)
+      sum_id = curr_offset_id;
+    else {
+      Instruction* sum_inst =
+          builder->AddBinaryOp(GetUintId(), SpvOpIAdd, sum_id, curr_offset_id);
+      sum_id = sum_inst->result_id();
+    }
+    ++ac_in_idx;
+  }
+  // Add in offset of last byte of referenced object
+  uint32_t bsize = ByteSize(curr_ty_id);
+  uint32_t last = bsize - 1;
+  uint32_t last_id = builder->GetUintConstantId(last);
+  Instruction* sum_inst =
+      builder->AddBinaryOp(GetUintId(), SpvOpIAdd, sum_id, last_id);
+  return sum_inst->result_id();
+}
+
 void InstBindlessCheckPass::GenCheckCode(
-    uint32_t check_id, uint32_t error_id, uint32_t length_id,
-    uint32_t stage_idx, ref_analysis* ref,
+    uint32_t check_id, uint32_t error_id, uint32_t offset_id,
+    uint32_t length_id, uint32_t stage_idx, ref_analysis* ref,
     std::vector<std::unique_ptr<BasicBlock>>* new_blocks) {
   BasicBlock* back_blk_ptr = &*new_blocks->back();
   InstructionBuilder builder(
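// A worked example (hypothetical layout, not from this diff) of the arithmetic
// GenLastByteIdx and ByteSize perform above: suppose an access chain selects
// component 2 of a block member "vec4 v" decorated Offset 16, with 32-bit
// float components. The struct step contributes the member offset, the vector
// step contributes index times component size, and the final step adds the
// index of the last byte of the referenced scalar.
#include <cassert>
#include <cstdint>

int main() {
  const uint32_t member_offset = 16;          // OpMemberDecorate ... Offset 16
  const uint32_t comp_size = 32 / 8;          // ByteSize of a 32-bit float
  const uint32_t vec_offset = 2 * comp_size;  // vector index 2 times stride
  const uint32_t last_byte = comp_size - 1;   // last byte of the float
  assert(member_offset + vec_offset + last_byte == 27);
  return 0;
}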
@@ -279,9 +433,19 @@ void InstBindlessCheckPass::GenCheckCode(
   // Gen invalid block
   new_blk_ptr.reset(new BasicBlock(std::move(invalid_label)));
   builder.SetInsertPoint(&*new_blk_ptr);
-  uint32_t u_index_id = GenUintCastCode(ref->index_id, &builder);
-  GenDebugStreamWrite(uid2offset_[ref->ref_inst->unique_id()], stage_idx,
-                      {error_id, u_index_id, length_id}, &builder);
+  uint32_t u_index_id = GenUintCastCode(ref->desc_idx_id, &builder);
+  if (offset_id != 0)
+    GenDebugStreamWrite(uid2offset_[ref->ref_inst->unique_id()], stage_idx,
+                        {error_id, u_index_id, offset_id, length_id}, &builder);
+  else if (buffer_bounds_enabled_)
+    // So all error modes will use same debug stream write function
+    GenDebugStreamWrite(
+        uid2offset_[ref->ref_inst->unique_id()], stage_idx,
+        {error_id, u_index_id, length_id, builder.GetUintConstantId(0)},
+        &builder);
+  else
+    GenDebugStreamWrite(uid2offset_[ref->ref_inst->unique_id()], stage_idx,
+                        {error_id, u_index_id, length_id}, &builder);
   // Remember last invalid block id
   uint32_t last_invalid_blk_id = new_blk_ptr->GetLabelInst()->result_id();
   // Gen zero for invalid reference
@@ -305,7 +469,7 @@ void InstBindlessCheckPass::GenCheckCode(
   context()->KillInst(ref->ref_inst);
 }
 
-void InstBindlessCheckPass::GenBoundsCheckCode(
+void InstBindlessCheckPass::GenDescIdxCheckCode(
     BasicBlock::iterator ref_inst_itr,
     UptrVectorIterator<BasicBlock> ref_block_itr, uint32_t stage_idx,
     std::vector<std::unique_ptr<BasicBlock>>* new_blocks) {
@@ -318,19 +482,19 @@ void InstBindlessCheckPass::GenBoundsCheckCode(
   // If index and bound both compile-time constants and index < bound,
   // return without changing
   Instruction* var_inst = get_def_use_mgr()->GetDef(ref.var_id);
-  Instruction* desc_type_inst = GetDescriptorTypeInst(var_inst);
+  Instruction* desc_type_inst = GetPointeeTypeInst(var_inst);
   uint32_t length_id = 0;
   if (desc_type_inst->opcode() == SpvOpTypeArray) {
     length_id =
         desc_type_inst->GetSingleWordInOperand(kSpvTypeArrayLengthIdInIdx);
-    Instruction* index_inst = get_def_use_mgr()->GetDef(ref.index_id);
+    Instruction* index_inst = get_def_use_mgr()->GetDef(ref.desc_idx_id);
     Instruction* length_inst = get_def_use_mgr()->GetDef(length_id);
     if (index_inst->opcode() == SpvOpConstant &&
         length_inst->opcode() == SpvOpConstant &&
         index_inst->GetSingleWordInOperand(kSpvConstantValueInIdx) <
             length_inst->GetSingleWordInOperand(kSpvConstantValueInIdx))
       return;
-  } else if (!input_length_enabled_ ||
+  } else if (!desc_idx_enabled_ ||
              desc_type_inst->opcode() != SpvOpTypeRuntimeArray) {
     return;
   }
@@ -352,9 +516,9 @@ void InstBindlessCheckPass::GenBoundsCheckCode(
   // Generate full runtime bounds test code with true branch
   // being full reference and false branch being debug output and zero
   // for the referenced value.
-  Instruction* ult_inst =
-      builder.AddBinaryOp(GetBoolId(), SpvOpULessThan, ref.index_id, length_id);
-  GenCheckCode(ult_inst->result_id(), error_id, length_id, stage_idx, &ref,
+  Instruction* ult_inst = builder.AddBinaryOp(GetBoolId(), SpvOpULessThan,
+                                              ref.desc_idx_id, length_id);
+  GenCheckCode(ult_inst->result_id(), error_id, 0u, length_id, stage_idx, &ref,
                new_blocks);
   // Move original block's remaining code into remainder/merge block and add
   // to new blocks
@@ -362,13 +526,30 @@ void InstBindlessCheckPass::GenBoundsCheckCode(
   MovePostludeCode(ref_block_itr, back_blk_ptr);
 }
 
-void InstBindlessCheckPass::GenInitCheckCode(
+void InstBindlessCheckPass::GenDescInitCheckCode(
     BasicBlock::iterator ref_inst_itr,
     UptrVectorIterator<BasicBlock> ref_block_itr, uint32_t stage_idx,
     std::vector<std::unique_ptr<BasicBlock>>* new_blocks) {
   // Look for reference through descriptor. If not, return.
   ref_analysis ref;
   if (!AnalyzeDescriptorReference(&*ref_inst_itr, &ref)) return;
+  // Determine if we can only do initialization check
+  bool init_check = false;
+  if (ref.desc_load_id != 0 || !buffer_bounds_enabled_) {
+    init_check = true;
+  } else {
+    // For now, only do bounds check for non-aggregate types. Otherwise
+    // just do descriptor initialization check.
+    // TODO(greg-lunarg): Do bounds check for aggregate loads and stores
+    Instruction* ref_ptr_inst = get_def_use_mgr()->GetDef(ref.ptr_id);
+    Instruction* pte_type_inst = GetPointeeTypeInst(ref_ptr_inst);
+    uint32_t pte_type_op = pte_type_inst->opcode();
+    if (pte_type_op == SpvOpTypeArray || pte_type_op == SpvOpTypeRuntimeArray ||
+        pte_type_op == SpvOpTypeStruct)
+      init_check = true;
+  }
+  // If initialization check and not enabled, return
+  if (init_check && !desc_init_enabled_) return;
   // Move original block's preceding instructions into first new block
   std::unique_ptr<BasicBlock> new_blk_ptr;
   MovePreludeCode(ref_inst_itr, ref_block_itr, &new_blk_ptr);
@@ -376,19 +557,25 @@ void InstBindlessCheckPass::GenInitCheckCode(
       context(), &*new_blk_ptr,
       IRContext::kAnalysisDefUse | IRContext::kAnalysisInstrToBlockMapping);
   new_blocks->push_back(std::move(new_blk_ptr));
-  // Read initialization status from debug input buffer. If index id not yet
+  // If initialization check, use reference value of zero.
+  // Else use the index of the last byte referenced.
+  uint32_t ref_id = init_check ? builder.GetUintConstantId(0u)
+                               : GenLastByteIdx(&ref, &builder);
+  // Read initialization/bounds from debug input buffer. If index id not yet
   // set, binding is single descriptor, so set index to constant 0.
-  uint32_t zero_id = builder.GetUintConstantId(0u);
-  if (ref.index_id == 0) ref.index_id = zero_id;
-  uint32_t init_id = GenDebugReadInit(ref.var_id, ref.index_id, &builder);
-  // Generate full runtime non-zero init test code with true branch
+  if (ref.desc_idx_id == 0) ref.desc_idx_id = builder.GetUintConstantId(0u);
+  uint32_t init_id = GenDebugReadInit(ref.var_id, ref.desc_idx_id, &builder);
+  // Generate runtime initialization/bounds test code with true branch
   // being full reference and false branch being debug output and zero
   // for the referenced value.
-  Instruction* uneq_inst =
-      builder.AddBinaryOp(GetBoolId(), SpvOpINotEqual, init_id, zero_id);
-  uint32_t error_id = builder.GetUintConstantId(kInstErrorBindlessUninit);
-  GenCheckCode(uneq_inst->result_id(), error_id, zero_id, stage_idx, &ref,
-               new_blocks);
+  Instruction* ult_inst =
+      builder.AddBinaryOp(GetBoolId(), SpvOpULessThan, ref_id, init_id);
+  uint32_t error =
+      init_check ? kInstErrorBindlessUninit : kInstErrorBindlessBuffOOB;
+  uint32_t error_id = builder.GetUintConstantId(error);
+  GenCheckCode(ult_inst->result_id(), error_id, init_check ? 0 : ref_id,
+               init_check ? builder.GetUintConstantId(0u) : init_id, stage_idx,
+               &ref, new_blocks);
   // Move original block's remaining code into remainder/merge block and add
   // to new blocks
   BasicBlock* back_blk_ptr = &*new_blocks->back();
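// A sketch (assumptions, not from this diff) of the shared comparison emitted
// above: GenDebugReadInit's result is assumed to be a nonzero "written" word
// for the initialization check and the buffer's byte length for the bounds
// check, so a single unsigned less-than covers both cases.
#include <cstdint>

static bool Passes(uint32_t ref_id, uint32_t read_id) {
  return ref_id < read_id;  // mirrors the SpvOpULessThan above
}

int main() {
  bool init_ok = Passes(0u, 1u);      // init check: 0 < nonzero flag
  bool bounds_ok = Passes(27u, 64u);  // bounds check: last byte < byte length
  return (init_ok && bounds_ok) ? 0 : 1;
}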
@@ -400,7 +587,7 @@ void InstBindlessCheckPass::InitializeInstBindlessCheck() {
   InitializeInstrument();
   // If runtime array length support enabled, create variable mappings. Length
   // support is always enabled if descriptor init check is enabled.
-  if (input_length_enabled_)
+  if (desc_idx_enabled_ || buffer_bounds_enabled_)
     for (auto& anno : get_module()->annotations())
       if (anno.opcode() == SpvOpDecorate) {
         if (anno.GetSingleWordInOperand(1u) == SpvDecorationDescriptorSet)
@@ -418,19 +605,19 @@ Pass::Status InstBindlessCheckPass::ProcessImpl() {
       [this](BasicBlock::iterator ref_inst_itr,
              UptrVectorIterator<BasicBlock> ref_block_itr, uint32_t stage_idx,
              std::vector<std::unique_ptr<BasicBlock>>* new_blocks) {
-        return GenBoundsCheckCode(ref_inst_itr, ref_block_itr, stage_idx,
-                                  new_blocks);
+        return GenDescIdxCheckCode(ref_inst_itr, ref_block_itr, stage_idx,
+                                   new_blocks);
       };
   bool modified = InstProcessEntryPointCallTree(pfn);
-  if (input_init_enabled_) {
+  if (desc_init_enabled_ || buffer_bounds_enabled_) {
     // Perform descriptor initialization check on each entry point function in
     // module
     pfn = [this](BasicBlock::iterator ref_inst_itr,
                  UptrVectorIterator<BasicBlock> ref_block_itr,
                  uint32_t stage_idx,
                  std::vector<std::unique_ptr<BasicBlock>>* new_blocks) {
-      return GenInitCheckCode(ref_inst_itr, ref_block_itr, stage_idx,
-                              new_blocks);
+      return GenDescInitCheckCode(ref_inst_itr, ref_block_itr, stage_idx,
+                                  new_blocks);
     };
     modified |= InstProcessEntryPointCallTree(pfn);
   }