Cast failure in SelectionDAGBuilder

I’m trying to track down an assertion failure in SelectionDAGBuilder.cpp. This is the code:

  case Intrinsic::gcroot:
    if (GFI) {
      const Value *Alloca = I.getArgOperand(0);
      const Constant *TypeMap = cast<Constant>(I.getArgOperand(1));

      FrameIndexSDNode *FI = cast<FrameIndexSDNode>(getValue(Alloca).getNode());
      GFI->addStackRoot(FI->getIndex(), TypeMap);
    }
    return 0;

The cast is what’s failing: apparently the node returned by getValue(Alloca) isn’t a FrameIndexSDNode.
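For reference, cast<> simply asserts when the dynamic type doesn’t match, so the failure itself doesn’t say what the node actually is. A dyn_cast-based guard (only a local debugging sketch, not the code as it exists in the tree) would report the offending opcode instead of asserting:

  case Intrinsic::gcroot:
    if (GFI) {
      const Value *Alloca = I.getArgOperand(0);
      const Constant *TypeMap = cast<Constant>(I.getArgOperand(1));

      SDNode *N = getValue(Alloca).getNode();
      if (FrameIndexSDNode *FI = dyn_cast<FrameIndexSDNode>(N)) {
        GFI->addStackRoot(FI->getIndex(), TypeMap);
      } else {
        // Not a frame index: name the node instead of dying inside cast<>.
        errs() << "llvm.gcroot operand lowered to " << N->getOperationName()
               << ", not a FrameIndex\n";
      }
    }
    return 0;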

Now, we discussed a similar problem on this list earlier, and the cause in that case was that the calls to llvm.gcroot() were not in the first block. However, that’s not the case this time; here’s the function being processed (according to I.getParent()->getParent()->dump()):

define internal %tart.core.Object* @"tart.core.Object.coerceint32->tart.core.Object"(i32 %value) gc "tart-gc" {
prologue:
call void @llvm.dbg.declare(metadata !{i32 %value}, metadata !48487)
%gc_root = alloca %"tart.core.ValueRef[char]"*, !dbg !48488
store %"tart.core.ValueRef[char]"* null, %"tart.core.ValueRef[char]"** %gc_root
%0 = bitcast %"tart.core.ValueRef[char]"** %gc_root to i8**, !dbg !48488
call void @llvm.gcroot(i8** %0, i8* null), !dbg !48488
br label %entry, !dbg !48488

entry: ; preds = %prologue
%ValueRef_new = call %"tart.core.ValueRef[char]"* @"tart.core.ValueRef[int32].type.alloc"(), !dbg !48488
store %"tart.core.ValueRef[char]"* %ValueRef_new, %"tart.core.ValueRef[char]"** %gc_root, !dbg !48488
%construct = call %tart.core.Hashable @"tart.core.ValueRef[int32].construct(int32)"(%"tart.core.ValueRef[char]"* %ValueRef_new, i32 %value), !dbg !48488
store %"tart.core.ValueRef[char]"* null, %"tart.core.ValueRef[char]"** %gc_root, !dbg !48488
%upcast = getelementptr inbounds %"tart.core.ValueRef[char]"* %ValueRef_new, i32 0, i32 0, i32 0, !dbg !48488
ret %tart.core.Object* %upcast, !dbg !48488
}

As you can see, the only call to llvm.gcroot() is in fact in the first block.

Now, here’s the weird part: if I turn on optimization, the assertion failure no longer happens. What’s the difference? Here’s what the function looks like (I.getParent()->getParent()->dump()) with -O2:

define internal %7* @"tart.reflect.Type.iterate->tart.core.Iterator[tart.reflect.Type]"(%"tart.reflect.Type"* %self) nounwind gc "tart-gc" {
prologue:
%gc_root = alloca i8*, align 8
store i8* null, i8** %gc_root
call void @llvm.gcroot(i8** %gc_root, i8* null), !dbg !48487
%new.i = call i8* @malloc(i64 32) nounwind, !dbg !48488
%0 = bitcast i8* %new.i to %tart.core.TypeInfoBlock**
store %tart.core.TypeInfoBlock* bitcast (%25* @"tart.reflect.Type.ArrayIterator.type.tib" to %tart.core.TypeInfoBlock*), %tart.core.TypeInfoBlock** %0, align 8, !dbg !48488
tail call void @llvm.dbg.declare(metadata !{null}, metadata !48489)
%1 = getelementptr inbounds i8* %new.i, i64 16
%2 = bitcast i8* %1 to %"tart.reflect.Type"**
store %"tart.reflect.Type"* %self, %"tart.reflect.Type"** %2, align 8, !dbg !48490
%3 = getelementptr inbounds i8* %new.i, i64 24
%4 = bitcast i8* %3 to i64*
store i64 0, i64* %4, align 8, !dbg !48491
%intf_ptr = bitcast i8* %new.i to %7*
ret %7* %intf_ptr, !dbg !48487
}

OK, I’m looking at the crash in the debugger, and the node’s opcode (getValue(Alloca).getNode()->getOpcode()) is 41. According to what the debugger is telling me, that value corresponds to llvm::ISD::CopyFromReg.

When I debug the optimized version, the opcode is 13 (llvm::ISD::FrameIndex), which seems right.
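(Incidentally, a way to avoid mapping raw opcode numbers by hand, since they drift between LLVM revisions, is to have the node print itself; SDNode::dump() can be dropped in temporarily right before the cast, or called from the debugger:)

      SDNode *N = getValue(Alloca).getNode();
      // dump() prints the node, including its operation name and operands,
      // so there is no need to decode the opcode enum manually.
      N->dump();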

Does any of that make any sense?

Sorry about the formatting on the code samples…I was trying to get them not to line wrap. (What the world really needs is a pretty-printer for LLVM IR.)

So…anyone?