Diffstat (limited to 'test/CodeGen/PowerPC/ctrloop-i64.ll')
-rw-r--r-- | test/CodeGen/PowerPC/ctrloop-i64.ll | 16
1 file changed, 8 insertions(+), 8 deletions(-)
diff --git a/test/CodeGen/PowerPC/ctrloop-i64.ll b/test/CodeGen/PowerPC/ctrloop-i64.ll
index 9e01392..5c66a68 100644
--- a/test/CodeGen/PowerPC/ctrloop-i64.ll
+++ b/test/CodeGen/PowerPC/ctrloop-i64.ll
@@ -10,8 +10,8 @@ entry:
 for.body:                                         ; preds = %for.body, %entry
   %i.06 = phi i32 [ 0, %entry ], [ %inc, %for.body ]
   %x.05 = phi i64 [ 0, %entry ], [ %conv1, %for.body ]
-  %arrayidx = getelementptr inbounds i64* %n, i32 %i.06
-  %0 = load i64* %arrayidx, align 8
+  %arrayidx = getelementptr inbounds i64, i64* %n, i32 %i.06
+  %0 = load i64, i64* %arrayidx, align 8
   %conv = udiv i64 %x.05, %d
   %conv1 = add i64 %conv, %0
   %inc = add nsw i32 %i.06, 1
@@ -32,8 +32,8 @@ entry:
 for.body:                                         ; preds = %for.body, %entry
   %i.06 = phi i32 [ 0, %entry ], [ %inc, %for.body ]
   %x.05 = phi i64 [ 0, %entry ], [ %conv1, %for.body ]
-  %arrayidx = getelementptr inbounds i64* %n, i32 %i.06
-  %0 = load i64* %arrayidx, align 8
+  %arrayidx = getelementptr inbounds i64, i64* %n, i32 %i.06
+  %0 = load i64, i64* %arrayidx, align 8
   %conv = sdiv i64 %x.05, %d
   %conv1 = add i64 %conv, %0
   %inc = add nsw i32 %i.06, 1
@@ -54,8 +54,8 @@ entry:
 for.body:                                         ; preds = %for.body, %entry
   %i.06 = phi i32 [ 0, %entry ], [ %inc, %for.body ]
   %x.05 = phi i64 [ 0, %entry ], [ %conv1, %for.body ]
-  %arrayidx = getelementptr inbounds i64* %n, i32 %i.06
-  %0 = load i64* %arrayidx, align 8
+  %arrayidx = getelementptr inbounds i64, i64* %n, i32 %i.06
+  %0 = load i64, i64* %arrayidx, align 8
   %conv = urem i64 %x.05, %d
   %conv1 = add i64 %conv, %0
   %inc = add nsw i32 %i.06, 1
@@ -76,8 +76,8 @@ entry:
 for.body:                                         ; preds = %for.body, %entry
   %i.06 = phi i32 [ 0, %entry ], [ %inc, %for.body ]
   %x.05 = phi i64 [ 0, %entry ], [ %conv1, %for.body ]
-  %arrayidx = getelementptr inbounds i64* %n, i32 %i.06
-  %0 = load i64* %arrayidx, align 8
+  %arrayidx = getelementptr inbounds i64, i64* %n, i32 %i.06
+  %0 = load i64, i64* %arrayidx, align 8
   %conv = srem i64 %x.05, %d
   %conv1 = add i64 %conv, %0
   %inc = add nsw i32 %i.06, 1
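
For context, these edits track LLVM's switch to spelling the pointee type explicitly on getelementptr and load (done in preparation for opaque pointer types); the test's CHECK lines and semantics are unchanged. A minimal standalone sketch of the two spellings, using a hypothetical function @sum_elem that is not taken from this test:

    ; old syntax: the indexed/loaded type is implied by the pointer operand
    ;   %p = getelementptr inbounds i64* %base, i32 %idx
    ;   %v = load i64* %p, align 8
    ; new syntax: the type is written explicitly, before the pointer operand
    define i64 @sum_elem(i64* %base, i32 %idx) {
    entry:
      %p = getelementptr inbounds i64, i64* %base, i32 %idx
      %v = load i64, i64* %p, align 8
      ret i64 %v
    }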