
Commit

Clear RIManager locally if compiling for non-GPU target. This ensures that
multiple reduction regions in the same function will not inadvertently share
reduction info. This needs to be fixed for GPU if multiple loops with
reductions are specified within a single target region.
jsjodin committed Jan 6, 2024
1 parent 76a37ba commit 9c4aa51
Showing 1 changed file with 6 additions and 2 deletions.
@@ -888,7 +888,6 @@ convertOmpWsLoop(Operation &opInst, llvm::IRBuilderBase &builder,
         moduleTranslation.lookupValue(loop.getScheduleChunkVar());
     chunk = builder.CreateSExtOrTrunc(chunkVar, ivType);
   }
-
   SmallVector<omp::ReductionDeclareOp> reductionDecls;
   collectReductionDecls(loop, reductionDecls);
   llvm::OpenMPIRBuilder::InsertPointTy allocaIP =
@@ -1053,6 +1052,9 @@ convertOmpWsLoop(Operation &opInst, llvm::IRBuilderBase &builder,
   tempTerminator->eraseFromParent();
   builder.restoreIP(nextInsertionPoint);
 
+  if (!ompBuilder->Config.isGPU())
+    ompBuilder->RIManager.clear();
+
   return success();
 }
 
@@ -1168,6 +1170,9 @@ convertOmpParallel(Operation &opInst1, llvm::IRBuilderBase &builder,
       ompBuilder->createParallel(ompLoc, allocaIP, bodyGenCB, privCB, finiCB,
                                  ifCond, numThreads, pbKind, isCancellable));
 
+  if (!ompBuilder->Config.isGPU())
+    ompBuilder->RIManager.clear();
+
   return bodyGenStatus;
 }
 
@@ -2315,7 +2320,6 @@ convertOmpDistribute(Operation &opInst, llvm::IRBuilderBase &builder,
         // DistributeOp has only one region associated with it.
        builder.restoreIP(codeGenIP);
        ompBuilder->RIManager.setPrivateVarAllocaIP(allocaIP);
-
        auto regionBlock =
            convertOmpOpRegions(opInst.getRegion(0), "omp.distribute.region",
                                builder, moduleTranslation, bodyGenStatus);
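For illustration, here is a minimal, self-contained C++ sketch of the state-sharing problem the commit addresses. This is not the LLVM API: ReductionInfoManager, convertRegion, and the string entries are hypothetical stand-ins for OpenMPIRBuilder's RIManager and the per-construct reduction info it accumulates.

#include <cassert>
#include <string>
#include <vector>

// Hypothetical stand-in for OpenMPIRBuilder's RIManager: it accumulates
// reduction info while a construct is being lowered.
struct ReductionInfoManager {
  std::vector<std::string> infos;
  void clear() { infos.clear(); }
};

// Lowering one reduction region appends that region's entries. Without the
// clear() at the end, a second region lowered in the same function would
// also observe the first region's entries -- the sharing this commit
// prevents on host (non-GPU) targets.
void convertRegion(ReductionInfoManager &riManager, bool isGPU,
                   const std::vector<std::string> &reductions) {
  riManager.infos.insert(riManager.infos.end(), reductions.begin(),
                         reductions.end());
  // ... emit code that consumes riManager.infos ...
  if (!isGPU)
    riManager.clear(); // next region starts with no stale entries
}

int main() {
  ReductionInfoManager riManager;
  convertRegion(riManager, /*isGPU=*/false, {"x"});
  convertRegion(riManager, /*isGPU=*/false, {"y"});
  assert(riManager.infos.empty()); // per-region state did not leak
}

On GPU targets the info is kept across the target region rather than cleared, which is why the commit message notes this still needs a fix when multiple reduction loops appear within a single target region.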
