[ROCm] Skip GemmFusionIsNoOpWhenGemmFusionAutotunerFallsBackToCublas test

in gpu_compiler_test because the autotuner is not used on ROCm yet.
zoranjovanovic-ns committed Jun 12, 2024
1 parent 9bdf8cb commit 569bd86
Showing 1 changed file with 12 additions and 0 deletions.
xla/service/gpu/gpu_compiler_test.cc: 12 additions & 0 deletions
@@ -64,6 +64,10 @@ using ::testing::Not;
 using ::testing::TempDir;

 class GpuCompilerTest : public HloTestBase {
+  const auto& device_desc() {
+    return backend().default_stream_executor()->GetDeviceDescription();
+  }
+
  public:
   absl::Status Schedule(HloModule* module) {
     auto compiler = backend().compiler();
@@ -73,6 +77,10 @@ class GpuCompilerTest : public HloTestBase {
     return tensorflow::down_cast<GpuCompiler*>(compiler)
         ->RunPostSchedulingPipelines(module, 4 * 1024 * 1024, gpu_device_info);
   }
+
+  const se::GpuComputeCapability& GpuComputeComp() {
+    return device_desc().gpu_compute_capability();
+  }
 };

 TEST_F(GpuCompilerTest, CompiledProgramsCount) {
@@ -335,6 +343,10 @@ ENTRY main {

 TEST_F(GpuCompilerTest,
        GemmFusionIsNoOpWhenGemmFusionAutotunerFallsBackToCublas) {
+  if (std::holds_alternative<se::RocmComputeCapability>(GpuComputeComp())) {
+    GTEST_SKIP() << "Not using autotuner on ROCM yet.";
+  }
+
   const absl::string_view hlo_string = R"(
 HloModule test
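
For context, the skip guard works because the diff branches on the return value of GpuComputeComp() with std::holds_alternative<se::RocmComputeCapability>, which implies se::GpuComputeCapability is a std::variant that can hold either the CUDA or the ROCm capability struct. The sketch below is a minimal, self-contained illustration of that variant-dispatch pattern; CudaCapability, RocmCapability, and ShouldSkipOnRocm are stand-in names invented here for illustration, not XLA symbols.

// Minimal sketch (assumption: the platform check reduces to std::holds_alternative
// on a std::variant of per-vendor capability structs, as the diff above suggests).
#include <iostream>
#include <string>
#include <variant>

struct CudaCapability { int major = 8; int minor = 0; };          // stand-in for se::CudaComputeCapability
struct RocmCapability { std::string gcn_arch = "gfx90a"; };       // stand-in for se::RocmComputeCapability

using ComputeCapability = std::variant<CudaCapability, RocmCapability>;

// Mirrors the GTEST_SKIP guard: true when the active backend is ROCm.
bool ShouldSkipOnRocm(const ComputeCapability& cc) {
  return std::holds_alternative<RocmCapability>(cc);
}

int main() {
  ComputeCapability cc = RocmCapability{};
  if (ShouldSkipOnRocm(cc)) {
    // In the real test this is where GTEST_SKIP() would fire.
    std::cout << "Not using autotuner on ROCM yet." << std::endl;
  }
  return 0;
}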
