
Commit 13628b8

Retry fixing
1 parent 6d11bf9 commit 13628b8

4 files changed, +18 -16 lines changed


CMakeLists.txt

Lines changed: 8 additions & 5 deletions
@@ -99,7 +99,6 @@ announce_configured_options(CCACHE_PROGRAM)
 
 set(CMAKE_EXPORT_COMPILE_COMMANDS ON)
 
-
 if(NOT EXECUTORCH_ENABLE_LOGGING)
   # Avoid pulling in the logging strings, which can be large. Note that this
   # will set the compiler flag for all targets in this directory, and for all
@@ -882,10 +881,14 @@ if(EXECUTORCH_BUILD_PYBIND)
   # This goes from executorch/extension/pybindings up to site-packages, then to
   # torch/lib
   if(APPLE)
-    set_target_properties(
-      portable_lib PROPERTIES BUILD_RPATH "@loader_path/../../../torch/lib"
-                              INSTALL_RPATH "@loader_path/../../../torch/lib"
-    )
+    get_target_property(existing_rpath portable_lib INSTALL_RPATH)
+    string(FIND "${existing_rpath}" "@loader_path" pos)
+    if(pos EQUAL -1)
+      set_target_properties(
+        portable_lib PROPERTIES BUILD_RPATH "@loader_path/../../../torch/lib"
+                                INSTALL_RPATH "@loader_path/../../../torch/lib"
+      )
+    endif()
   else()
     set_target_properties(
       portable_lib PROPERTIES BUILD_RPATH "$ORIGIN/../../../torch/lib"

backends/cuda/runtime/cuda_backend.cpp

Lines changed: 7 additions & 7 deletions
@@ -106,13 +106,13 @@ class ET_EXPERIMENTAL CudaBackend final
         method_name.empty() ? "so_blob" : method_name + "_so_blob";
 
     const NamedDataMap* named_data_map = context.get_named_data_map();
-    auto aoti_cuda_buffer = named_data_map->get_data(so_blob_key.c_str());
+    auto aoti_dso_buffer = named_data_map->get_data(so_blob_key.c_str());
     ET_CHECK_OR_RETURN_ERROR(
-        aoti_cuda_buffer.ok(),
+        aoti_dso_buffer.ok(),
         Internal,
         "Failed to get data for key %s: 0x%x",
         so_blob_key.c_str(),
-        static_cast<uint32_t>(aoti_cuda_buffer.error()));
+        static_cast<uint32_t>(aoti_dso_buffer.error()));
 
     // Generate dynamic temporary file path
     filesystem::path temp_dir = filesystem::temp_directory_path();
@@ -126,12 +126,12 @@ class ET_EXPERIMENTAL CudaBackend final
     ET_LOG(
         Info,
         "Writing %zu bytes to %s",
-        aoti_cuda_buffer->size(),
+        aoti_dso_buffer->size(),
         so_path.c_str());
 
     outfile.write(
-        static_cast<const char*>(aoti_cuda_buffer->data()),
-        aoti_cuda_buffer->size());
+        static_cast<const char*>(aoti_dso_buffer->data()),
+        aoti_dso_buffer->size());
 
     ET_CHECK_OR_RETURN_ERROR(
         outfile, AccessFailed, "Failed to write to file %s", so_path.c_str());
@@ -140,7 +140,7 @@ class ET_EXPERIMENTAL CudaBackend final
     outfile.close();
 
     // Free the buffer immediately after writing to disk
-    aoti_cuda_buffer->Free();
+    aoti_dso_buffer->Free();
     // Load the ELF using dlopen
     void* so_handle = dlopen(so_path.c_str(), RTLD_LAZY | RTLD_LOCAL);
     ET_CHECK_OR_RETURN_ERROR(
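
For context, here is a minimal standalone sketch of the flow this hunk touches: writing an in-memory shared-object (DSO) blob to a temporary file and loading it with dlopen, which is also why the buffer is now named aoti_dso_buffer. The helper name write_and_load_dso and its nullptr-based error handling are illustrative assumptions, not the backend's actual API; the real code uses the NamedDataMap buffer and ET_CHECK_OR_RETURN_ERROR shown above.

// Sketch only: helper name and error handling are assumptions, not the
// ExecuTorch CUDA backend's API.
#include <dlfcn.h>  // dlopen, dlerror, RTLD_LAZY, RTLD_LOCAL

#include <cstddef>
#include <cstdio>
#include <filesystem>
#include <fstream>
#include <string>

// Write `size` bytes of a shared-object blob to <tmp>/<stem>.so, then dlopen it.
void* write_and_load_dso(
    const void* data, std::size_t size, const std::string& stem) {
  namespace fs = std::filesystem;
  const fs::path so_path = fs::temp_directory_path() / (stem + ".so");

  {
    std::ofstream outfile(so_path, std::ios::binary);
    if (!outfile) {
      return nullptr;  // could not open the temporary file
    }
    outfile.write(
        static_cast<const char*>(data), static_cast<std::streamsize>(size));
    if (!outfile) {
      return nullptr;  // short or failed write
    }
  }  // stream closed here; the source blob could be freed at this point

  // Same flags as the backend uses above.
  void* so_handle = dlopen(so_path.c_str(), RTLD_LAZY | RTLD_LOCAL);
  if (so_handle == nullptr) {
    std::fprintf(stderr, "dlopen(%s) failed: %s\n", so_path.c_str(), dlerror());
  }
  return so_handle;
}

Closing the stream before dlopen matters: the loader reads the file from disk, so the write must be flushed, and only then can the in-memory blob safely be freed, mirroring the close/Free/dlopen order in the diff.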

examples/models/moshi/mimi/install_requirements.sh

Lines changed: 2 additions & 2 deletions
@@ -8,8 +8,8 @@
 set -x
 
 conda install -c conda-forge "ffmpeg<8" -y
-pip install torchcodec==0.7.0.dev20250929 --extra-index-url https://download.pytorch.org/whl/nightly/cpu
-pip install moshi==0.2.4
+pip install torchcodec==0.7.0.dev20251012 --extra-index-url https://download.pytorch.org/whl/nightly/cpu
+pip install moshi==0.2.4 --no-deps
 pip install bitsandbytes soundfile
 # Run llama2/install requirements for torchao deps
 SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )

examples/models/moshi/mimi/test_mimi.py

Lines changed: 1 addition & 2 deletions
@@ -189,8 +189,7 @@ def forward(self, x):
         x = self.mimi_model.upsample(x)
         (emb,) = self.mimi_model.decoder_transformer(x)
         emb.transpose(1, 2)
-        with self.mimi_model._context_for_encoder_decoder:
-            out = self.mimi_model.decoder(emb)
+        out = self.mimi_model.decoder(emb)
         return out
 
 emb_input = torch.rand(1, 1, 512, device="cpu")
