diff --git a/packages/react-native-executorch/common/rnexecutorch/host_objects/ModelHostObject.h b/packages/react-native-executorch/common/rnexecutorch/host_objects/ModelHostObject.h
index c175be829..90241eac0 100644
--- a/packages/react-native-executorch/common/rnexecutorch/host_objects/ModelHostObject.h
+++ b/packages/react-native-executorch/common/rnexecutorch/host_objects/ModelHostObject.h
@@ -436,6 +436,7 @@ template class ModelHostObject : public JsiHostObject {
   JSI_HOST_FUNCTION(unload) {
     try {
       model->unload();
+      thisValue.asObject(runtime).setExternalMemoryPressure(runtime, 0);
     } catch (const RnExecutorchError &e) {
       jsi::Object errorData(runtime);
       errorData.setProperty(runtime, "code", e.getNumericCode());
diff --git a/packages/react-native-executorch/common/rnexecutorch/models/llm/LLM.cpp b/packages/react-native-executorch/common/rnexecutorch/models/llm/LLM.cpp
index 03afd4ed0..95ebed1d5 100644
--- a/packages/react-native-executorch/common/rnexecutorch/models/llm/LLM.cpp
+++ b/packages/react-native-executorch/common/rnexecutorch/models/llm/LLM.cpp
@@ -255,6 +255,9 @@ int32_t LLM::getMaxContextLength() const {
   return runner_->get_max_context_length();
 }
 
-void LLM::unload() noexcept { runner_.reset(nullptr); }
+void LLM::unload() noexcept {
+  runner_.reset(nullptr);
+  BaseModel::unload();
+}
 
 } // namespace rnexecutorch::models::llm
diff --git a/packages/react-native-executorch/src/controllers/LLMController.ts b/packages/react-native-executorch/src/controllers/LLMController.ts
index dc67727e3..7ce921f6f 100644
--- a/packages/react-native-executorch/src/controllers/LLMController.ts
+++ b/packages/react-native-executorch/src/controllers/LLMController.ts
@@ -121,6 +121,10 @@ export class LLMController {
       await ResourceFetcher.fs.readAsString(tokenizerConfigPath!)
     );
 
+    if (this.nativeModule) {
+      this.nativeModule.unload();
+    }
+
     this.nativeModule = await global.loadLLM(
       modelPath,
       tokenizerPath,