@@ -88,15 +88,33 @@ const runInference = async () => {
8888
8989**Use cases:** Download management, storage cleanup, progress tracking, offline-first apps.
9090
91+ ## Built-in adapters vs custom adapter
92+
93+ React Native ExecuTorch does not bundle a resource fetcher — you bring your own. Two ready-made adapters are provided:
94+
95+ - **`ExpoResourceFetcher`** from `react-native-executorch-expo-resource-fetcher` — for Expo projects
96+ - **`BareResourceFetcher`** from `react-native-executorch-bare-resource-fetcher` — for bare React Native projects
97+
98+ Register the adapter once at app startup, before using any hooks or modules:
99+
100+ ```typescript
101+ import { initExecutorch } from 'react-native-executorch';
102+ import { ExpoResourceFetcher } from 'react-native-executorch-expo-resource-fetcher';
103+
104+ initExecutorch({ resourceFetcher: ExpoResourceFetcher });
105+ ```
106+
107+ If neither adapter fits your needs (custom download library, private server, custom caching), you can implement the `ResourceFetcherAdapter` interface yourself. See [Custom Adapter](https://docs.swmansion.com/react-native-executorch/docs/resource-fetcher/custom-adapter) for details.
108+
91109## Basic Usage
92110
93111``` typescript
94- import { ResourceFetcher } from 'react-native-executorch';
112+ import { ExpoResourceFetcher } from 'react-native-executorch-expo-resource-fetcher';
95113
96114// Download multiple resources with progress tracking
97115const downloadModels = async () => {
98116 try {
99- const uris = await ResourceFetcher .fetch (
117+ const uris = await ExpoResourceFetcher .fetch (
100118 (progress ) =>
101119 console .log (` Download progress: ${(progress * 100 ).toFixed (1 )}% ` ),
102120 ' https://example.com/llama3_2.pte' ,
@@ -117,22 +135,22 @@ const downloadModels = async () => {
117135## Pause and Resume Downloads
118136
119137``` typescript
120- import { ResourceFetcher } from 'react-native-executorch';
138+ import { ExpoResourceFetcher } from 'react-native-executorch-expo-resource-fetcher';
121139
122- const uris = ResourceFetcher .fetch (
140+ const uris = ExpoResourceFetcher .fetch (
123141 (progress ) => console .log (' Total progress:' , progress ),
124142 ' https://.../llama3_2.pte' ,
125143 ' https://.../qwen3.pte'
126144).then ((uris ) => {
127- console .log (' URI resolved as: ' , uris ); // since we pause the fetch, uris is resolved to null
145+ console .log (' URI resolved as: ' , uris ); // null, since we paused
128146});
129147
130- await ResourceFetcher .pauseFetching (
148+ await ExpoResourceFetcher .pauseFetching (
131149 ' https://.../llama3_2.pte' ,
132150 ' https://.../qwen3.pte'
133151);
134152
135- const resolvedUris = await ResourceFetcher .resumeFetching (
153+ const resolvedUris = await ExpoResourceFetcher .resumeFetching (
136154 ' https://.../llama3_2.pte' ,
137155 ' https://.../qwen3.pte'
138156);
@@ -141,17 +159,17 @@ const resolvedUris = await ResourceFetcher.resumeFetching(
141159## Cancel Downloads
142160
143161``` typescript
144- import { ResourceFetcher } from 'react-native-executorch';
162+ import { ExpoResourceFetcher } from 'react-native-executorch-expo-resource-fetcher';
145163
146- const uris = ResourceFetcher .fetch (
164+ const uris = ExpoResourceFetcher .fetch (
147165 (progress ) => console .log (' Total progress:' , progress ),
148166 ' https://.../llama3_2.pte' ,
149167 ' https://.../qwen3.pte'
150168).then ((uris ) => {
151- console .log (' URI resolved as: ' , uris ); // since we cancel the fetch, uris is resolved to null
169+ console .log (' URI resolved as: ' , uris ); // null, since we cancelled
152170});
153171
154- await ResourceFetcher .cancelFetching (
172+ await ExpoResourceFetcher .cancelFetching (
155173 ' https://.../llama3_2.pte' ,
156174 ' https://.../qwen3.pte'
157175);
@@ -160,22 +178,22 @@ await ResourceFetcher.cancelFetching(
160178## Manage Downloaded Resources
161179
162180``` typescript
163- import { ResourceFetcher } from 'react-native-executorch';
181+ import { ExpoResourceFetcher } from 'react-native-executorch-expo-resource-fetcher';
164182
165183// List all downloaded files
166184const listFiles = async () => {
167- const files = await ResourceFetcher .listDownloadedFiles ();
185+ const files = await ExpoResourceFetcher .listDownloadedFiles ();
168186 console .log (' All downloaded files:' , files );
169187
170- const models = await ResourceFetcher .listDownloadedModels ();
188+ const models = await ExpoResourceFetcher .listDownloadedModels ();
171189 console .log (' Model files:' , models );
172190};
173191
174192// Clean up old resources
175193const cleanup = async () => {
176- const oldModelUrl = ' https://example.com/old_model.pte ' ;
177-
178- await ResourceFetcher . deleteResources ( oldModelUrl );
194+ await ExpoResourceFetcher . deleteResources (
195+ ' https://example.com/old_model.pte '
196+ );
179197 console .log (' Old model deleted' );
180198};
181199```
@@ -195,11 +213,12 @@ Resources can be:
195213**Progress callback:** Progress is reported as 0–1 for all downloads combined.
196214**Null return:** If `fetch()` returns `null`, the download was paused or cancelled.
197215**Network errors:** Implement retry logic with exponential backoff for reliability.
198- **Storage location:** Downloaded files are stored in the application's document directory under `react-native-executorch/`
216+ **Pause/resume on Android:** `BareResourceFetcher` does not support pause/resume on Android. Use `ExpoResourceFetcher` if you need this on Android.
199217
200218## Additional references
201219
202- - [ResourceFetcher full reference docs](https://docs.swmansion.com/react-native-executorch/docs/utilities/resource-fetcher)
220+ - [ResourceFetcher usage docs](https://docs.swmansion.com/react-native-executorch/docs/resource-fetcher/usage)
221+ - [Custom Adapter docs](https://docs.swmansion.com/react-native-executorch/docs/resource-fetcher/custom-adapter)
203222- [Loading Models guide](https://docs.swmansion.com/react-native-executorch/docs/fundamentals/loading-models)
204223
205224---
@@ -220,13 +239,13 @@ import {
220239 RnExecutorchErrorCode ,
221240} from ' react-native-executorch' ;
222241
223- const llm = new LLMModule ({
224- tokenCallback : (token ) => console .log (token ),
225- messageHistoryCallback : (messages ) => console .log (messages ),
226- });
227-
228242try {
229- await llm .load (LLAMA3_2_1B_QLORA , (progress ) => console .log (progress ));
243+ const llm = await LLMModule .fromModelName (
244+ LLAMA3_2_1B_QLORA ,
245+ (progress ) => console .log (progress ),
246+ (token ) => console .log (token ),
247+ (messages ) => console .log (messages )
248+ );
230249 await llm .sendMessage (' Hello!' );
231250} catch (err ) {
232251 if (err instanceof RnExecutorchError ) {
@@ -242,21 +261,27 @@ try {
242261
243262``` typescript
244263import {
264+ LLMModule ,
265+ LLAMA3_2_1B_QLORA ,
245266 RnExecutorchError ,
246267 RnExecutorchErrorCode ,
247268} from ' react-native-executorch' ;
248269
249- const handleModelError = async (llm , message : string ) => {
270+ const llm = await LLMModule .fromModelName (
271+ LLAMA3_2_1B_QLORA ,
272+ (progress ) => console .log (progress ),
273+ (token ) => console .log (token ),
274+ (messages ) => console .log (messages )
275+ );
276+
277+ const handleModelError = async (message : string ) => {
250278 try {
251279 await llm .sendMessage (message );
252280 } catch (err ) {
253281 if (err instanceof RnExecutorchError ) {
254282 switch (err .code ) {
255283 case RnExecutorchErrorCode .ModuleNotLoaded :
256- console .error (' Model not loaded. Loading now...' );
257- await llm .load (LLAMA3_2_1B_QLORA );
258- // Retry the message
259- await llm .sendMessage (message );
284+ console .error (' Model not loaded.' );
260285 break ;
261286
262287 case RnExecutorchErrorCode .ModelGenerating :
@@ -267,7 +292,9 @@ const handleModelError = async (llm, message: string) => {
267292 case RnExecutorchErrorCode .InvalidConfig :
268293 console .error (' Invalid configuration:' , err .message );
269294 // Reset to default config
270- await llm .configure ({ topp: 0.9 , temperature: 0.7 });
295+ await llm .configure ({
296+ generationConfig: { topp: 0.9 , temperature: 0.7 },
297+ });
271298 break ;
272299
273300 default :
0 commit comments