Manjunath Kudlur committed
Commit bb3b9a0 · 1 Parent(s): cea27f5

Added caching

Files changed (3)
  1. decoder_worker.js +42 -1
  2. encoder_worker.js +42 -1
  3. streaming_asr.js +11 -6
decoder_worker.js CHANGED
@@ -7,8 +7,33 @@ importScripts('https://cdn.jsdelivr.net/npm/[email protected]/dist/ort.min.
 // Configure ONNX Runtime to find WASM files from CDN
 ort.env.wasm.wasmPaths = 'https://cdn.jsdelivr.net/npm/[email protected]/dist/';
 
-// Helper to fetch model with progress reporting
+const MODEL_CACHE_NAME = 'moonshine-models-v1';
+
+// Helper to fetch model with progress reporting and caching
 async function fetchModelWithProgress(url, modelName) {
+  // Try to get from cache first
+  try {
+    const cache = await caches.open(MODEL_CACHE_NAME);
+    const cachedResponse = await cache.match(url);
+
+    if (cachedResponse) {
+      const buffer = await cachedResponse.arrayBuffer();
+      self.postMessage({
+        type: 'progress',
+        model: modelName,
+        loaded: buffer.byteLength,
+        total: buffer.byteLength,
+        done: true,
+        cached: true
+      });
+      console.log(`${modelName} loaded from cache`);
+      return buffer;
+    }
+  } catch (e) {
+    console.warn('Cache API not available:', e.message);
+  }
+
+  // Fetch from network
   const response = await fetch(url);
   if (!response.ok) {
     throw new Error(`Failed to fetch ${modelName}: ${response.status}`);
@@ -27,6 +52,13 @@ async function fetchModelWithProgress(url, modelName) {
     total: buffer.byteLength,
     done: true
   });
+  // Cache the response
+  try {
+    const cache = await caches.open(MODEL_CACHE_NAME);
+    await cache.put(url, new Response(buffer.slice(0)));
+  } catch (e) {
+    console.warn('Failed to cache model:', e.message);
+  }
   return buffer;
 }
 
@@ -66,6 +98,15 @@ async function fetchModelWithProgress(url, modelName) {
     offset += chunk.length;
   }
 
+  // Cache the result
+  try {
+    const cache = await caches.open(MODEL_CACHE_NAME);
+    await cache.put(url, new Response(result.slice(0)));
+    console.log(`${modelName} cached`);
+  } catch (e) {
+    console.warn('Failed to cache model:', e.message);
+  }
+
   return result.buffer;
 }
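The cache logic added here is the standard cache-first pattern from the browser Cache API, which is available inside workers as well as on the main thread (in secure contexts). A minimal standalone sketch of the same pattern, with the commit's progress reporting omitted; the loadModelCached name is illustrative and not part of this commit:

const MODEL_CACHE_NAME = 'moonshine-models-v1';

// Cache-first load of a large binary asset from inside a worker.
async function loadModelCached(url) {
  const cache = await caches.open(MODEL_CACHE_NAME);

  // Serve from the cache when we already have the bytes.
  const hit = await cache.match(url);
  if (hit) return hit.arrayBuffer();

  // Otherwise fetch from the network and store a copy for next time.
  const response = await fetch(url);
  if (!response.ok) throw new Error(`Fetch failed: ${response.status}`);
  const buffer = await response.arrayBuffer();
  // Copy the buffer (slice(0)) before wrapping it in a Response,
  // mirroring the commit's defensive copy.
  await cache.put(url, new Response(buffer.slice(0)));
  return buffer;
}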
 
encoder_worker.js CHANGED
@@ -7,8 +7,33 @@ importScripts('https://cdn.jsdelivr.net/npm/[email protected]/dist/ort.min.
 // Configure ONNX Runtime to find WASM files from CDN
 ort.env.wasm.wasmPaths = 'https://cdn.jsdelivr.net/npm/[email protected]/dist/';
 
-// Helper to fetch model with progress reporting
+const MODEL_CACHE_NAME = 'moonshine-models-v1';
+
+// Helper to fetch model with progress reporting and caching
 async function fetchModelWithProgress(url, modelName) {
+  // Try to get from cache first
+  try {
+    const cache = await caches.open(MODEL_CACHE_NAME);
+    const cachedResponse = await cache.match(url);
+
+    if (cachedResponse) {
+      const buffer = await cachedResponse.arrayBuffer();
+      self.postMessage({
+        type: 'progress',
+        model: modelName,
+        loaded: buffer.byteLength,
+        total: buffer.byteLength,
+        done: true,
+        cached: true
+      });
+      console.log(`${modelName} loaded from cache`);
+      return buffer;
+    }
+  } catch (e) {
+    console.warn('Cache API not available:', e.message);
+  }
+
+  // Fetch from network
   const response = await fetch(url);
   if (!response.ok) {
     throw new Error(`Failed to fetch ${modelName}: ${response.status}`);
@@ -27,6 +52,13 @@ async function fetchModelWithProgress(url, modelName) {
     total: buffer.byteLength,
     done: true
   });
+  // Cache the response
+  try {
+    const cache = await caches.open(MODEL_CACHE_NAME);
+    await cache.put(url, new Response(buffer.slice(0)));
+  } catch (e) {
+    console.warn('Failed to cache model:', e.message);
+  }
   return buffer;
 }
 
@@ -66,6 +98,15 @@ async function fetchModelWithProgress(url, modelName) {
     offset += chunk.length;
   }
 
+  // Cache the result
+  try {
+    const cache = await caches.open(MODEL_CACHE_NAME);
+    await cache.put(url, new Response(result.slice(0)));
+    console.log(`${modelName} cached`);
+  } catch (e) {
+    console.warn('Failed to cache model:', e.message);
+  }
+
   return result.buffer;
 }
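The encoder worker receives the identical change, and both workers deliberately share the same MODEL_CACHE_NAME, so each model URL is stored once no matter which worker fetches it first. One side effect of the '-v1' suffix: bumping it to invalidate old models leaves the previous cache behind. A sketch of a cleanup helper that prunes stale versions (hypothetical, not part of this commit):

const MODEL_CACHE_NAME = 'moonshine-models-v1';

// Delete every moonshine model cache except the current version,
// so a version bump does not leak storage quota.
async function pruneStaleModelCaches() {
  const names = await caches.keys();
  await Promise.all(
    names
      .filter((n) => n.startsWith('moonshine-models-') && n !== MODEL_CACHE_NAME)
      .map((n) => caches.delete(n))
  );
}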
 
streaming_asr.js CHANGED
@@ -314,7 +314,7 @@ class PipelinedStreamingASR {
           // Progress update from worker
           break;
         case 'progress':
-          onProgress?.(e.data.model, { loaded: e.data.loaded, total: e.data.total });
+          onProgress?.(e.data.model, { loaded: e.data.loaded, total: e.data.total, cached: e.data.cached });
           break;
         case 'model_done':
           onModelDone?.(e.data.model);
@@ -368,7 +368,7 @@ class PipelinedStreamingASR {
         case 'status':
           break;
         case 'progress':
-          onProgress?.(e.data.model, { loaded: e.data.loaded, total: e.data.total });
+          onProgress?.(e.data.model, { loaded: e.data.loaded, total: e.data.total, cached: e.data.cached });
           break;
         case 'model_done':
           onModelDone?.(e.data.model);
@@ -888,10 +888,15 @@ class ASRDemoUI {
 
     // Update details with current model and byte progress
     if (currentModel && currentProgress.total > 0) {
-      const loadedMB = (currentProgress.loaded / (1024 * 1024)).toFixed(1);
-      const totalMB = (currentProgress.total / (1024 * 1024)).toFixed(1);
-      const percent = Math.round((currentProgress.loaded / currentProgress.total) * 100);
-      this.loadingDetails.textContent = `${currentModel}: ${loadedMB} / ${totalMB} MB (${percent}%)`;
+      if (currentProgress.cached) {
+        const sizeMB = (currentProgress.total / (1024 * 1024)).toFixed(1);
+        this.loadingDetails.textContent = `${currentModel}: ${sizeMB} MB (cached)`;
+      } else {
+        const loadedMB = (currentProgress.loaded / (1024 * 1024)).toFixed(1);
+        const totalMB = (currentProgress.total / (1024 * 1024)).toFixed(1);
+        const percent = Math.round((currentProgress.loaded / currentProgress.total) * 100);
+        this.loadingDetails.textContent = `${currentModel}: ${loadedMB} / ${totalMB} MB (${percent}%)`;
+      }
     } else if (currentModel) {
      this.loadingDetails.textContent = `Loading ${currentModel}...`;
     }
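With the cached flag threaded through the worker messages, an onProgress callback can distinguish an instant cache hit from a live download. A minimal consumer, assuming the callback signature used above (the logging body is illustrative):

function onProgress(model, { loaded, total, cached }) {
  if (cached) {
    // Cache hits arrive as a single completed update.
    console.log(`${model}: ${(total / (1024 * 1024)).toFixed(1)} MB (cached)`);
  } else if (total > 0) {
    // Network downloads stream incremental byte counts.
    console.log(`${model}: ${Math.round((loaded / total) * 100)}%`);
  }
}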