From 6d36dbdf6207961b445d16e39b646aaf85d7c8a1 Mon Sep 17 00:00:00 2001
From: Taylor Hanayik
Date: Mon, 15 Jul 2024 21:01:02 +0100
Subject: [PATCH] refactor and make things run

---
 .gitignore        |  43 +++++++++++++++
 README.md         |   9 ++-
 index.html        |   2 +-
 main.js           | 138 +++++++++++++++++++++++++++-------------------
 package-lock.json |   3 +-
 package.json      |   8 ++-
 6 files changed, 140 insertions(+), 63 deletions(-)
 create mode 100644 .gitignore

diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..6b2c72e
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,43 @@
+.DS_Store
+node_modules
+/build
+/dist
+distForTests
+tests-out
+dist_intermediate
+devdocs
+/_md_docs
+/docs/niivue*.js
+/demos/niivue*.js
+/demos/dist
+/tests/niivue*.js
+niivue.es.js
+niivue.umd.js
+/downloads
+__diff_output__
+/coverage
+# local env files
+.env.local
+.env.*.local
+
+# Log files
+npm-debug.log*
+yarn-debug.log*
+yarn-error.log*
+pnpm-debug.log*
+
+# Editor directories and files
+.idea
+.vscode
+*.suo
+*.ntvs*
+*.njsproj
+*.sln
+*.sw?
+.DS_Store
+/test-results/
+/playwright-report/
+/blob-report/
+/playwright/.cache/
+/playwright/e2e/index.js
+/tests/index.js
\ No newline at end of file
diff --git a/README.md b/README.md
index 4f0f1da..b7eb5df 100644
--- a/README.md
+++ b/README.md
@@ -8,8 +8,15 @@ You can serve a hot-reloadable web page that allows you to interactively modify
 ```bash
-git git@github.com:neurolabusc/niivue-onnx.git
-cd niivue-neglect
+git clone git@github.com:neurolabusc/niivue-onnx.git
+cd niivue-onnx
 npm install
 npm run dev
 ```
+#### To build and serve the built version
+
+```bash
+npm run build
+npx http-server dist/
+```
+
diff --git a/index.html b/index.html
index 2ac337d..2f6044d 100644
--- a/index.html
+++ b/index.html
@@ -5,7 +5,7 @@
-    <title>Niivue Neglect Predictions</title>
+    <title>Niivue ONNX</title>
diff --git a/main.js b/main.js
index bd2955a..a434b00 100644
--- a/main.js
+++ b/main.js
@@ -1,5 +1,7 @@
 import { Niivue } from '@niivue/niivue'
-import * as ort from 'onnxruntime-web';
+// IMPORTANT: we need to import this specific file.
+import * as ort from "./node_modules/onnxruntime-web/dist/ort.all.mjs"
+console.log(ort);
 async function main() {
   aboutBtn.onclick = function () {
     let url = "https://github.com/axinging/mlmodel-convension-demo/blob/main/onnx/onnx-brainchop.html"
@@ -17,79 +19,103 @@ async function main() {
   const nv1 = new Niivue(defaults)
   nv1.attachToCanvas(gl1)
   await nv1.loadVolumes([{ url: './t1_crop.nii.gz' }])
+  // FIXME: Do we want to conform?
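+  // Note (editorial, not in the original commit): conforming looks necessary
+  // here, because the model loaded below expects a [1, 1, 256, 256, 256] input
+  // tensor and the length check before inference throws unless the volume has
+  // exactly 256*256*256 voxels. The positional boolean arguments passed to
+  // conform() are left untouched from the patch; their meaning should be
+  // confirmed against the @niivue/niivue docs for the installed version.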
+  const conformed = await nv1.conform(
+    nv1.volumes[0],
+    false,
+    true,
+    true
+  )
+  nv1.removeVolume(nv1.volumes[0])
+  nv1.addVolume(conformed)
   let feedsInfo = [];
   function getFeedInfo(feed, type, data, dims) {
-      const warmupTimes = 0;
-      const runTimes = 1;
-      for (let i = 0; i < warmupTimes + runTimes; i++) {
-        let typedArray;
-        let typeBytes;
-        if (type === 'bool') {
-          data = [data];
-          dims = [1];
-          typeBytes = 1;
-        } else if (type === 'int8') {
-          typedArray = Int8Array;
-        } else if (type === 'float16') {
-          typedArray = Uint16Array;
-        } else if (type === 'int32') {
-          typedArray = Int32Array;
-        } else if (type === 'uint32') {
-          typedArray = Uint32Array;
-        } else if (type === 'float32') {
-          typedArray = Float32Array;
-        } else if (type === 'int64') {
-          typedArray = BigInt64Array;
-        }
-        if (typeBytes === undefined) {
-          typeBytes = typedArray.BYTES_PER_ELEMENT;
-        }
+    const warmupTimes = 0;
+    const runTimes = 1;
+    for (let i = 0; i < warmupTimes + runTimes; i++) {
+      let typedArray;
+      let typeBytes;
+      if (type === 'bool') {
+        data = [data];
+        dims = [1];
+        typeBytes = 1;
+      } else if (type === 'int8') {
+        typedArray = Int8Array;
+      } else if (type === 'float16') {
+        typedArray = Uint16Array;
+      } else if (type === 'int32') {
+        typedArray = Int32Array;
+      } else if (type === 'uint32') {
+        typedArray = Uint32Array;
+      } else if (type === 'float32') {
+        typedArray = Float32Array;
+      } else if (type === 'int64') {
+        typedArray = BigInt64Array;
+      }
+      if (typeBytes === undefined) {
+        typeBytes = typedArray.BYTES_PER_ELEMENT;
+      }

-        let size, _data;
-        if (Array.isArray(data) || ArrayBuffer.isView(data)) {
-          size = data.length;
-          _data = data;
-        } else {
-          size = dims.reduce((a, b) => a * b);
-          if (data === 'random') {
-            _data = typedArray.from({ length: size }, () => getRandom(type));
-          } else {
-            _data = typedArray.from({ length: size }, () => data);
-          }
-        }
+      let size, _data;
+      if (Array.isArray(data) || ArrayBuffer.isView(data)) {
+        size = data.length;
+        _data = data;
+      } else {
+        size = dims.reduce((a, b) => a * b);
+        if (data === 'random') {
+          _data = typedArray.from({ length: size }, () => getRandom(type));
+        } else {
+          _data = typedArray.from({ length: size }, () => data);
+        }
+      }

-        if (i > feedsInfo.length - 1) {
-          feedsInfo.push(new Map());
-        }
-        feedsInfo[i].set(feed, [type, _data, dims, Math.ceil(size * typeBytes / 16) * 16]);
+      if (i > feedsInfo.length - 1) {
+        feedsInfo.push(new Map());
       }
-      return feedsInfo;
+      feedsInfo[i].set(feed, [type, _data, dims, Math.ceil(size * typeBytes / 16) * 16]);
+    }
+    return feedsInfo;
   }
   const option = {
-      executionProviders: [
-        {
-          //name: 'webgpu',
-          name: 'webgl',
-        },
-      ],
-      graphOptimizationLevel: 'extended',
-      optimizedModelFilepath: 'opt.onnx'
+    executionProviders: [
+      {
+        name: 'webgpu',
+      },
+    ],
+    graphOptimizationLevel: 'extended',
+    optimizedModelFilepath: 'opt.onnx'
   };
   const session = await ort.InferenceSession.create('./model_5_channels.onnx', option);
   const shape = [1, 1, 256, 256, 256];
-  const temp = getFeedInfo("input.1", "float32", 0, shape);
+  // FIXME: Do we want to use a real image for inference?
+  const imgData = nv1.volumes[0].img;
+  const expectedLength = shape.reduce((a, b) => a * b);
+  // FIXME: Do we need this?
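+  // Note (editorial, not in the original commit): keeping the check below seems
+  // wise; without it the failure would only surface later, inside tensor
+  // creation or session.run(), with a far less obvious message. A related,
+  // unverified concern is the element type: the tensor is declared 'float32',
+  // but volumes[0].img may be an integer typed array after conform(), in which
+  // case a conversion such as
+  //   const imgF32 = imgData instanceof Float32Array ? imgData : Float32Array.from(imgData)
+  // (plus whatever intensity normalisation the model was trained with) would be
+  // needed before the tensor is built.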
+  if (imgData.length !== expectedLength) {
+    throw new Error(`imgData length (${imgData.length}) does not match expected tensor length (${expectedLength})`);
+  }
+
+  const temp = getFeedInfo("input.1", "float32", imgData, shape);
   let dataA = temp[0].get('input.1')[1];
-  // let dataTemp = await loadJSON("./onnx-branchchop-input64.jsonc");
-  // dataA = dataTemp['data'];
   const tensorA = new ort.Tensor('float32', dataA, shape);
-
+
   const feeds = { "input.1": tensorA };
   // feed inputs and run
   console.log("before run");
   const results = await session.run(feeds);
-  console.log("after run");
+  console.log(results);
+  console.log("after run")
+  // FIXME: is this really the output data? It doesn't make sense when rendered,
+  // but then again, maybe the input was wrong?
+  const outData = results[39].data
+  const newImg = nv1.cloneVolume(0);
+  newImg.img = outData
+  // Add the output to niivue
+  nv1.addVolume(newImg)
+  nv1.setColormap(newImg.id, "red")
+  nv1.setOpacity(1, 0.5)
 }
 main()
diff --git a/package-lock.json b/package-lock.json
index 459e227..ca0dd8b 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -9,7 +9,7 @@
       "version": "0.1.0",
       "dependencies": {
         "@niivue/niivue": "^0.43.3",
-        "onnxruntime-web": "1.19.0-dev.20240713-281ed8c12d"
+        "onnxruntime-web": "^1.19.0-dev.20240713-281ed8c12d"
       },
       "devDependencies": {
         "vite": "^5.2.0"
@@ -869,7 +869,6 @@
       "version": "1.19.0-dev.20240713-281ed8c12d",
       "resolved": "https://registry.npmjs.org/onnxruntime-web/-/onnxruntime-web-1.19.0-dev.20240713-281ed8c12d.tgz",
       "integrity": "sha512-ezuhSjNisPo071SQh+9sBIvJG23dC0Bztfm7LcVLilmE2z49GlLCds0ZIyxROr0yXF9DR3eU+poBN195pEQ0+A==",
-      "license": "MIT",
       "dependencies": {
         "flatbuffers": "^1.12.0",
         "guid-typescript": "^1.0.9",
diff --git a/package.json b/package.json
index 134eb0a..5c42b9d 100644
--- a/package.json
+++ b/package.json
@@ -5,12 +5,14 @@
   "type": "module",
   "scripts": {
     "dev": "vite",
-    "build": "vite build",
+    "build": "vite build && npm run copyMJS && npm run copyWASM",
+    "copyMJS": "cp ./node_modules/onnxruntime-web/dist/*.mjs ./dist/assets/",
+    "copyWASM": "cp ./node_modules/onnxruntime-web/dist/*.wasm ./dist/assets/",
     "preview": "vite preview"
   },
   "dependencies": {
-    "@niivue/niivue":"^0.43.3",
-    "onnxruntime-web": "1.19.0-dev.20240713-281ed8c12d"
+    "@niivue/niivue": "^0.43.3",
+    "onnxruntime-web": "^1.19.0-dev.20240713-281ed8c12d"
   },
   "devDependencies": {
     "vite": "^5.2.0"
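
One open question in the patch is the FIXME about `results[39]`. The sketch below is not part of the commit; it shows one way the output handling could be made less fragile, assuming the same `session`, `feeds`, and `nv1` objects as in main.js. onnxruntime-web exposes the graph's output names on `session.outputNames`, and the returned `ort.Tensor` carries its `dims` and `type`, so the result can be inspected before it is copied into a Niivue volume. The argmax step is only a guess about what a 5-channel segmentation model would require.

```js
// Editorial sketch, not from the commit: read the output by name and, if the
// model returns per-class scores shaped [1, C, 256, 256, 256], collapse the
// class axis with an argmax before handing the data to Niivue.
const outputName = session.outputNames[0]      // may well be '39' for this graph
const results = await session.run(feeds)
const out = results[outputName]                // an ort.Tensor
console.log(outputName, out.dims, out.type)    // inspect before trusting it
let labels = out.data
if (out.dims.length === 5 && out.dims[1] > 1) {
  const channels = out.dims[1]
  const voxels = out.data.length / channels
  labels = new Float32Array(voxels)
  for (let v = 0; v < voxels; v++) {
    let best = 0
    for (let c = 1; c < channels; c++) {
      if (out.data[c * voxels + v] > out.data[best * voxels + v]) best = c
    }
    labels[v] = best
  }
}
const newImg = nv1.cloneVolume(0)
newImg.img = labels
nv1.addVolume(newImg)
nv1.setColormap(newImg.id, "red")
nv1.setOpacity(1, 0.5)
```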