diff --git a/webnn/conformance_tests/byob_readtensor.https.any.js b/webnn/conformance_tests/byob_readtensor.https.any.js
index 2f51753f66ae64..9a12bf02439919 100644
--- a/webnn/conformance_tests/byob_readtensor.https.any.js
+++ b/webnn/conformance_tests/byob_readtensor.https.any.js
@@ -32,7 +32,7 @@ promise_setup(async () => {
     mlTensor = await mlContext.createTensor({
       dataType: 'int32',
       dimensions: [2, 4],
-      usage: MLTensorUsage.WRITE_TO | MLTensorUsage.READ_FROM,
+      usage: MLTensorUsage.WRITE | MLTensorUsage.READ,
     });
   } catch (e) {
     throw new AssertionError(
@@ -141,7 +141,7 @@ promise_test(async (t) => {
   const tensor = await mlContext.createTensor({
     dataType: 'int32',
     dimensions: [2, 2],
-    usage: MLTensorUsage.READ_FROM,
+    usage: MLTensorUsage.READ,
   });
   const arrayBufferView = new Int32Array(2 * 2);
   const arrayBuffer = arrayBufferView.buffer;
@@ -159,7 +159,7 @@ promise_test(async (t) => {
   const tensor = await mlContext.createTensor({
     dataType: 'int32',
     dimensions: [2, 2],
-    usage: MLTensorUsage.READ_FROM,
+    usage: MLTensorUsage.READ,
   });
   const arrayBufferView = new Int32Array(2 * 2);
   const arrayBuffer = arrayBufferView.buffer;
diff --git a/webnn/conformance_tests/parallel-dispatch.https.any.js b/webnn/conformance_tests/parallel-dispatch.https.any.js
index 1264fa82eabd0a..5594d845d9beb7 100644
--- a/webnn/conformance_tests/parallel-dispatch.https.any.js
+++ b/webnn/conformance_tests/parallel-dispatch.https.any.js
@@ -33,7 +33,7 @@ promise_test(async () => {
   const operandDescriptor = {
     dataType: 'float32',
     dimensions: [1],
-    usage: MLTensorUsage.WRITE_TO | MLTensorUsage.READ_FROM,
+    usage: MLTensorUsage.WRITE | MLTensorUsage.READ,
   };
 
   const [mlGraph, inputTensor1, inputTensor2, outputTensor] =
@@ -73,7 +73,7 @@ promise_test(async () => {
   const operandDescriptor = {
     dataType: 'float32',
     dimensions: [1],
-    usage: MLTensorUsage.WRITE_TO | MLTensorUsage.READ_FROM,
+    usage: MLTensorUsage.WRITE | MLTensorUsage.READ,
   };
 
   const mlGraph = await buildMulGraph(mlContext, operandDescriptor, 3);
@@ -101,7 +101,7 @@ promise_test(async () => {
   const operandDescriptor = {
     dataType: 'float32',
     dimensions: [1],
-    usage: MLTensorUsage.WRITE_TO | MLTensorUsage.READ_FROM,
+    usage: MLTensorUsage.WRITE | MLTensorUsage.READ,
   };
 
   const mlGraph = await buildMulGraph(mlContext, operandDescriptor, 10);
@@ -140,7 +140,7 @@ promise_test(async () => {
   const operandDescriptor = {
     dataType: 'float32',
     dimensions: [1],
-    usage: MLTensorUsage.WRITE_TO | MLTensorUsage.READ_FROM,
+    usage: MLTensorUsage.WRITE | MLTensorUsage.READ,
   };
 
   const mlGraph = await buildMulGraph(mlContext, operandDescriptor, 9);
@@ -178,7 +178,7 @@ promise_test(async () => {
   const operandDescriptor = {
     dataType: 'float32',
     dimensions: [1],
-    usage: MLTensorUsage.WRITE_TO | MLTensorUsage.READ_FROM,
+    usage: MLTensorUsage.WRITE | MLTensorUsage.READ,
   };
 
   const mlGraph = await buildMulGraph(mlContext, operandDescriptor, 2);
@@ -211,7 +211,7 @@ promise_test(async () => {
   const operandDescriptor = {
     dataType: 'float32',
     dimensions: [1],
-    usage: MLTensorUsage.WRITE_TO | MLTensorUsage.READ_FROM,
+    usage: MLTensorUsage.WRITE | MLTensorUsage.READ,
   };
 
   // write/write...
@@ -250,7 +250,7 @@ promise_test(async () => {
   const operandDescriptor = {
     dataType: 'float32',
     dimensions: [1],
-    usage: MLTensorUsage.WRITE_TO | MLTensorUsage.READ_FROM,
+    usage: MLTensorUsage.WRITE | MLTensorUsage.READ,
   };
 
   // write/write...
@@ -288,7 +288,7 @@ promise_test(async () => {
   const operandDescriptor = {
     dataType: 'float32',
     dimensions: [1],
-    usage: MLTensorUsage.WRITE_TO | MLTensorUsage.READ_FROM,
+    usage: MLTensorUsage.WRITE | MLTensorUsage.READ,
   };
 
   const graphs = await Promise.all([3, 2].map(async (multiplier) => {
@@ -324,7 +324,7 @@ promise_test(async () => {
   const operandDescriptor = {
     dataType: 'float32',
     dimensions: [1],
-    usage: MLTensorUsage.WRITE_TO | MLTensorUsage.READ_FROM,
+    usage: MLTensorUsage.WRITE | MLTensorUsage.READ,
   };
 
   const graphs = await Promise.all([2, 3].map(async (multiplier) => {
diff --git a/webnn/conformance_tests/tensor.https.any.js b/webnn/conformance_tests/tensor.https.any.js
index 46850674661980..68153b8afd4224 100644
--- a/webnn/conformance_tests/tensor.https.any.js
+++ b/webnn/conformance_tests/tensor.https.any.js
@@ -167,7 +167,7 @@ const testWriteTensor = (testName) => {
     const tensorDescriptor = {
       dataType: 'int32',
       dimensions: [1],
-      usage: MLTensorUsage.WRITE_TO,
+      usage: MLTensorUsage.WRITE,
     };
     let mlTensor = await mlContext.createTensor(tensorDescriptor);
 
@@ -216,7 +216,7 @@ const testWriteTensor = (testName) => {
     const tensorDescriptor = {
       dataType: 'int32',
       dimensions: [2, 2],
-      usage: MLTensorUsage.WRITE_TO,
+      usage: MLTensorUsage.WRITE,
     };
     let mlTensor = await mlContext.createTensor(tensorDescriptor);
 
@@ -233,7 +233,7 @@ const testWriteTensor = (testName) => {
     const tensorDescriptor = {
       dataType: 'int32',
       dimensions: [2, 3],
-      usage: MLTensorUsage.WRITE_TO,
+      usage: MLTensorUsage.WRITE,
     };
     let mlTensor = await mlContext.createTensor(tensorDescriptor);
 
@@ -252,7 +252,7 @@ const testWriteTensor = (testName) => {
     let mlTensor = await mlContext.createTensor({
       dataType: 'int32',
       dimensions: [1],
-      usage: MLTensorUsage.WRITE_TO | MLTensorUsage.READ_FROM,
+      usage: MLTensorUsage.WRITE | MLTensorUsage.READ,
     });
 
     // Initialize the tensor.
@@ -275,7 +275,7 @@ const testWriteTensor = (testName) => {
     const tensorDescriptor = {
       dataType: 'int32',
       dimensions: [2, 2],
-      usage: MLTensorUsage.WRITE_TO | MLTensorUsage.READ_FROM,
+      usage: MLTensorUsage.WRITE | MLTensorUsage.READ,
     };
     let mlTensor = await mlContext.createTensor(tensorDescriptor);
 
@@ -326,7 +326,7 @@ const testReadTensor = (testName) => {
     let mlTensor = await mlContext.createTensor({
       dataType: 'int32',
       dimensions: [2, 2],
-      usage: MLTensorUsage.READ_FROM,
+      usage: MLTensorUsage.READ,
     });
 
     // Reading a destroyed MLTensor should reject.
@@ -340,7 +340,7 @@ const testReadTensor = (testName) => {
     let mlTensor = await mlContext.createTensor({
      dataType: 'int32',
       dimensions: [2, 3],
-      usage: MLTensorUsage.READ_FROM,
+      usage: MLTensorUsage.READ,
     });
 
     let promise = mlContext.readTensor(mlTensor);
@@ -356,7 +356,7 @@ const testReadTensor = (testName) => {
     let mlTensor = await mlContext.createTensor({
       dataType: 'int32',
       dimensions: [1024],
-      usage: MLTensorUsage.READ_FROM,
+      usage: MLTensorUsage.READ,
     });
 
     await assert_tensor_data_equals(mlContext, mlTensor, new Uint32Array(1024));
@@ -366,7 +366,7 @@ const testReadTensor = (testName) => {
     let mlTensor = await mlContext.createTensor({
       dataType: 'int32',
       dimensions: [1],
-      usage: MLTensorUsage.READ_FROM | MLTensorUsage.WRITE_TO,
+      usage: MLTensorUsage.READ | MLTensorUsage.WRITE,
     });
 
     // Initialize the tensor.
@@ -382,7 +382,7 @@ const testReadTensor = (testName) => {
     let mlTensor = await mlContext.createTensor({
       dataType: 'int32',
       dimensions: [1],
-      usage: MLTensorUsage.WRITE_TO | MLTensorUsage.READ_FROM,
+      usage: MLTensorUsage.WRITE | MLTensorUsage.READ,
     });
 
     // Initialize the tensor.
@@ -400,7 +400,7 @@ const testReadTensor = (testName) => {
     let mlTensor = await mlContext.createTensor({
       dataType: 'int32',
       dimensions: [1],
-      usage: MLTensorUsage.WRITE_TO | MLTensorUsage.READ_FROM,
+      usage: MLTensorUsage.WRITE | MLTensorUsage.READ,
     });
 
     // Initialize the tensor.
@@ -418,7 +418,7 @@ const testReadTensor = (testName) => {
     let mlTensor = await mlContext.createTensor({
       dataType: 'int32',
       dimensions: [1],
-      usage: MLTensorUsage.WRITE_TO | MLTensorUsage.READ_FROM,
+      usage: MLTensorUsage.WRITE | MLTensorUsage.READ,
     });
 
     // Initialize the tensor.
@@ -436,7 +436,7 @@ const testReadTensor = (testName) => {
     let mlTensor = await mlContext.createTensor({
       dataType: 'int32',
       dimensions: [1],
-      usage: MLTensorUsage.WRITE_TO | MLTensorUsage.READ_FROM,
+      usage: MLTensorUsage.WRITE | MLTensorUsage.READ,
     });
 
     const inputData = [0xAA, 0xAA, 0xAA, 0xAA];
@@ -453,7 +453,7 @@ const testReadTensor = (testName) => {
     const tensorDescriptor = {
       dataType: 'int32',
       dimensions: [2, 3],
-      usage: MLTensorUsage.READ_FROM,
+      usage: MLTensorUsage.READ,
     };
     let mlTensor = await mlContext.createTensor(tensorDescriptor);
 
@@ -489,7 +489,7 @@ const testDispatchTensor = (testName) => {
     const tensorDescriptor = {
       dataType: 'float32',
       dimensions: shape,
-      usage: MLTensorUsage.WRITE_TO | MLTensorUsage.READ_FROM,
+      usage: MLTensorUsage.WRITE | MLTensorUsage.READ,
     };
     const lhsOperand = builder.input('lhs', tensorDescriptor);
     const rhsOperand = builder.input('rhs', tensorDescriptor);
diff --git a/webnn/validation_tests/destroyContext.https.any.js b/webnn/validation_tests/destroyContext.https.any.js
index 5cba921c30681d..4ec31033a6d312 100644
--- a/webnn/validation_tests/destroyContext.https.any.js
+++ b/webnn/validation_tests/destroyContext.https.any.js
@@ -135,7 +135,7 @@ promise_test(async t => {
   const tensor = await context.createTensor({
     dataType: 'float32',
     dimensions: [1],
-    usage: MLTensorUsage.READ_FROM,
+    usage: MLTensorUsage.READ,
   });
   context.destroy();
   promise_rejects_dom(t, 'InvalidStateError', context.readTensor(tensor));
@@ -146,7 +146,7 @@ promise_test(async t => {
   const tensor = await context.createTensor({
     dataType: 'float32',
     dimensions: [1],
-    usage: MLTensorUsage.READ_FROM,
+    usage: MLTensorUsage.READ,
   });
   let promise = context.readTensor(tensor);
   context.destroy();
@@ -161,7 +161,7 @@ promise_test(async t => {
  const tensor = await context.createTensor({
     dataType: 'float32',
     dimensions: [1],
-    usage: MLTensorUsage.WRITE_TO,
+    usage: MLTensorUsage.WRITE,
   });
   let arrayBuffer = new ArrayBuffer(4);
   context.destroy();
diff --git a/webnn/validation_tests/destroyGraph.https.any.js b/webnn/validation_tests/destroyGraph.https.any.js
index 3724d6f0d34a67..9fd3a9e1ff7edf 100644
--- a/webnn/validation_tests/destroyGraph.https.any.js
+++ b/webnn/validation_tests/destroyGraph.https.any.js
@@ -110,17 +110,17 @@ promise_test(async t => {
   const lhsTensor = await context.createTensor({
     dataType: 'float32',
     dimensions: [1],
-    usage: MLTensorUsage.WRITE_TO,
+    usage: MLTensorUsage.WRITE,
   });
   const rhsTensor = await context.createTensor({
     dataType: 'float32',
     dimensions: [1],
-    usage: MLTensorUsage.WRITE_TO,
+    usage: MLTensorUsage.WRITE,
   });
   const outputTensor = await context.createTensor({
     dataType: 'float32',
     dimensions: [1],
-    usage: MLTensorUsage.READ_FROM,
+    usage: MLTensorUsage.READ,
  });
   // Initialize inputs
   const inputData = new Float32Array(1).fill(2.0);
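All of the hunks above apply the same rename of the MLTensorUsage flags: WRITE_TO becomes WRITE and READ_FROM becomes READ. As a minimal, illustrative sketch (not part of the patch), assuming a WebNN-capable browser and reusing the createTensor/writeTensor/readTensor pattern already present in these tests, the renamed flags are used like this:

// Hedged sketch only: shows the renamed usage flags in a tensor round-trip.
const mlContext = await navigator.ml.createContext();
const mlTensor = await mlContext.createTensor({
  dataType: 'int32',
  dimensions: [2, 2],
  usage: MLTensorUsage.WRITE | MLTensorUsage.READ,  // formerly WRITE_TO | READ_FROM
});
// Write four int32 values into the tensor, then read them back via the context.
await mlContext.writeTensor(mlTensor, new Int32Array([1, 2, 3, 4]));
const result = new Int32Array(await mlContext.readTensor(mlTensor));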