File: ONNX.webidl

/* -*- Mode: IDL; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this file,
 * You can obtain one at http://mozilla.org/MPL/2.0/.
 */

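// WebIDL bindings for the ONNX inference API. Both interfaces are only
// exposed where InferenceSession::InInferenceProcess returns true, i.e. to
// code running in the inference process. The API shape mirrors the
// onnxruntime-web JavaScript API: a Tensor value type plus an
// InferenceSession that loads a model and runs it.

// An n-dimensional array of model input/output data: `type` names the
// element type (the accepted names are not enumerated in this IDL), `data`
// is the backing buffer (a typed-array view, or a plain sequence that gets
// copied in), and `dims` gives the tensor shape.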
[Func="InferenceSession::InInferenceProcess", Exposed=(DedicatedWorker,Window)]
interface Tensor {
  [Throws]
  constructor(UTF8String type,
              (ArrayBufferView or sequence<any>) data,
              sequence<long> dims);

  [Cached, Pure]
  attribute sequence<long> dims;
  readonly attribute UTF8String type;
  readonly attribute ArrayBufferView data;
  readonly attribute TensorDataLocation location;
  Promise<any> getData(optional boolean releaseData);
  undefined dispose();
};

// Where a tensor's data currently lives: unattached ("none"), in CPU or
// pinned CPU memory, or on-device as a texture, GPU buffer, or ML tensor.
enum TensorDataLocation {
  "none",
  "cpu",
  "cpu-pinned",
  "texture",
  "gpu-buffer",
  "ml-tensor"
};

// Input/output types for InferenceSession.run(): feeds map input names to
// Tensors, fetches select the requested outputs (either a list of names or
// a nullable map that may pre-supply output Tensors), and the result maps
// output names to Tensors.
typedef record<UTF8String, Tensor> InferenceSessionTensorMapType;
typedef record<UTF8String, Tensor?> InferenceSessionNullableTensorMapType;
typedef InferenceSessionTensorMapType InferenceSessionFeedsType;
typedef (sequence<UTF8String> or InferenceSessionNullableTensorMapType) InferenceSessionFetchesType;
typedef InferenceSessionTensorMapType InferenceSessionReturnType;

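// Per-call options for InferenceSession.run(); the fields correspond to
// onnxruntime run options (log levels, a run tag, and a terminate flag).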
dictionary InferenceSessionRunOptions {
  unsigned short logSeverityLevel = 0; // 0 - 4
  unsigned long logVerbosityLevel = 0;
  boolean terminate = true;
  UTF8String tag = "";
};

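// Session-wide configuration for InferenceSession.create(); the fields
// correspond to onnxruntime session options: execution providers, intra-/
// inter-op thread counts, free-dimension overrides, graph optimization
// level, CPU memory arena and memory pattern, execution mode, profiling and
// logging controls, graph capture, a preferred location for output tensor
// data, and an `extra` bag of backend-specific settings.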
dictionary InferenceSessionSessionOptions {
  sequence<any> executionProviders;
  unsigned long intraOpNumThreads = 0;
  unsigned long interOpNumThreads = 0;
  record<UTF8String, unsigned long> freeDimensionOverrides;
  UTF8String graphOptimizationLevel = "all";
  boolean enableCpuMemArena = true;
  boolean enableMemPattern = true;
  UTF8String executionMode = "sequential";
  UTF8String optimizedModelFilePath = "";
  boolean enableProfiling = false;
  UTF8String profileFilePrefix = "";
  UTF8String logId = "";
  unsigned short logSeverityLevel = 4; // 0 - 4
  unsigned long logVerbosityLevel = 0;
  (TensorDataLocation or record<UTF8String, TensorDataLocation>) preferredOutputLocation;
  boolean enableGraphCapture = false;
  record<UTF8String, any> extra;
};

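// An ONNX inference session. create() loads a model from a URI or an
// in-memory buffer, run() executes the model with named input Tensors and
// resolves with the named output Tensors, and release() frees the
// underlying session.
//
// Minimal usage sketch (JavaScript, only meaningful where the
// InInferenceProcess gate passes). The model URI, the "wasm" execution
// provider and the "float32" element type are illustrative assumptions,
// not defined by this IDL:
//
//   const session = await InferenceSession.create("model.onnx", {
//     executionProviders: ["wasm"],  // assumed provider name
//   });
//   const input = new Tensor("float32", new Float32Array(2 * 3), [2, 3]);
//   const outputs = await session.run({ [session.inputNames[0]]: input });
//   const result = outputs[session.outputNames[0]];
//   const values = await result.getData(/* releaseData */ true);
//   await session.release();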
[Func="InferenceSession::InInferenceProcess", Exposed=(DedicatedWorker,Window)]
interface InferenceSession {
  [NewObject]
  Promise<InferenceSessionReturnType> run(InferenceSessionFeedsType feeds, optional InferenceSessionRunOptions options = {});
  [NewObject] static Promise<InferenceSession> create((UTF8String or Uint8Array) uriOrBuffer, optional InferenceSessionSessionOptions options = {});
  [BinaryName=ReleaseSession]
  Promise<undefined> release();
  undefined startProfiling();
  undefined endProfiling();
  [Cached, Pure]
  readonly attribute sequence<UTF8String> inputNames;
  [Cached, Pure]
  readonly attribute sequence<UTF8String> outputNames;
};