File: aboutInference.ftl

package info (click to toggle)
firefox 142.0.1-1
  • links: PTS, VCS
  • area: main
  • in suites: sid
  • size: 4,591,884 kB
  • sloc: cpp: 7,451,570; javascript: 6,392,463; ansic: 3,712,584; python: 1,388,569; xml: 629,223; asm: 426,919; java: 184,857; sh: 63,439; makefile: 19,150; objc: 13,059; perl: 12,983; yacc: 4,583; cs: 3,846; pascal: 3,352; lex: 1,720; ruby: 1,003; exp: 762; php: 436; lisp: 258; awk: 247; sql: 66; sed: 53; csh: 10
file content (36 lines) | stat: -rw-r--r-- 1,833 bytes parent folder | download
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

about-inference-title = Inference Manager
about-inference-header = ✨ Inference Manager
about-inference-description = This page gathers all information about local inference.
about-inference-warning = browser.ml.enable is set to false!
about-inference-simd-supported = SIMD supported
about-inference-onnx-runtime-file = ONNX Runtime file
about-inference-downloads-description = You can delete models here to free up disk space. They will be downloaded again on first inference.
about-inference-delete-button = 🗑️
  .title = Delete model
about-inference-processes-title = Running Inference Processes
about-inference-models-title = Models downloaded
about-inference-yes = Yes
about-inference-no = No
about-inference-total = Total
about-inference-no-processes = No inference processes are running.
about-inference-memory = Memory
about-inference-pid = PID
about-inference-file = File
about-inference-size = Size
about-inference-learn-more = Learn more about the platform here.
about-inference-category-local-inference = Local Inference
about-inference-category-http-inference = HTTP Inference
about-inference-category-models-storage = Models Storage
about-inference-last-used = Last Used
about-inference-last-updated = Last Updated
about-inference-category-benchmark = Benchmark
about-inference-benchmark-title = Running Benchmark
about-inference-benchmark-description = Performs a full benchmark on the local inference platform. This will take a while.
about-inference-benchmark-button =
  .value = Run Benchmark
about-inference-prompt-title = Confirm
about-inference-prompt-message = Are you sure you want to delete this model?