File: InferenceMode.cpp

Package: pytorch 1.13.1+dfsg-4
#include <c10/core/InferenceMode.h>
#include <stdexcept>

namespace c10 {
// Invariant:
//   is_enabled() ==
//   !c10::impl::tls_is_dispatch_key_included(DispatchKey::ADInplaceOrView);
// InferenceMode::is_enabled() is on the perf-critical path (TensorImpl
// constructor), so it is worth a separate TLS flag to skip the
// DispatchKeySet check.
bool InferenceMode::is_enabled() {
  return AutogradState::get_tls_state().get_inference_mode();
}
} // namespace c10
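
Usage note: the TLS flag read by is_enabled() is set and restored by the
c10::InferenceMode RAII guard declared in the same header. A minimal sketch,
assuming the translation unit is compiled and linked against libc10/libtorch:

#include <c10/core/InferenceMode.h>
#include <iostream>

int main() {
  // Outside any guard, inference mode is off.
  std::cout << c10::InferenceMode::is_enabled() << "\n";  // prints 0
  {
    // The RAII guard enables inference mode for this scope, flipping the
    // thread-local state that InferenceMode::is_enabled() reads.
    c10::InferenceMode guard;
    std::cout << c10::InferenceMode::is_enabled() << "\n";  // prints 1
  }
  // Leaving the scope restores the previous state.
  std::cout << c10::InferenceMode::is_enabled() << "\n";  // prints 0
  return 0;
}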