File: transform_rfactor.h

#pragma once

#include <torch/csrc/WindowsTorchApiMacro.h>

#include <torch/csrc/jit/codegen/cuda/ir_all_nodes.h>
#include <torch/csrc/jit/codegen/cuda/transform_iter.h>

#include <algorithm>
#include <vector>

namespace torch {
namespace jit {
namespace fuser {

// TODO: Only replay dispatch is really borrowed from TransformIter; we should
// reevaluate the reuse of dispatch for classes that inherit TransformIter.
class TORCH_CUDA_API TransformRFactor {
 public:
  // Create a copy of td, changing its history by preserving the given axes so
  // that they appear in the root domain.
  static TensorDomain* runReplay(TensorDomain*, std::vector<int> axes);

  static TensorDomain* runReplay2(TensorDomain*, std::vector<int> axes);
};

} // namespace fuser
} // namespace jit
} // namespace torch
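
Below is a minimal, hypothetical usage sketch (not part of the original header). Only the two TransformRFactor calls use the signatures declared above; the TensorDomain td, the choice of reduction axis 1, and the producer/consumer pairing of runReplay and runReplay2 are assumptions made for illustration.

#include <torch/csrc/jit/codegen/cuda/transform_rfactor.h>

using namespace torch::jit::fuser;

// Sketch: rfactor the axis at position 1 of an existing domain.
// `td` is assumed to come from elsewhere (e.g. a TensorView's domain());
// that setup is hypothetical and not shown here.
TensorDomain* rfactorDomains(TensorDomain* td) {
  // runReplay: copy of td whose history keeps the given axes in the root
  // domain (per the comment in the header); assumed here to be the
  // producer-side domain of the rfactored reduction.
  TensorDomain* producer_domain = TransformRFactor::runReplay(td, {1});

  // runReplay2: assumed to build the matching consumer-side domain for the
  // same axes.
  TensorDomain* consumer_domain = TransformRFactor::runReplay2(td, {1});

  (void)consumer_domain; // only the producer-side domain is returned here
  return producer_domain;
}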