File: transform_rfactor.h

#pragma once

#include <c10/macros/Export.h>

#include <torch/csrc/jit/codegen/cuda/ir_all_nodes.h>
#include <torch/csrc/jit/codegen/cuda/transform_iter.h>

#include <algorithm>
#include <vector>

namespace torch {
namespace jit {
namespace fuser {
namespace cuda {

// TODO: Only replay dispatch is really borrowed from TransformIter; we should
// reevaluate the reuse of dispatch for classes that inherit TransformIter.
class TORCH_CUDA_CU_API TransformRFactor {
 public:
  // Transform the provided tensor domain into two domains, a producer
  // domain and a consumer domain. The producer domain reduces the given
  // axes, while the consumer domain reduces the remaining reduction axes.
  static std::pair<TensorDomain*, TensorDomain*> runReplay(
      TensorDomain*,
      std::vector<int> axes);
};
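
// Illustrative usage sketch (not part of the original header; 'td' and the
// chosen axis are hypothetical). Assuming 'td' is a TensorDomain whose axis 2
// is a reduction axis (e.g. produced by splitting a reduction domain), the
// rfactored producer/consumer pair could be obtained as:
//
//   std::pair<TensorDomain*, TensorDomain*> domains =
//       TransformRFactor::runReplay(td, {2});
//   TensorDomain* producer = domains.first;  // reduces axis 2
//   TensorDomain* consumer = domains.second; // reduces the remaining
//                                            // reduction axes
//
// In practice this is usually reached through the TensorView scheduling API
// (e.g. TensorView::rFactor) rather than being called directly.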

} // namespace cuda
} // namespace fuser
} // namespace jit
} // namespace torch