File: CMakeLists.txt

Package: pytorch-cuda 2.6.0+dfsg-7

set(AOT_INDUCTOR_TEST_ROOT ${TORCH_ROOT}/test/cpp/aoti_inference)

# Build the custom TorchScript class library for the AOTInductor tests
add_library(aoti_custom_class SHARED aoti_custom_class.cpp)
set_target_properties(aoti_custom_class PROPERTIES
    LIBRARY_OUTPUT_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR})
if(USE_CUDA)
  target_compile_definitions(aoti_custom_class PRIVATE USE_CUDA)
elseif(USE_ROCM)
  target_compile_definitions(aoti_custom_class PRIVATE USE_ROCM)
endif()
# Link against LibTorch
target_link_libraries(aoti_custom_class torch)

# Custom command that runs compile_model.py to generate the TorchScript model files
add_custom_command(
    OUTPUT ${CMAKE_CURRENT_BINARY_DIR}/script_data.pt
           ${CMAKE_CURRENT_BINARY_DIR}/script_model_cpu.pt
           ${CMAKE_CURRENT_BINARY_DIR}/script_model_cuda.pt
    COMMAND python ${AOT_INDUCTOR_TEST_ROOT}/compile_model.py
    DEPENDS ${AOT_INDUCTOR_TEST_ROOT}/compile_model.py
)
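# Wrap the generated TorchScript files in an ALL target so they are produced
# as part of the default build.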
add_custom_target(aoti_script_model ALL
    DEPENDS ${CMAKE_CURRENT_BINARY_DIR}/script_data.pt
    DEPENDS ${CMAKE_CURRENT_BINARY_DIR}/script_model_cpu.pt
    DEPENDS ${CMAKE_CURRENT_BINARY_DIR}/script_model_cuda.pt
)
add_dependencies(aoti_script_model aoti_custom_class)

# Build the cpp gtest binary containing the cpp-only tests.
set(INDUCTOR_TEST_SRCS
  ${AOT_INDUCTOR_TEST_ROOT}/test.cpp
)

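# Listing the generated .pt files as sources makes the test executable depend
# on the custom commands that produce them.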
add_executable(test_aoti_inference
  ${TORCH_ROOT}/test/cpp/common/main.cpp
  ${INDUCTOR_TEST_SRCS}
  data.pt
  script_data.pt
  script_model_cpu.pt
  script_model_cuda.pt
)
add_dependencies(test_aoti_inference aoti_custom_class aoti_script_model)

# TODO: temporary until we can delete the old gtest polyfills.
target_compile_definitions(test_aoti_inference PRIVATE USE_GTEST)

# Custom command that runs test.py to generate data.pt for the tests
add_custom_command(
    OUTPUT data.pt
    COMMAND python ${AOT_INDUCTOR_TEST_ROOT}/test.py
    DEPENDS ${AOT_INDUCTOR_TEST_ROOT}/test.py
)

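# -Wl,--no-as-needed forces the linker to record aoti_custom_class as a needed
# dependency even though the test references no symbols from it directly; the
# library is loaded for its TorchScript class registration side effects.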
target_link_libraries(test_aoti_inference PRIVATE
  torch
  gtest
  -Wl,--no-as-needed aoti_custom_class
)

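# Propagate the GPU backend selection (CUDA or ROCm) to the test build.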
if(USE_CUDA)
  target_include_directories(test_aoti_inference PRIVATE ${ATen_CUDA_INCLUDE})
  target_compile_definitions(test_aoti_inference PRIVATE USE_CUDA)
elseif(USE_ROCM)
  target_include_directories(test_aoti_inference PRIVATE ${ATen_HIP_INCLUDE})
  target_compile_definitions(test_aoti_inference PRIVATE USE_ROCM)
endif()
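# Expose the build directory to the test sources so they can locate the
# generated .pt files at runtime.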
target_compile_definitions(test_aoti_inference PRIVATE
    CMAKE_CURRENT_BINARY_DIR=${CMAKE_CURRENT_BINARY_DIR}
)

if(INSTALL_TEST)
  install(TARGETS test_aoti_inference DESTINATION bin)
  # Install PDB files for MSVC builds
  if(MSVC AND BUILD_SHARED_LIBS)
    install(FILES $<TARGET_PDB_FILE:test_aoti_inference> DESTINATION bin OPTIONAL)
  endif()
endif()