import operator_benchmark as op_bench
import torch
import torch.nn as nn
"""
Microbenchmarks for the softmax operators.
"""
# Configs for softmax ops
softmax_configs_short = op_bench.config_list(
    attr_names=["N", "C", "H", "W"],
    attrs=[
        [1, 3, 256, 256],
        [4, 3, 256, 256],
    ],
    cross_product_configs={
        "device": ["cpu", "cuda"],
    },
    tags=["short"],
)

softmax_configs_long = op_bench.cross_product_configs(
    N=[8, 16],
    C=[3],
    H=[256, 512],
    W=[256, 512],
    device=["cpu", "cuda"],
    tags=["long"],
)

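# Ops benchmarked on 4-dim (N, C, H, W) inputs.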
softmax_ops_list = op_bench.op_list(
    attr_names=["op_name", "op_func"],
    attrs=[
        ["Softmax", nn.Softmax],
        ["Softmax2d", nn.Softmax2d],
        ["LogSoftmax", nn.LogSoftmax],
    ],
)

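# nn.Softmax2d expects image-like inputs (softmax over the channel dim), so
# only Softmax and LogSoftmax are benchmarked on 2-dim tensors.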
softmax_two_dims_ops_list = op_bench.op_list(
    attr_names=["op_name", "op_func"],
    attrs=[
        ["Softmax", nn.Softmax],
        ["LogSoftmax", nn.LogSoftmax],
    ],
)

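# (M, N) input shapes for the 2-dim benchmarks, plus the dim the softmax is
# computed over.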
softmax_two_dims_configs = op_bench.config_list(
    attr_names=["M", "N", "dim"],
    attrs=[
        [700, 23258, 0],
        [700, 23258, 1],
        [1024, 23258, 1],
        [128, 128, 1],
        [48, 128, 1],
        [16, 1024, 1],
        [32, 1024, 1],
        [48, 1024, 1],
        [16, 512, 1],
        [32, 512, 1],
        [48, 512, 1],
        [16, 256, 1],
        [32, 256, 1],
        [48, 256, 1],
    ],
    cross_product_configs={
        "device": ["cpu", "cuda"],
    },
    tags=["long"],
)

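# Benchmark for the 4-dim ops: times op_func on a random (N, C, H, W) tensor.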
class SoftmaxBenchmark(op_bench.TorchBenchmarkBase):
    def init(self, N, C, H, W, device, op_func):
        self.inputs = {"input": torch.rand(N, C, H, W, device=device)}
        self.op_func = op_func()

    def forward(self, input):
        return self.op_func(input)

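# Benchmark for the 2-dim ops: the reduction dim is forwarded to the module
# constructor.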
class Softmax2DimsBenchmark(op_bench.TorchBenchmarkBase):
    def init(self, M, N, dim, device, op_func):
        self.inputs = {"input": torch.rand(M, N, device=device)}
        self.op_func = op_func(dim=dim)

    def forward(self, input):
        return self.op_func(input)

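# Generate one PyTorch test per (op, config) combination.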
op_bench.generate_pt_tests_from_op_list(
    softmax_ops_list, softmax_configs_short + softmax_configs_long, SoftmaxBenchmark
)

op_bench.generate_pt_tests_from_op_list(
    softmax_two_dims_ops_list, softmax_two_dims_configs, Softmax2DimsBenchmark
)

if __name__ == "__main__":
    op_bench.benchmark_runner.main()
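
# Typical invocation, assuming this file sits under the operator_benchmark
# harness in the PyTorch repo (benchmarks/operator_benchmark/pt/):
#   python -m pt.softmax_test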