/******************************************************************************
* Copyright (c) Intel Corporation - All rights reserved. *
* This file is part of the LIBXSMM library. *
* *
* For information on the license, see the LICENSE file. *
* Further information: https://github.com/hfp/libxsmm/ *
* SPDX-License-Identifier: BSD-3-Clause *
******************************************************************************/
/* Sasikanth Avancha, Dhiraj Kalamkar (Intel Corp.)
******************************************************************************/
#pragma once
#include <string>
#include <stdio.h>
#include "Node.hpp"
#include "Engine.hpp"
#include "Params.hpp"
#include "Tensor.hpp"
#include "proto/gxm.pb.h"
#include "ReLUImpl.hpp"
#include "ReLUXSMM.hpp"
using namespace std;
using namespace gxm;
// Parameter container for a ReLU node, filled by parseReLUParams() from the
// protobuf NodeParameter. Carries the leaky-ReLU negative slope plus the
// data-type / compute-engine / algorithm selections consumed by ReLUNode.
class ReLUParams : public NNParams
{
  public:
    ReLUParams(void) {}
    virtual ~ReLUParams(void) {}

    // Slope applied to negative inputs (0 selects the standard ReLU).
    void set_negative_slope(float s) { neg_slope_ = s; }
    float get_negative_slope() { return neg_slope_; }

    void set_data_type(int t) { data_type_ = t; }
    int get_data_type() { return data_type_; }

    void set_compute_engine(int ce) { compute_engine_ = ce; }
    int get_compute_engine() { return compute_engine_; }

    void set_algo_type(int at) { algotype_ = at; }
    int get_algo_type() { return algotype_; }

  protected:
    // In-class initializers prevent a getter that runs before its setter
    // from reading indeterminate values (the original left these
    // uninitialized).
    float neg_slope_ = 0.f;
    int compute_engine_ = 0, algotype_ = 0, data_type_ = 0;
};
// Translate a protobuf NodeParameter describing a ReLU layer into a
// heap-allocated ReLUParams (returned as MLParams*; caller takes ownership).
// Asserts validate the node description: non-empty name/type and exactly one
// bottom and one top tensor.
static MLParams* parseReLUParams(NodeParameter* np)
{
  ReLUParams* rp = new ReLUParams();

  // Set name of node
  string str = np->name();
  assert(!str.empty());
  rp->set_node_name(str);

  // Set node type (ReLU)
  str = np->type();
  assert(!str.empty());
  rp->set_node_type(str);

  // Set tensor names: a ReLU node has exactly one input and one output.
  assert(np->bottom_size() == 1);
  assert(!np->bottom(0).empty());
  rp->set_bottom_names(np->bottom(0));

  assert(np->top_size() == 1);
  assert(!np->top(0).empty());
  rp->set_top_names(np->top(0));

  // Set mode (TRAIN or TEST) for the node.
  assert((np->mode() == TRAIN) || (np->mode() == TEST));
  rp->set_mode(np->mode());

  // Set backprop needed/not needed flag for this node.
  rp->set_bprop_flag(np->propagate_down());

  // Borrow the embedded ReLU message by const reference; the original copied
  // it, and protobuf message copies are needlessly expensive here.
  const ReLUParameter& p = np->relu_param();
  rp->set_negative_slope(p.negative_slope());
  rp->set_data_type(p.data_type());
  rp->set_compute_engine(p.engine());
  rp->set_algo_type(p.algotype());

  return rp;
}
// Graph node implementing the ReLU activation. The constructor, the
// propagation routines and configure() are defined in the corresponding
// implementation file; this header only declares the interface and state.
class ReLUNode : public NNNode
{
  public:
    ReLUNode(ReLUParams* p, MLEngine* e);
    virtual ~ReLUNode(void) {}

  protected:
    void forwardPropagate();
    void backPropagate();
    void configure(int engine);

    // Zero every dimension of a Shape descriptor.
    void shape_setzero(Shape* s)
    {
      for(int i=0; i<MAX_DIMS; i++)
        s->dims[i] = 0;
    }

    // Pointer members are nullptr-initialized so an unconfigured node fails
    // fast instead of dereferencing garbage; a constructor init-list (defined
    // elsewhere) overrides these defaults as usual.
    Tensor* tenTop_ = nullptr;                              // Output tensor pointer
    Tensor* tenBot_ = nullptr;                              // Input tensor pointer
    ReLUImplParams gparams_;
    TensorBuf *tenBotDiff_ = nullptr, *tenBotData_ = nullptr; // Data & Gradients with respect to input
    TensorBuf *tenTopData_ = nullptr, *tenTopDiff_ = nullptr; // Output data
    int count_ = 0;
    int bot_cengine_ = -1;                                  // -1 = engine not yet resolved
    Shape ts_;
    ReLUImpl *impl = nullptr;
    MLEngine* eptr_ = nullptr;
};