SMAUG
Simulating Machine Learning Applications on gem5-Aladdin
ref_activation_fun_op.h
#ifndef _OPERATORS_REF_ACTIVATION_FUN_OP_H_
#define _OPERATORS_REF_ACTIVATION_FUN_OP_H_

#include "assert.h"
#include "stdio.h"
#include "math.h"

#include "smaug/operators/common.h"

#ifdef __cplusplus
extern "C" {
#endif

// All the activation function implementations need to live in here for function
// inlining.

ALWAYS_INLINE
static inline void relu(float* inputs, float* results, int input_size) {
    relu_loop:
    for (int i = 0; i < input_size; i++) {
        float value = inputs[i];
        if (value < 0.0) {
            results[i] = 0.0;
        } else {
            results[i] = value;
        }
    }
}

ALWAYS_INLINE
static inline void lrelu(float* inputs,
                         float* results,
                         int input_size,
                         float slope) {
    lrelu_loop:
    for (int i = 0; i < input_size; i++) {
        float value = inputs[i];
        if (value < 0.0) {
            results[i] = slope * value;
        } else {
            results[i] = value;
        }
    }
}

ALWAYS_INLINE
static inline void elu(float* inputs,
                       float* results,
                       int input_size,
                       float alpha) {
    elu_loop:
    for (int i = 0; i < input_size; i++) {
        float value = inputs[i];
        if (value < 0.0) {
            results[i] = alpha * (exp(value) - 1);
        } else {
            results[i] = value;
        }
    }
}

ALWAYS_INLINE
static inline void selu(float* inputs,
                        float* results,
                        int input_size,
                        float alpha,
                        float lambda) {
    elu(inputs, results, input_size, alpha);
    selu_loop:
    for (int i = 0; i < input_size; i++) {
        results[i] = lambda * results[i];
    }
}

ALWAYS_INLINE
static inline void sigmoid(float* inputs, float* results, int input_size) {
    sigmoid_loop:
    for (int i = 0; i < input_size; i++) {
        results[i] = 1.0 / (1.0 + exp(-inputs[i]));
    }
}

ALWAYS_INLINE
static inline void tanh_act(float* inputs, float* results, int input_size) {
    int i;
    tanh_act_loop1:
    for (i = 0; i < input_size; i++) {
        results[i] = 2 * inputs[i];
    }

    sigmoid(results, results, input_size);

    tanh_act_loop2:
    for (i = 0; i < input_size; i++) {
        results[i] = 2 * results[i] - 1;
    }
}

ALWAYS_INLINE
static inline void hard_tanh_act(
        float* inputs, float* results, int input_size, float min, float max) {
    hard_tanh_loop:
    for (int i = 0; i < input_size; i++) {
        float value = inputs[i];
        results[i] = (value < min) ? min : (value > max) ? max : value;
    }
}

ALWAYS_INLINE
static inline void activation_fun(float* inputs,
                                  float* results,
                                  int inputs_size,
                                  activation_type function,
                                  activation_param_t params) {
    if (function == RELU) {
        relu(inputs, results, inputs_size);
    } else if (function == LRELU) {
        lrelu(inputs, results, inputs_size, params.slope);
    } else if (function == ELU) {
        elu(inputs, results, inputs_size, params.alpha);
    } else if (function == SELU) {
        selu(inputs, results, inputs_size, params.alpha, params.lambda);
    } else if (function == TANH) {
        tanh_act(inputs, results, inputs_size);
    } else if (function == HARD_TANH) {
        hard_tanh_act(inputs, results, inputs_size, params.min, params.max);
    } else if (function == SIGMOID) {
        sigmoid(inputs, results, inputs_size);
    } else if (function == SOFTMAX) {
        assert(false && "Softmax not added yet!");
    }
}

// Top level entry point for all Reference activation functions.
void ref_activation_fun_nc(float* inputs,
                           float* results,
                           int inputs_size,
                           activation_type function,
                           activation_param_t params);

#ifdef __cplusplus
}  // extern "C"
#endif

#endif
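
Two of the helpers above lean on simple algebraic identities rather than separate math-library calls: tanh_act rescales its input, reuses sigmoid, and then rescales the result, while selu is just a scaled elu. The identities being exploited (standard math, not something stated on this page) are:

    tanh(x) = 2 * sigmoid(2 * x) - 1
    selu(x) = lambda * elu(x, alpha)

where sigmoid(x) = 1 / (1 + exp(-x)) and elu(x, alpha) = alpha * (exp(x) - 1) for x < 0, and x otherwise.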
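
As a minimal host-side usage sketch of the activation_fun dispatcher (the include path, the sample values, and the slope are illustrative assumptions, not taken from this page):

    #include <stdio.h>
    #include "ref_activation_fun_op.h"  /* assumed include path */

    int main(void) {
        float inputs[4] = { -2.0f, -0.5f, 0.5f, 2.0f };
        float results[4];
        activation_param_t params;
        params.slope = 0.1f;  /* only the slope field is read for LRELU */
        activation_fun(inputs, results, 4, LRELU, params);
        for (int i = 0; i < 4; i++) {
            printf("%f -> %f\n", inputs[i], results[i]);
        }
        return 0;
    }

ref_activation_fun_nc, declared at the bottom of the header, is the documented top-level entry point for the reference activation functions.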
Referenced declarations

activation_type
    enum _activation_type
    The activation function to apply to an operator's output in hardware.

ref_activation_fun_nc
    void ref_activation_fun_nc(float* inputs, float* results, int inputs_size, activation_type function, activation_param_t params)
    Top level entry point for all Reference activation functions.
    Definition: ref_activation_fun_op.cpp:8

activation_param_t (_activation_param_t)
    Parameters to the activation function hardware.
    Definition: common.h:194

common.h
    Utilities for writing and invoking Aladdin kernels from Operators.

ALWAYS_INLINE
    #define ALWAYS_INLINE
    We have to disable all function inlining at the global level for Aladdin + LLVM-Tracer to work,...
    Definition: common.h:521