SMAUG
Simulating Machine Learning Applications on gem5-Aladdin
tensor.proto
syntax = "proto3";

package smaug;

import "smaug/core/types.proto";

message TensorShapeProto {
  repeated int32 dims = 1;
  DataLayout layout = 2;
  int32 alignment = 3;
}

message TensorProto {
  string name = 1;
  DataType data_type = 2;
  TensorShapeProto shape = 3;
  DataStorageFormat data_format = 4;
  // When we create a graph in Python, this field is not set; instead, all
  // tensor data is stored in a TensorDataArray so that the topology and the
  // parameters can be dumped into two separate proto buffers. It is set only
  // by Tensor::asTensorProto, where an intermediate tensor must be
  // materialized for a one-off use case.
  TensorData data = 5;
}

message TensorData {
  // The data stored in the tensor. Only the field matching the tensor's
  // data_type will be set.

  string name = 1;

  // Float16. This will be used for quantization. Note that since protobuf has
  // no int16 type, we pack two half-precision floats into each int32 element
  // here.
  repeated int32 half_data = 2 [packed = true];

  // Float32.
  repeated float float_data = 3 [packed = true];

  // Float64.
  repeated double double_data = 4 [packed = true];

  // Int32.
  repeated int32 int_data = 5 [packed = true];

  // Int64.
  repeated int64 int64_data = 6 [packed = true];

  // Bool.
  repeated bool bool_data = 7 [packed = true];
}

// The tensor data is stored separately from the TensorProto. Each TensorData
// message is linked to the corresponding tensor in the network topology file
// by the tensor's name. This keeps the network topology in a small text file
// that is easy to inspect, and it also lets us compress the parameters
// separately.
message TensorDataArray {
  repeated TensorData data_array = 1;
}
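
As the comment on TensorProto.data notes, the Python graph builder leaves that field unset and collects parameter values in a TensorDataArray keyed by tensor name. The sketch below illustrates that split with the generated protobuf bindings; the module path (smaug.core.tensor_pb2), the tensor name, and the output file name are illustrative assumptions rather than anything defined in this file.

import numpy as np
# Assumption: protoc places the Python bindings for tensor.proto here.
from smaug.core import tensor_pb2

# Topology side: a TensorProto describing the tensor, with no data attached.
# (The data_type/layout enums come from smaug/core/types.proto and are
# omitted here.)
weights = tensor_pb2.TensorProto()
weights.name = "fc0/weights"            # hypothetical tensor name
weights.shape.dims.extend([256, 128])
weights.shape.alignment = 8
# weights.data is deliberately left unset.

# Parameter side: the values go into a TensorData that carries the same name.
values = np.random.rand(256 * 128).astype(np.float32)
data = tensor_pb2.TensorData()
data.name = weights.name
data.float_data.extend(values.tolist())

params = tensor_pb2.TensorDataArray()
params.data_array.append(data)          # append copies the message

# The TensorProto is serialized inside the network topology proto (defined
# elsewhere); the parameters are dumped to their own file.
with open("model_params.pb", "wb") as f:    # hypothetical file name
    f.write(params.SerializeToString())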
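
The half_data field works around protobuf's lack of a 16-bit type by packing two half-precision floats into each int32 element. Below is a minimal NumPy sketch of such a packing; it assumes the first value of each pair occupies the low-order half of the word (the ordering SMAUG actually uses is not stated in this file), and the resulting list can be fed to TensorData.half_data.

import numpy as np

def pack_half_data(values):
    """Pack float16 values into int32 words, two halves per word."""
    halves = np.asarray(values, dtype=np.float16).ravel()
    if halves.size % 2:
        # Pad odd-length data so it fills a whole number of int32 words.
        halves = np.append(halves, np.float16(0))
    # Reinterpret each adjacent pair of float16 values as one 32-bit word.
    # On a little-endian host, the first value lands in the low 16 bits.
    return halves.view(np.int32).tolist()

def unpack_half_data(words, count):
    """Recover the original float16 values from packed int32 words."""
    packed = np.asarray(words, dtype=np.int32)
    return packed.view(np.float16)[:count]

# Round trip: four values fit exactly into two int32 elements.
original = [1.0, -2.5, 0.125, 3.0]
words = pack_half_data(original)               # suitable for half_data
restored = unpack_half_data(words, len(original))
assert np.array_equal(restored, np.asarray(original, dtype=np.float16))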
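
On the reading side, the TensorDataArray comment describes how parameters are rejoined with the topology: each TensorData is matched to its tensor by name. A sketch of that lookup, under the same generated-module assumption as above; how SMAUG's own loader performs this step may differ.

from smaug.core import tensor_pb2   # assumed generated-module path

def attach_params(tensor_protos, params_path):
    """Copy each named TensorData into the TensorProto with the same name.

    tensor_protos: TensorProto messages taken from the network topology proto
    (that container is defined elsewhere, not in this file).
    """
    params = tensor_pb2.TensorDataArray()
    with open(params_path, "rb") as f:
        params.ParseFromString(f.read())
    by_name = {d.name: d for d in params.data_array}
    for proto in tensor_protos:
        if proto.name in by_name:
            proto.data.CopyFrom(by_name[proto.name])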