-
Notifications
You must be signed in to change notification settings - Fork 58
Expand file tree
/
Copy pathRNTensorFlowInferenceModule.java
More file actions
executable file
·148 lines (128 loc) · 4.87 KB
/
RNTensorFlowInferenceModule.java
File metadata and controls
executable file
·148 lines (128 loc) · 4.87 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
package com.rntensorflow;
import static com.rntensorflow.converter.ArrayConverter.*;

import com.facebook.react.bridge.*;

import org.tensorflow.DataType;
import org.tensorflow.Graph;
import org.tensorflow.Session;
import org.tensorflow.Tensor;
import org.tensorflow.contrib.android.RunStats;

import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.nio.DoubleBuffer;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
import java.nio.LongBuffer;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * React Native bridge module exposing TensorFlow inference to JavaScript.
 *
 * <p>Each inference session is keyed by a caller-supplied string {@code id}.
 * JS drives the session through {@link #feed}, {@link #run} and
 * {@link #fetch}; every method reports its outcome through the supplied
 * {@link Promise} (resolve on success, reject with the thrown exception
 * on failure).
 */
public class RNTensorFlowInferenceModule extends ReactContextBaseJavaModule {

  private final ReactApplicationContext reactContext;

  /** Active inference sessions, keyed by the id supplied from JS. */
  private final Map<String, RNTensorflowInference> inferenceMap = new HashMap<>();

  public RNTensorFlowInferenceModule(ReactApplicationContext reactContext) {
    super(reactContext);
    this.reactContext = reactContext;
  }

  @Override
  public String getName() {
    return "RNTensorFlowInference";
  }

  @Override
  public void onCatalystInstanceDestroy() {
    // Iterate over the values and clear afterwards: the previous code removed
    // entries from the map while iterating its keySet(), which throws
    // ConcurrentModificationException as soon as more than one session exists.
    for (RNTensorflowInference inference : inferenceMap.values()) {
      if (inference != null) {
        inference.close();
      }
    }
    inferenceMap.clear();
  }

  /**
   * Looks up the inference session for {@code id}.
   *
   * @throws IllegalStateException if no session with that id was initialized,
   *     giving the JS caller a descriptive rejection instead of an opaque NPE.
   */
  private RNTensorflowInference requireInference(String id) {
    RNTensorflowInference inference = inferenceMap.get(id);
    if (inference == null) {
      throw new IllegalStateException("No inference initialized for id: " + id);
    }
    return inference;
  }

  /**
   * Creates a new inference session for {@code id} backed by {@code model}
   * and registers its graph with the graph module.
   */
  @ReactMethod
  public void initTensorFlowInference(String id, String model, Promise promise) {
    try {
      RNTensorflowInference inference = RNTensorflowInference.init(reactContext, model);
      inferenceMap.put(id, inference);

      RNTensorFlowGraphModule graphModule =
          reactContext.getNativeModule(RNTensorFlowGraphModule.class);
      graphModule.init(id, inference.getTfContext().graph);

      promise.resolve(true);
    } catch (Exception e) {
      promise.reject(e);
    }
  }

  /**
   * Feeds an input tensor into the session.
   *
   * <p>{@code data} carries: {@code name} (input tensor name), optional
   * {@code shape} (defaults to a scalar shape), optional {@code dtype}
   * (defaults to DOUBLE), and {@code data} (the flat values).
   */
  @ReactMethod
  public void feed(String id, ReadableMap data, Promise promise) {
    try {
      RNTensorflowInference inference = requireInference(id);
      String inputName = data.getString("name");
      long[] shape =
          data.hasKey("shape") ? readableArrayToLongArray(data.getArray("shape")) : new long[0];
      DataType dtype =
          data.hasKey("dtype")
              ? DataType.valueOf(data.getString("dtype").toUpperCase())
              : DataType.DOUBLE;

      if (dtype == DataType.DOUBLE) {
        double[] srcData = readableArrayToDoubleArray(data.getArray("data"));
        inference.feed(inputName, Tensor.create(shape, DoubleBuffer.wrap(srcData)));
      } else if (dtype == DataType.FLOAT) {
        float[] srcData = readableArrayToFloatArray(data.getArray("data"));
        inference.feed(inputName, Tensor.create(shape, FloatBuffer.wrap(srcData)));
      } else if (dtype == DataType.INT32) {
        int[] srcData = readableArrayToIntArray(data.getArray("data"));
        inference.feed(inputName, Tensor.create(shape, IntBuffer.wrap(srcData)));
      } else if (dtype == DataType.INT64) {
        // Fix: the old code wrapped double[] in a DoubleBuffer here, which makes
        // Tensor.create produce a DOUBLE tensor — the requested INT64 dtype was
        // silently ignored. A LongBuffer yields a genuine INT64 tensor.
        long[] srcData = readableArrayToLongArray(data.getArray("data"));
        inference.feed(inputName, Tensor.create(shape, LongBuffer.wrap(srcData)));
      } else if (dtype == DataType.UINT8) {
        // NOTE(review): this creates an INT32 tensor, not UINT8. A true uint8
        // tensor would need Tensor.create(UInt8.class, shape, ByteBuffer) plus a
        // byte converter — confirm whether downstream graphs rely on the int32
        // widening before changing it.
        int[] srcData = readableArrayToIntArray(data.getArray("data"));
        inference.feed(inputName, Tensor.create(shape, IntBuffer.wrap(srcData)));
      } else if (dtype == DataType.BOOL) {
        byte[] srcData = readableArrayToByteBoolArray(data.getArray("data"));
        inference.feed(inputName, Tensor.create(Boolean.class, shape, ByteBuffer.wrap(srcData)));
      } else if (dtype == DataType.STRING) {
        byte[] srcData = readableArrayToByteStringArray(data.getArray("data"));
        inference.feed(inputName, Tensor.create(String.class, shape, ByteBuffer.wrap(srcData)));
      } else {
        promise.reject(new IllegalArgumentException("Data type is not supported"));
        return;
      }
      promise.resolve(true);
    } catch (Exception e) {
      promise.reject(e);
    }
  }

  /** Runs the graph, materializing the named output tensors. */
  @ReactMethod
  public void run(String id, ReadableArray outputNames, boolean enableStats, Promise promise) {
    try {
      RNTensorflowInference inference = requireInference(id);
      inference.run(readableArrayToStringArray(outputNames), enableStats);
      promise.resolve(true);
    } catch (Exception e) {
      promise.reject(e);
    }
  }

  /** Resolves with the value of a previously-run output tensor. */
  @ReactMethod
  public void fetch(String id, String outputName, Promise promise) {
    try {
      RNTensorflowInference inference = requireInference(id);
      promise.resolve(inference.fetch(outputName));
    } catch (Exception e) {
      promise.reject(e);
    }
  }

  /** Resets the session's TensorFlow context (clears fed/fetched tensors). */
  @ReactMethod
  public void reset(String id, Promise promise) {
    try {
      RNTensorflowInference inference = requireInference(id);
      inference.getTfContext().reset();
      promise.resolve(true);
    } catch (Exception e) {
      promise.reject(e);
    }
  }

  /**
   * Closes the session's native resources. The entry intentionally stays in
   * {@code inferenceMap} (matching the original behavior); a later
   * {@code onCatalystInstanceDestroy} close on it is the existing contract —
   * confirm with {@code RNTensorflowInference.close()} that double-close is safe.
   */
  @ReactMethod
  public void close(String id, Promise promise) {
    try {
      RNTensorflowInference inference = requireInference(id);
      inference.close();
      promise.resolve(true);
    } catch (Exception e) {
      promise.reject(e);
    }
  }
}