// NOTE(review): damaged listing — every line below starts with a stray decimal
// prefix (the original source's line numbers) and the numbering jumps
// (16 -> 26, 27 -> 32), so statements are missing from this view. Recover the
// original file before editing; nothing here compiles as-is.
//
// This appears to be the body of
//   void loadObjectLabels(const std::string &f, std::vector<std::string> &objs)
// (prototype visible at the end of this chunk): it reads a protobuf-text-style
// label file line by line and, inside an "item {" block, collects the text
// between double quotes into `objs`.
11 std::getline(file, line,
'\n');
// Enter the per-item block only when a line is exactly "item {".
12 if(line ==
"item {") {
13 while(std::getline(file, line,
'\n')) {
// Scan the line character by character looking for '"' delimiters.
15 for (
size_t i = 0; i < line.size(); i++) {
16 if (line.at(i) ==
'"') {
// `start`/`end` are set in listing lines 17-25 (missing from this view);
// presumably the indices just inside the opening/closing quotes — TODO confirm.
26 if (start >= 0 && end >= 0) {
// NOTE(review): std::move on the substr() temporary is redundant — substr
// already yields an rvalue. Harmless, but could be dropped in a real edit.
27 objs.emplace_back(std::move(line.substr(start, end - start)));
// Listing lines 28-31 are missing; this second read loop (line 32) appears to
// skip the remainder of the current "item { ... }" block — TODO confirm.
32 while(std::getline(file, line,
'\n')) {
// NOTE(review): damaged listing — stray line-number prefixes and gaps in the
// numbering (42 -> 45, 49 -> 52, 54 -> 63) mean code is missing from this view,
// including the if/else that must separate the two interpreter-build paths
// below. Do not edit without recovering the original source.
//
// Constructor: takes (model-file path, Optimization) pairs plus a thread
// count `th`; for each entry it loads a TFLite flatbuffer and builds an
// interpreter, with what looks like an Edge-TPU path and a plain-CPU
// fallback path — the selecting condition (presumably the Optimization
// value) is in the missing lines. TODO confirm.
39TfModel::TfModel(std::initializer_list<std::pair<const char*, Optimization> > models,
size_t th) {
41 for(
auto item = models.begin(); item != models.end(); item++) {
// Load the flatbuffer from disk into the `map` member (item->first is the path).
42 this->
map = tflite::FlatBufferModel::BuildFromFile(item->first);
// --- apparent Edge-TPU branch (listing lines 45-49) ---
// Register the Edge TPU custom op with the op resolver.
45 this->
resolver.AddCustom(edgetpu::kCustomOp, edgetpu::RegisterCustomOp());
46 tflite::InterpreterBuilder builder(*this->
map, this->
resolver);
47 builder.SetNumThreads(th);
// InterpreterBuilder::operator() populates the `model` interpreter member.
48 builder(&this->
model);
// Open (and hold a shared handle to) the Edge TPU device.
49 this->
edgetpu_context = edgetpu::EdgeTpuManager::GetSingleton()->OpenDevice();
// --- apparent CPU fallback branch (listing lines 52-54); the enclosing
// else is in the missing lines — TODO confirm ---
52 tflite::InterpreterBuilder builder(*this->
map, this->
resolver);
53 builder.SetNumThreads(th);
54 builder(&this->
model);
// Listing lines 55-62 missing. Post-build setup: allocate tensor buffers,
// then derive the expected input geometry from the sole input tensor.
63 this->
model->AllocateTensors();
64 if(this->
model->inputs().size() == 1) {
65 TfLiteTensor* input = this->
model->input_tensor(0);
66 TfLiteIntArray* dims = input->dims;
// dims->data[1] x dims->data[2] taken as width x height; assumes an NHWC
// input of rank >= 4 — TODO confirm against the models actually used.
67 this->
input_size = cv::Size(dims->data[1], dims->data[2]);
// Guard for a 3-channel uint8 (quantized) image input; the guarded body
// is beyond this view.
68 if(dims->data[3] == 3 && input->type == kTfLiteUInt8) {
// NOTE(review): extraction residue — these read like member declarations of
// the TfModel class (they match the this->map / this->model / this->resolver /
// this->edgetpu_context uses in the constructor fragment above) plus a free
// function prototype, but the enclosing class and the terminating semicolons
// were lost. TODO: restore from the original header.
// Owning handle to the loaded .tflite flatbuffer.
std::unique_ptr< tflite::FlatBufferModel > map
// Owning handle to the built interpreter.
std::unique_ptr< tflite::Interpreter > model
// Op resolver; the Edge TPU custom op is added to it in the constructor.
tflite::ops::builtin::BuiltinOpResolver resolver
// Shared handle to the opened Edge TPU device (kept alive for the interpreter).
std::shared_ptr< edgetpu::EdgeTpuContext > edgetpu_context
// Parses object labels from file `f` into `objs` (fragmentary body above).
void loadObjectLabels(const std::string &f, std::vector< std::string > &objs)