#include "cpp-tools/src/resources.h"
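
// Assumed, not shown in this fragment: the listing also depends on the
// standard headers below, on OpenCV, and on the ntcore/cscore headers
// (presumably pulled in through the class's own header). HRC is taken to be
// an alias for std::chrono::high_resolution_clock defined elsewhere in the
// project.
#include <chrono>
#include <iostream>
#include <thread>
#include <vector>
#include <opencv2/opencv.hpp>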

VisionServer::VisionServer() {
    std::cout << "Init VisionServer" << std::endl;
    ntable()->PutNumber("Cameras Available", 0);
    ntable()->PutNumber("Pipelines Available", 0);
    ntable()->PutString("Status", "Offline");
// ...
}
    this->SetVideoMode(cam.GetVideoMode());
    this->input.SetSource(cam);
// ...
    this->SetVideoMode(pipe.GetVideoMode());
    this->input.SetSource(pipe);
// ...
    this->SetVideoMode(src.GetVideoMode());
    this->input.SetSource(src);
// ...
    this->input.GrabFrame(io_frame);
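
// Camera registration (addCamera()/addCameras()/setCameras()): the given
// VisionCamera objects are moved into the server's camera list and each one
// is made adjustable over NetworkTables.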
    _inst.cameras.emplace_back(std::move(c));
// ...
    _inst.cameras.back().setNetworkAdjustable();
// ...
    size_t num = cms.size();
// ...
        std::make_move_iterator(cms.begin()),
        std::make_move_iterator(cms.end())
// ...
    for(size_t i = _inst.cameras.size() - num; i < _inst.cameras.size(); i++) {
// ...
        _inst.cameras.at(i).setNetworkAdjustable();
// ...
    for(size_t i = 0; i < _inst.cameras.size(); i++) {
// ...
        _inst.cameras.at(i).setNetworkAdjustable();
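
// Stream registration (addStreams() and related overloads): OutputStream
// entries are created with generated "Stream N" names, apparently from
// caller-supplied names or name/port pairs, or by wrapping existing
// cs::MjpegServer objects.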
    _inst.streams.emplace_back("Stream " + std::to_string(_inst.streams.size() + 1));
// ...
    for(size_t i = 0; i < n; i++) {
        _inst.streams.emplace_back("Stream " + std::to_string(_inst.streams.size() + 1));
// ...
    for(auto itr = strms.begin(); itr != strms.end(); itr++) {
        _inst.streams.emplace_back(*itr);
// ...
    for(auto itr = strms.begin(); itr != strms.end(); itr++) {
        _inst.streams.emplace_back(itr->first, itr->second);
// ...
    for(size_t i = 0; i < strms.size(); i++) {
        _inst.streams.push_back(cs::MjpegServer(strms.at(i)));
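
// OutputStream wraps a cs::MjpegServer and mirrors its state on a
// NetworkTables subtable named after the stream; the "Source Index" entry is
// kept in sync with the locally cached index (local_idx) in both directions.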
    MjpegServer(std::move(s)), table(OutputStream::ntable()->GetSubTable(this->GetName()))
// ...
    this->table->GetEntry("Port").SetDouble(this->GetPort());
// ...
    if(this->local_idx != i) {
// ...
        this->table->GetEntry("Source Index").SetDouble(this->local_idx = i);
// ...
    int nt = this->table->GetEntry("Source Index").GetDouble(0);
    if(nt != this->local_idx) {
        this->local_idx = nt;
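
// pipelineRunner(): per-pipeline worker. It publishes the pipeline's control
// entries ("Enable Processing", "Source Index", "Statistics Verbosity",
// "Max FPS") and a "stats" subtable, then loops: grab a frame, run the
// pipeline's processing, optionally overlay timing text, publish the frame,
// update the statistics entries, and sleep to hold the FPS cap.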
    pipe->table->PutBoolean("Enable Processing", true);
    pipe->table->PutNumber("Source Index", 1);
    pipe->table->PutNumber("Statistics Verbosity", 0);
    pipe->table->PutNumber("Max FPS", fps_cap);
// ...
    std::shared_ptr<nt::NetworkTable> stats{pipe->table->GetSubTable("stats")};
// ...
    double init_time = 0, proc_time = 0, out_time = 0, active_time = 1, full_time = 0;
    HRC::time_point beg_frame, beg_proc, end_proc, end_frame, last;
    cv::Mat frame = cv::Mat::zeros(cv::Size(1, 1), CV_8UC3);
// ...
        beg_frame = HRC::now();
        int n = pipe->table->GetEntry("Source Index").GetDouble(0);
// ...
        if(pipe->table->GetEntry("Enable Processing").GetBoolean(false) &&
            pipe->input.GrabFrame(frame)
// ...
            beg_proc = HRC::now();
// ... (the pipeline's processing step presumably runs between these timestamps)
            end_proc = HRC::now();
            int verbosity = pipe->table->GetEntry("Statistics Verbosity").GetDouble(0);
// ...
            cv::putText(
                frame, std::to_string(fps),
                cv::Point(5, 20), cv::FONT_HERSHEY_DUPLEX,
                0.65, cv::Scalar(0, 255, 0), 1, cv::LINE_AA
            );
// ...
            cv::putText(
                frame, "Active: " + std::to_string(active_time * 1000) + "ms",
                cv::Point(5, 45), cv::FONT_HERSHEY_DUPLEX,
                0.65, cv::Scalar(0, 255, 0), 1, cv::LINE_AA
            );
// ...
            cv::putText(
                frame, "Init: " + std::to_string(init_time * 1000) + "ms",
                cv::Point(5, 70), cv::FONT_HERSHEY_DUPLEX,
                0.65, cv::Scalar(0, 255, 0), 1, cv::LINE_AA
            );
// ...
            cv::putText(
                frame, "Process: " + std::to_string(proc_time * 1000) + "ms",
                cv::Point(5, 95), cv::FONT_HERSHEY_DUPLEX,
                0.65, cv::Scalar(0, 255, 0), 1, cv::LINE_AA
            );
// ...
            cv::putText(
                frame, "Output: " + std::to_string(out_time * 1000) + "ms",
                cv::Point(5, 120), cv::FONT_HERSHEY_DUPLEX,
                0.65, cv::Scalar(0, 255, 0), 1, cv::LINE_AA
            );
// ...
            pipe->PutFrame(frame);
            end_frame = HRC::now();
// ...
            beg_proc = end_proc = end_frame = HRC::now();
// ...
        init_time = std::chrono::duration<double>(beg_proc - beg_frame).count();
        proc_time = std::chrono::duration<double>(end_proc - beg_proc).count();
        out_time = std::chrono::duration<double>(end_frame - end_proc).count();
        full_time = std::chrono::duration<double>(beg_frame - last).count();
        active_time = init_time + proc_time + out_time;
// ...
            stats->PutNumber("FPS: ", fps);
            stats->PutNumber("Active time(ms): ", active_time * 1000);
            stats->PutNumber("Init time(ms): ", init_time * 1000);
            stats->PutNumber("Process time(ms): ", proc_time * 1000);
            stats->PutNumber("Output time(ms): ", out_time * 1000);
// ...
        fps_cap = pipe->table->GetNumber("Max FPS", fps_cap);
        std::this_thread::sleep_for(
            std::chrono::nanoseconds((uint64_t)(1E9 / fps_cap)) - (HRC::now() - beg_frame)
        );
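
// Compensation routine: scans for the first connected camera ("active"),
// reattaches the pipelines to it, and repoints the output streams (at the
// first pipeline when pipelines exist, otherwise at the camera itself),
// logging each reconnection or a failure message if no camera is connected.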
    for(size_t i = 0; i < _inst.cameras.size(); i++) {
        if(_inst.cameras.at(i).IsConnected()) {
// ...
            std::cout << "Compensation: connected [pipeline] "
                << _inst.pipelines[i]->getName() << " to [camera] "
                << _inst.cameras[active].GetName() << newline;
// ...
        for(size_t j = 0; j < _inst.streams.size(); j++) {
            _inst.streams.at(j).setPipelineIdx(1);
            std::cout << "Compensation: connected [stream] "
                << _inst.streams[j].GetName() << " to [pipeline] "
                << _inst.pipelines[0]->getName() << newline;
// ...
        for(size_t j = 0; j < _inst.streams.size(); j++) {
            _inst.streams.at(j).setCameraIdx(active + 1);
            std::cout << "Compensation: connected [stream] "
                << _inst.streams[j].GetName() << " to [camera] "
                << _inst.cameras[active].GetName() << newline;
// ...
        std::cout << "Compensation failed. No cameras available." << std::endl;
    HRC::time_point tbuff;
// ...
    uint64_t max_nanos = 1E9 / fps_cap;
// ...
    std::vector<std::thread> runners;
    for(size_t i = 0; i < _inst.pipelines.size(); i++) {
// ...
    for(size_t i = 0; i < _inst.streams.size(); i++) {
// ...
        nt::NetworkTableInstance::GetDefault().Flush();
// ...
        std::this_thread::sleep_for(
            std::chrono::nanoseconds(max_nanos) - (HRC::now() - tbuff)
        );
// ...
    for(size_t i = 0; i < runners.size(); i++) {
        runners.at(i).join();
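
// runSingle(): single-threaded variant. One loop tracks the selected
// camera/pipeline pair (c_idx/p_idx), points the streams at the chosen
// source, grabs a frame from the active pipeline's input, draws the same
// timing overlays, publishes statistics, flushes NetworkTables, and sleeps to
// hold the FPS cap.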
    int c_idx = 0, p_idx = 0;
// ...
    double init_time = 0, proc_time = 0, out_time = 0, active_time = 1, full_time = 0;
    HRC::time_point beg_frame, beg_proc, end_proc, end_frame, last;
    cv::Mat frame = cv::Mat::zeros(cv::Size(1, 1), CV_8UC3);
// ...
        beg_frame = HRC::now();
// ...
        for(size_t i = 0; i < _inst.streams.size(); i++) {
            _inst.streams[i].setSourceIdx(n);
// ...
        for(size_t i = 0; i < _inst.streams.size(); i++) {
// ...
            _inst.pipelines[p_idx]->input.GrabFrame(frame)
// ...
            beg_proc = HRC::now();
// ...
            end_proc = HRC::now();
// ...
            cv::putText(
                frame, std::to_string(fps),
                cv::Point(5, 20), cv::FONT_HERSHEY_DUPLEX,
                0.65, cv::Scalar(0, 255, 0), 1, cv::LINE_AA
            );
// ...
            cv::putText(
                frame, "Active: " + std::to_string(active_time * 1000) + "ms",
                cv::Point(5, 45), cv::FONT_HERSHEY_DUPLEX,
                0.65, cv::Scalar(0, 255, 0), 1, cv::LINE_AA
            );
// ...
            cv::putText(
                frame, "Init: " + std::to_string(init_time * 1000) + "ms",
                cv::Point(5, 70), cv::FONT_HERSHEY_DUPLEX,
                0.65, cv::Scalar(0, 255, 0), 1, cv::LINE_AA
            );
// ...
            cv::putText(
                frame, "Process: " + std::to_string(proc_time * 1000) + "ms",
                cv::Point(5, 95), cv::FONT_HERSHEY_DUPLEX,
                0.65, cv::Scalar(0, 255, 0), 1, cv::LINE_AA
            );
// ...
            cv::putText(
                frame, "Output: " + std::to_string(out_time * 1000) + "ms",
                cv::Point(5, 120), cv::FONT_HERSHEY_DUPLEX,
                0.65, cv::Scalar(0, 255, 0), 1, cv::LINE_AA
            );
// ...
            end_frame = HRC::now();
// ...
            beg_proc = end_proc = end_frame = HRC::now();
// ...
        init_time = std::chrono::duration<double>(beg_proc - beg_frame).count();
        proc_time = std::chrono::duration<double>(end_proc - beg_proc).count();
        out_time = std::chrono::duration<double>(end_frame - end_proc).count();
        full_time = std::chrono::duration<double>(beg_frame - last).count();
        active_time = init_time + proc_time + out_time;
// ...
            stats->PutNumber("FPS: ", fps);
            stats->PutNumber("Active time(ms): ", active_time * 1000);
            stats->PutNumber("Init time(ms): ", init_time * 1000);
            stats->PutNumber("Process time(ms): ", proc_time * 1000);
            stats->PutNumber("Output time(ms): ", out_time * 1000);
// ...
        nt::NetworkTableInstance::GetDefault().Flush();
// ...
        std::this_thread::sleep_for(
            std::chrono::nanoseconds(
                (uint64_t)(1E9 / fps_cap)
            ) - (HRC::now() - beg_frame)
        );
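
// Shutdown fragments: the remaining lines walk the output streams, pause
// briefly (100 ms), and check whether the head thread is still joinable
// (presumably to join it).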
    for(size_t i = 0; i < _inst.streams.size(); i++) {
// ...
    std::this_thread::sleep_for(std::chrono::milliseconds(100));
// ...
    if(_inst.head.joinable()) {
// ...