VisionServer  v2.1.1-1-g21dc5465
FRC vision library
visionserver2.cpp
Go to the documentation of this file.
#include "visionserver2.h"

#include "cpp-tools/src/resources.h"


using namespace vs2;
7
8VisionServer::VisionServer() {
9 std::cout << "Init VisionServer" << std::endl;
10 ntable()->PutNumber("Cameras Available", 0);
11 ntable()->PutNumber("Pipelines Available", 0);
12 ntable()->PutString("Status", "Offline");
13}
15 // ntable()->Delete("Cameras Available");
16 // ntable()->Delete("Pipelines Available");
17 // ntable()->Delete("Status");
18}
19
20
22 this->SetVideoMode(cam.GetVideoMode());
23 this->input.SetSource(cam);
24 this->src_matrix = &cam.getCameraMatrix();
25 this->src_distort = &cam.getDistortionCoefs();
26}
28 this->SetVideoMode(pipe.GetVideoMode());
29 this->input.SetSource(pipe);
30 this->src_matrix = pipe.src_matrix;
31 this->src_distort = pipe.src_distort;
32}
33void VisionServer::BasePipe::setSource(const cs::VideoSource& src) {
34 this->SetVideoMode(src.GetVideoMode());
35 this->input.SetSource(src);
36 this->src_matrix = &VisionCamera::default_matrix;
37 this->src_distort = &VisionCamera::default_distort;
38}
39void VisionServer::BasePipe::getFrame(cv::Mat& io_frame) {
40 this->input.GrabFrame(io_frame);
41}
42
43
46 VisionServer& _inst = getInstance();
47 _inst.cameras.emplace_back(std::move(c));
48 _inst.cameras.back().setNetworkBase(VisionServer::ntable());
49 _inst.cameras.back().setNetworkAdjustable();
50 VisionServer::ntable()->GetEntry("Cameras Available").SetDouble(_inst.cameras.size());
51 return true;
52 }
53 return false;
54}
55bool VisionServer::addCameras(std::vector<VisionCamera>&& cms) {
57 VisionServer& _inst = getInstance();
58 size_t num = cms.size();
59 _inst.cameras.reserve(_inst.cameras.size() + num);
60 _inst.cameras.insert(
61 _inst.cameras.end(),
62 std::make_move_iterator(cms.begin()),
63 std::make_move_iterator(cms.end())
64 );
65 cms.clear();
66 for(size_t i = _inst.cameras.size() - num; i < _inst.cameras.size(); i++) {
67 _inst.cameras.at(i).setNetworkBase(VisionServer::ntable());
68 _inst.cameras.at(i).setNetworkAdjustable();
69 }
70 VisionServer::ntable()->GetEntry("Cameras Available").SetDouble(_inst.cameras.size());
71 return true;
72 }
73 return false;
74}
75bool VisionServer::setCameras(std::vector<VisionCamera>&& cms) {
77 VisionServer& _inst = getInstance();
78 _inst.cameras = std::move(cms);
79 cms.clear();
80 for(size_t i = 0; i < _inst.cameras.size(); i++) {
81 _inst.cameras.at(i).setNetworkBase(VisionServer::ntable());
82 _inst.cameras.at(i).setNetworkAdjustable();
83 }
84 VisionServer::ntable()->GetEntry("Cameras Available").SetDouble(_inst.cameras.size());
85 return true;
86 }
87 return false;
88}
89
92 VisionServer& _inst = getInstance();
93 _inst.pipelines.push_back(p);
94 VisionServer::ntable()->GetEntry("Pipelines Available").SetDouble(_inst.pipelines.size());
95 return true;
96 }
97 return false;
98}
99bool VisionServer::addPipelines(std::vector<BasePipe*>&& ps) {
101 VisionServer& _inst = getInstance();
102 _inst.pipelines.reserve(_inst.pipelines.size() + ps.size());
103 _inst.pipelines.insert(_inst.pipelines.end(), ps.begin(), ps.end());
104 VisionServer::ntable()->GetEntry("Pipelines Available").SetDouble(_inst.pipelines.size());
105 return true;
106 }
107 return false;
108}
109bool VisionServer::addPipelines(std::initializer_list<BasePipe*> ps) {
111 VisionServer& _inst = getInstance();
112 _inst.pipelines.reserve(_inst.pipelines.size() + ps.size());
113 _inst.pipelines.insert(_inst.pipelines.end(), ps.begin(), ps.end());
114 VisionServer::ntable()->GetEntry("Pipelines Available").SetDouble(_inst.pipelines.size());
115 return true;
116 }
117 return false;
118}
119bool VisionServer::setPipelines(std::vector<BasePipe*>&& ps) {
121 VisionServer& _inst = getInstance();
122 _inst.pipelines.clear();
123 _inst.heap_allocated.clear();
124 _inst.pipelines = std::move(ps);
125 VisionServer::ntable()->GetEntry("Pipelines Available").SetDouble(_inst.pipelines.size());
126 return true;
127 }
128 return false;
129}
130bool VisionServer::setPipelines(std::initializer_list<BasePipe*> ps) {
132 VisionServer& _inst = getInstance();
133 _inst.pipelines.clear();
134 _inst.heap_allocated.clear();
135 _inst.pipelines = ps;
136 VisionServer::ntable()->GetEntry("Pipelines Available").SetDouble(_inst.pipelines.size());
137 return true;
138 }
139 return false;
140}
141
144 VisionServer& _inst = getInstance();
145 _inst.streams.emplace_back("Stream " + std::to_string(_inst.streams.size() + 1));
146 return true;
147 }
148 return false;
149}
150bool VisionServer::addStream(std::string_view name) {
152 inst().streams.emplace_back(name);
153 return true;
154 }
155 return false;
156}
157bool VisionServer::addStream(std::string_view name, int port) {
159 inst().streams.emplace_back(name, port);
160 return true;
161 }
162 return false;
163}
164bool VisionServer::addStream(const cs::MjpegServer& s) {
166 inst().streams.emplace_back(cs::MjpegServer(s));
167 return true;
168 }
169 return false;
170}
171bool VisionServer::addStream(cs::MjpegServer&& s) {
173 inst().streams.emplace_back(std::move(s));
174 return true;
175 }
176 return false;
177}
180 VisionServer& _inst = getInstance();
181 _inst.streams.reserve(_inst.streams.size() + n);
182 for(size_t i = 0; i < n; i++) {
183 _inst.streams.emplace_back("Stream " + std::to_string(_inst.streams.size() + 1));
184 }
185 return true;
186 }
187 return false;
188}
189bool VisionServer::addStreams(std::initializer_list<std::string_view> strms) {
191 VisionServer& _inst = getInstance();
192 _inst.streams.reserve(_inst.streams.size() + strms.size());
193 for(auto itr = strms.begin(); itr != strms.end(); itr++) {
194 _inst.streams.emplace_back(*itr);
195 }
196 return true;
197 }
198 return false;
199}
200bool VisionServer::addStreams(std::initializer_list<std::pair<std::string_view, int> > strms) {
202 VisionServer& _inst = getInstance();
203 _inst.streams.reserve(_inst.streams.size() + strms.size());
204 for(auto itr = strms.begin(); itr != strms.end(); itr++) {
205 _inst.streams.emplace_back(itr->first, itr->second);
206 }
207 return true;
208 }
209 return false;
210}
211bool VisionServer::addStreams(const std::vector<cs::MjpegServer>& strms) {
213 VisionServer& _inst = getInstance();
214 _inst.streams.reserve(_inst.streams.size() + strms.size());
215 for(size_t i = 0; i < strms.size(); i++) {
216 _inst.streams.push_back(cs::MjpegServer(strms.at(i)));
217 }
218 return true;
219 }
220 return false;
221}
222
223
225 MjpegServer(std::move(s)), table(OutputStream::ntable()->GetSubTable(this->GetName()))
226{
227 this->table->GetEntry("Source Index").SetDouble(this->local_idx);
228 this->table->GetEntry("Port").SetDouble(this->GetPort());
229}
232 if(this->local_idx != i) {
233 if(i < 0 && i >= -(int)VisionServer::numCameras()) {
234 this->SetSource(inst().cameras.at((-i) - 1));
235 } else if(i > 0 && i <= (int)VisionServer::numPipelines()) {
236 this->SetSource(*(inst().pipelines.at(i - 1)));
237 }
238 this->table->GetEntry("Source Index").SetDouble(this->local_idx = i);
239 }
240}
242 int nt = this->table->GetEntry("Source Index").GetDouble(0);
243 if(nt != this->local_idx) {
244 this->local_idx = nt;
245 if(nt < 0 && nt >= -(int)VisionServer::numCameras()) {
246 this->SetSource(inst().cameras.at((-nt) - 1));
247 } else if(nt > 0 && nt <= (int)VisionServer::numPipelines()) {
248 this->SetSource(*(inst().pipelines.at(nt - 1)));
249 }
250 }
251}
252
253
254void VisionServer::pipelineRunner(BasePipe* pipe, float fps_cap) {
255 pipe->table->PutBoolean("Enable Processing", true);
256 pipe->table->PutNumber("Source Index", 1);
257 pipe->table->PutNumber("Statistics Verbosity", 0);
258 pipe->table->PutNumber("Max FPS", fps_cap);
259
261 std::shared_ptr<nt::NetworkTable> stats{pipe->table->GetSubTable("stats")};
262
263 int idx = 1;
264 float fps = 0.f;
265 double init_time = 0, proc_time = 0, out_time = 0, active_time = 1, full_time = 0;
266 HRC::time_point beg_frame, beg_proc, end_proc, end_frame, last;
267 cv::Mat frame = cv::Mat::zeros(cv::Size(1, 1), CV_8UC3);
268
269 pipe->init();
270 while(VisionServer::isRunning()) {
271 beg_frame = HRC::now();
272 int n = pipe->table->GetEntry("Source Index").GetDouble(0);
273 if(n != idx) {
274 idx = n;
275 if(idx > 0 && idx <= numCameras()) {
276 pipe->setCamera(_inst.cameras.at(idx - 1));
277 } else if(idx < 0 && idx >= -((int)numPipelines)) {
278 pipe->setPipeline(*(_inst.pipelines.at((-idx) - 1)));
279 }
280 }
281 if(pipe->table->GetEntry("Enable Processing").GetBoolean(false) &&
282 pipe->input.GrabFrame(frame/*, max_ftime / 2.f*/)
283 ) {
284 beg_proc = HRC::now();
285 pipe->process(frame);
286 end_proc = HRC::now();
287 int verbosity = pipe->table->GetEntry("Statistics Verbosity").GetDouble(0);
288 if(verbosity > 0) {
289 cv::putText(
290 frame, std::to_string(fps),
291 cv::Point(5, 20), cv::FONT_HERSHEY_DUPLEX,
292 0.65, cv::Scalar(0, 255, 0), 1, cv::LINE_AA
293 );
294 }
295 if(verbosity > 1) {
296 cv::putText(
297 frame, "Active: " + std::to_string(active_time * 1000) + "ms",
298 cv::Point(5, 45), cv::FONT_HERSHEY_DUPLEX,
299 0.65, cv::Scalar(0, 255, 0), 1, cv::LINE_AA
300 );
301 }
302 if(verbosity > 2) {
303 cv::putText(
304 frame, "Init: " + std::to_string(init_time * 1000) + "ms",
305 cv::Point(5, 70), cv::FONT_HERSHEY_DUPLEX,
306 0.65, cv::Scalar(0, 255, 0), 1, cv::LINE_AA
307 );
308 cv::putText(
309 frame, "Process: " + std::to_string(proc_time * 1000) + "ms",
310 cv::Point(5, 95), cv::FONT_HERSHEY_DUPLEX,
311 0.65, cv::Scalar(0, 255, 0), 1, cv::LINE_AA
312 );
313 cv::putText(
314 frame, "Output: " + std::to_string(out_time * 1000) + "ms",
315 cv::Point(5, 120), cv::FONT_HERSHEY_DUPLEX,
316 0.65, cv::Scalar(0, 255, 0), 1, cv::LINE_AA
317 );
318 }
319 pipe->PutFrame(frame);
320 end_frame = HRC::now();
321 } else {
322 beg_proc = end_proc = end_frame = HRC::now();
323 }
324
325 init_time = std::chrono::duration<double>(beg_proc - beg_frame).count();
326 proc_time = std::chrono::duration<double>(end_proc - beg_proc).count();
327 out_time = std::chrono::duration<double>(end_frame - end_proc).count();
328 full_time = std::chrono::duration<double>(beg_frame - last).count();
329 active_time = init_time + proc_time + out_time;
330 fps = 1.f/full_time;
331
332 stats->PutNumber("FPS: ", fps);
333 stats->PutNumber("Active time(ms): ", active_time * 1000);
334 stats->PutNumber("Init time(ms): ", init_time * 1000);
335 stats->PutNumber("Process time(ms): ", proc_time * 1000);
336 stats->PutNumber("Output time(ms): ", out_time * 1000);
337
338 fps_cap = pipe->table->GetNumber("Max FPS", fps_cap);
339 std::this_thread::sleep_for(
340 std::chrono::nanoseconds((uint64_t)(1E9 / fps_cap)) - (HRC::now() - beg_frame)
341 );
342
343 last = beg_frame;
344
345 }
346 pipe->close();
347 // pipe->table->Delete("Enable Processing");
348 // pipe->table->Delete("Source Index");
349 // pipe->table->Delete("Statistics Verbosity");
350 // pipe->table->Delete("Max FPS");
351
352}
353
355 VisionServer& _inst = inst();
356 int16_t active = -1;
357 for(size_t i = 0; i < _inst.cameras.size(); i++) {
358 if(_inst.cameras.at(i).IsConnected()) {
359 active = i;
360 break;
361 }
362 }
363 if(active + 1) {
364 size_t i = 0;
365 for(; i < _inst.pipelines.size(); i++) {
366 _inst.pipelines.at(i)->setCamera(_inst.cameras.at(active));
367 std::cout << "Compensation: connected [pipeline] "
368 << _inst.pipelines[i]->getName() << " to [camera] "
369 << _inst.cameras[active].GetName() << newline;
370 }
371 if(i) {
372 for(size_t j = 0; j < _inst.streams.size(); j++) {
373 _inst.streams.at(j).setPipelineIdx(1);
374 std::cout << "Compensation: connected [stream] "
375 << _inst.streams[j].GetName() << " to [pipeline] "
376 << _inst.pipelines[0]->getName() << newline;
377 }
378 } else {
379 for(size_t j = 0; j < _inst.streams.size(); j++) {
380 _inst.streams.at(j).setCameraIdx(active + 1);
381 std::cout << "Compensation: connected [stream] "
382 << _inst.streams[i].GetName() << " to [camera] "
383 << _inst.cameras[active].GetName() << newline;
384 }
385 }
386 std::cout.flush();
387 return true;
388 } else {
389 std::cout << "Compensation failed. No cameras available." << std::endl;
390 return false;
391 }
392}
393
394
395bool VisionServer::run(float fps_cap) {
397 VisionServer& _inst = getInstance();
398 _inst.is_running = true;
399
400 VisionServer::ntable()->GetEntry("Status").SetString("Running Multithreaded");
401
402 HRC::time_point tbuff;
403 // assert fps_cap == 0 ?
404 uint64_t max_nanos = 1E9 / fps_cap;
405
406 std::vector<std::thread> runners;
407 for(size_t i = 0; i < _inst.pipelines.size(); i++) {
408 runners.emplace_back(std::thread(pipelineRunner, _inst.pipelines.at(i), fps_cap));
409 }
410 while(isRunning()) { // main loop
411 tbuff = HRC::now();
412
413 for(size_t i = 0; i < _inst.streams.size(); i++) {
414 _inst.streams[i].syncIdx();
415 }
416 nt::NetworkTableInstance::GetDefault().Flush();
417
418 std::this_thread::sleep_for(
419 std::chrono::nanoseconds(max_nanos) - (HRC::now() - tbuff)
420 );
421 }
422
423 for(size_t i = 0; i < runners.size(); i++) {
424 runners.at(i).join();
425 }
426 VisionServer::ntable()->GetEntry("Status").SetString("Offline");
427 return true;
428 }
429 return false;
430}
431bool VisionServer::runSingle(float fps_cap) {
433 VisionServer& _inst = getInstance();
434 _inst.is_running = true;
435
436 VisionServer::ntable()->GetEntry("Status").SetString("Running Singlethreaded");
437 VisionServer::ntable()->PutBoolean("Enable Processing", true);
438 VisionServer::ntable()->PutNumber("Camera Index", 1);
439 VisionServer::ntable()->PutNumber("Pipeline Index", 1);
440 VisionServer::ntable()->PutNumber("Statistics Verbosity", 0);
441 VisionServer::ntable()->PutNumber("Max FPS", fps_cap);
442
443 std::shared_ptr<nt::NetworkTable> stats = VisionServer::ntable()->GetSubTable("stats");
444
445 int c_idx = 0, p_idx = 0;
446 float fps = 0.f;
447 double init_time = 0, proc_time = 0, out_time = 0, active_time = 1, full_time = 0;
448 HRC::time_point beg_frame, beg_proc, end_proc, end_frame, last;
449 cv::Mat frame = cv::Mat::zeros(cv::Size(1, 1), CV_8UC3);
450
451 _inst.pipelines[p_idx]->init();
452 while(isRunning()) {
453 beg_frame = HRC::now();
454 int n = VisionServer::ntable()->GetEntry("Pipeline Index").GetDouble(0);
455 if(n != p_idx + 1 && n > 0 && n <= numPipelines()) {
456 _inst.pipelines[p_idx]->close();
457 p_idx = n - 1;
458 _inst.pipelines[p_idx]->init();
459 for(size_t i = 0; i < _inst.streams.size(); i++) {
460 _inst.streams[i].setSourceIdx(n);
461 }
462 }
463 n = VisionServer::ntable()->GetEntry("Camera Index").GetDouble(0);
464 if(n != c_idx) {
465 c_idx = n;
466 if(c_idx > 0 && c_idx <= numCameras()) {
467 _inst.pipelines[p_idx]->setCamera(_inst.cameras[c_idx]);
468 }
469 }
470 for(size_t i = 0; i < _inst.streams.size(); i++) {
471 _inst.streams[i].syncIdx();
472 }
473 if(VisionServer::ntable()->GetEntry("Enable Processing").GetBoolean(false) &&
474 _inst.pipelines[p_idx]->input.GrabFrame(frame)
475 ) {
476 beg_proc = HRC::now();
477 _inst.pipelines[p_idx]->process(frame);
478 end_proc = HRC::now();
479 int verbosity = VisionServer::ntable()->GetEntry("Statistics Verbosity").GetDouble(0);
480 if(verbosity > 0) {
481 cv::putText(
482 frame, std::to_string(fps),
483 cv::Point(5, 20), cv::FONT_HERSHEY_DUPLEX,
484 0.65, cv::Scalar(0, 255, 0), 1, cv::LINE_AA
485 );
486 }
487 if(verbosity > 1) {
488 cv::putText(
489 frame, "Active: " + std::to_string(active_time * 1000) + "ms",
490 cv::Point(5, 45), cv::FONT_HERSHEY_DUPLEX,
491 0.65, cv::Scalar(0, 255, 0), 1, cv::LINE_AA
492 );
493 }
494 if(verbosity > 2) {
495 cv::putText(
496 frame, "Init: " + std::to_string(init_time * 1000) + "ms",
497 cv::Point(5, 70), cv::FONT_HERSHEY_DUPLEX,
498 0.65, cv::Scalar(0, 255, 0), 1, cv::LINE_AA
499 );
500 cv::putText(
501 frame, "Process: " + std::to_string(proc_time * 1000) + "ms",
502 cv::Point(5, 95), cv::FONT_HERSHEY_DUPLEX,
503 0.65, cv::Scalar(0, 255, 0), 1, cv::LINE_AA
504 );
505 cv::putText(
506 frame, "Output: " + std::to_string(out_time * 1000) + "ms",
507 cv::Point(5, 120), cv::FONT_HERSHEY_DUPLEX,
508 0.65, cv::Scalar(0, 255, 0), 1, cv::LINE_AA
509 );
510 }
511 _inst.pipelines[p_idx]->PutFrame(frame);
512 end_frame = HRC::now();
513 } else {
514 beg_proc = end_proc = end_frame = HRC::now();
515 }
516
517 init_time = std::chrono::duration<double>(beg_proc - beg_frame).count();
518 proc_time = std::chrono::duration<double>(end_proc - beg_proc).count();
519 out_time = std::chrono::duration<double>(end_frame - end_proc).count();
520 full_time = std::chrono::duration<double>(beg_frame - last).count();
521 active_time = init_time + proc_time + out_time;
522 fps = 1.f/full_time;
523
524 stats->PutNumber("FPS: ", fps);
525 stats->PutNumber("Active time(ms): ", active_time * 1000);
526 stats->PutNumber("Init time(ms): ", init_time * 1000);
527 stats->PutNumber("Process time(ms): ", proc_time * 1000);
528 stats->PutNumber("Output time(ms): ", out_time * 1000);
529
530 nt::NetworkTableInstance::GetDefault().Flush();
531 fps_cap = VisionServer::ntable()->GetNumber("Max FPS", fps_cap);
532 std::this_thread::sleep_for(
533 std::chrono::nanoseconds(
534 (uint64_t)(1E9 / fps_cap)
535 ) - (HRC::now() - beg_frame)
536 );
537
538 last = beg_frame;
539 }
540 _inst.pipelines[p_idx]->close();
541 VisionServer::ntable()->GetEntry("Status").SetString("Offline");
542 // VisionServer::ntable()->Delete("Enable Processing");
543 // VisionServer::ntable()->Delete("Camera Index");
544 // VisionServer::ntable()->Delete("Pipeline Index");
545 // VisionServer::ntable()->Delete("Statistics Verbosity");
546 // VisionServer::ntable()->Delete("Max FPS");
547
548 return true;
549 }
550 return false;
551}
553 VisionServer& _inst = getInstance();
555 VisionServer::ntable()->GetEntry("Status").SetString("Streaming Raw");
556 _inst.is_running = true;
557 while(VisionServer::isRunning()) {
558 for(size_t i = 0; i < _inst.streams.size(); i++) {
559 _inst.streams.at(i).syncIdx();
560 }
561 std::this_thread::sleep_for(std::chrono::milliseconds(100));
562 }
563 VisionServer::ntable()->GetEntry("Status").SetString("Offline");
564 return true;
565 }
566 return false;
567}
568bool VisionServer::runThread(float fps_cap) {
569 VisionServer& _inst = getInstance();
570 if(!VisionServer::isRunning() && !_inst.head.joinable()) {
571 _inst.head = std::move(std::thread(VisionServer::run, fps_cap));
572 return true;
573 }
574 return false;
575}
576bool VisionServer::runSingleThread(float fps_cap) {
577 VisionServer& _inst = getInstance();
578 if(!VisionServer::isRunning() && !_inst.head.joinable()) {
579 _inst.head = std::move(std::thread(VisionServer::runSingle, fps_cap));
580 return true;
581 }
582 return false;
583}
585 VisionServer& _inst = getInstance();
586 if(!VisionServer::isRunning() && !_inst.head.joinable()) {
587 _inst.head = std::move(std::thread(VisionServer::runRaw));
588 return true;
589 }
590 return false;
591}
593 VisionServer& _inst = getInstance();
594 _inst.is_running = false;
595 if(_inst.head.joinable()) {
596 _inst.head.join();
597 return true;
598 }
599 return false;
600}
static const cv::Mat_< float > default_matrix
Definition: visioncamera.h:18
const cv::Mat_< float > & getDistortionCoefs() const
Definition: visioncamera.h:125
const cv::Mat_< float > & getCameraMatrix() const
Definition: visioncamera.h:120
static const cv::Mat_< float > default_distort
Definition: visioncamera.h:19
const cv::Mat_< float > * src_distort
Definition: visionserver2.h:79
void setSource(const cs::VideoSource &)
const std::shared_ptr< nt::NetworkTable > table
Definition: visionserver2.h:81
void setCamera(const VisionCamera &)
virtual void process(cv::Mat &io_frame)=0
void setPipeline(const BasePipe &)
const cv::Mat_< float > * src_matrix
Definition: visionserver2.h:78
std::thread head
static bool addStream()
static VisionServer & inst()
static size_t numCameras()
static size_t numPipelines()
static bool addPipeline()
static bool setPipelines()
static const std::shared_ptr< nt::NetworkTable > & ntable()
Definition: visionserver2.h:22
std::vector< std::unique_ptr< BasePipe > > heap_allocated
static bool runSingleThread(float fps_cap=30.f)
static bool addCameras(std::vector< VisionCamera > &&)
static bool run(float fps_cap=30.f)
static bool runThread(float fps_cap=30.f)
static bool addStreams(size_t=2)
static bool compensate()
std::vector< VisionCamera > cameras
static bool isRunning()
static void pipelineRunner(BasePipe *, float fps_cap)
static bool runRaw()
static VisionServer & getInstance()
Definition: visionserver2.h:27
std::atomic< bool > is_running
static bool addPipelines()
std::vector< BasePipe * > pipelines
static bool addCamera(VisionCamera &&)
static bool runRawThread()
static bool stop()
static bool runSingle(float fps_cap=30.f)
static bool setCameras(std::vector< VisionCamera > &&)
std::vector< OutputStream > streams
Definition: extensions.h:9
const std::shared_ptr< nt::NetworkTable > table
OutputStream(cs::MjpegServer &&s)