2 * Templates of FIFO and RING-BUFFER.
3 * (C) 2022-07-16 K.Ohta <whatisthis.sowhat@gmail.com>
7 * This presents reference FIFO and RING-BUFFER,
9 * unmutexed (faster; using from same thread)
11 * mutexed (slower; using from another threads, i.e: mailbox)
12 * This aims to be a skeleton of the FIFO:: class and RINGBUFFER:: class.
15 * * Will support alignment for the internal buffer.
28 class FIFO_INTERFACE {
// NOTE(review): this is a sampled/partial view — the numbers at the start of
// each line are the original file's line numbers, and many lines in between
// (closing braces, member declarations such as m_rptr/m_wptr, error branches)
// are not shown.  Comments below describe only what the visible code shows.
//
// Base template holding the shared buffer, pointers and counters, plus the
// *_base() read/write primitives used by the UNLOCKED_/LOCKED_ FIFO and
// RINGBUFFER subclasses.  unlocked_* members take no lock; locked_* members
// guard the same operation with m_locker.
30 std::atomic<ssize_t> m_bufSize;
32 std::shared_ptr<T> m_buf;
33 std::atomic<ssize_t> m_low_warning;
34 std::atomic<ssize_t> m_high_warning;
38 std::atomic<bool> m_is_ringbuffer;
39 std::atomic<ssize_t> m_dataCount;
40 // Use only for LOCKED_FOO, not using for UNLOCKED_FOO .
41 std::recursive_mutex m_locker;
// True when this instance behaves as a ring buffer (writes may overwrite).
43 inline bool is_ringbuffer() const {
44 return m_is_ringbuffer.load();
// Allocated buffer size, in elements.
46 inline ssize_t get_buffer_size() {
47 return m_bufSize.load();
// (Re)allocates the internal buffer to _count elements; 'force' reallocates
// even when the size is unchanged.  Returns the resulting buffer size.
49 ssize_t realloc_buffer_size(size_t _count, bool force = false) {
50 __UNLIKELY_IF(_count == 0) {
56 __UNLIKELY_IF(_count > SSIZE_MAX) {
59 if((_count != m_bufSize.load()) || (force)) {
63 } catch (std::bad_alloc& e) {
66 if((p == nullptr) && (force)) {
69 } else if((p != nullptr) && (_count != 0)) {
77 return m_bufSize.load();
// True when at least _count elements are queued and readable.
83 constexpr bool check_data_available(size_t _count = 1) {
84 __LIKELY_IF(check_buffer_available()) {
85 __UNLIKELY_IF((_count > SSIZE_MAX) || (_count == 0)) {
88 return (m_dataCount.load() >= ((ssize_t)_count)) ? true : false;
// True when the buffer exists and has a positive size.
92 constexpr bool check_buffer_available() {
// NOTE(review): std::atomic has no get() member — 'm_bufSize.get()' looks
// like a typo for m_bufSize.load(); confirm against the full file.
93 bool success = ((m_bufSize.get() <= 0) || (m_buf.get() == nullptr)) ? false : true;
// True when _count elements can be written.  For a ring buffer only total
// capacity matters (old data may be overwritten); for a plain FIFO the free
// space (_size - _dcount) is what is checked.
96 constexpr bool check_data_writable(size_t _count = 1) {
97 __UNLIKELY_IF((_count == 0) || (_count > SSIZE_MAX)) {
100 __LIKELY_IF(check_buffer_available()) {
101 ssize_t _dcount = m_dataCount.load();
102 __UNLIKELY_IF(_dcount <= 0) {
106 ssize_t _size = m_bufSize.load();
107 if(is_ringbuffer()) {
108 __UNLIKELY_IF(_size <= 0) {
111 return (_count <= (size_t)_size) ? true : false;
// NOTE(review): signed/unsigned mix — _dcount (ssize_t) + _count (size_t)
// promotes to unsigned; verify the comparison for large values.
113 return ((_dcount + _count) <= _size) ? true : false;
// Normalizes 'offset' into [0, m_bufSize) by modulo; no-op guard when the
// buffer size is not positive.
119 inline void check_offset(size_t& offset)
121 __UNLIKELY_IF(m_bufSize <= 0) {
124 offset = offset % m_bufSize;
// Validates the destination pointer, then defers to check_data_available().
127 inline bool check_readable_data_count(T* dst, size_t _count) {
128 __UNLIKELY_IF(dst == nullptr) {
131 return check_data_available(_count);
// ---- read primitives ----
133 virtual T unlocked_read_base(void) {
136 virtual T unlocked_read_base(bool& success) {
137 success = check_data_available();
138 return unlocked_read_base();
140 virtual T locked_read_base(void) {
141 std::lock_guard<std::recursive_mutex> locker(m_locker);
142 return unlocked_read_base();
144 virtual T locked_read_base(bool& success) {
145 std::lock_guard<std::recursive_mutex> locker(m_locker);
146 return unlocked_read_base(success);
// "not_remove" variants: peek at 'offset' without consuming data.
148 virtual T unlocked_read_not_remove_base(size_t offset) {
149 check_offset(offset);
152 virtual T unlocked_read_not_remove_base(size_t offset, bool& success) {
153 success = check_buffer_available();
154 return unlocked_read_not_remove_base(offset);
156 virtual T locked_read_not_remove_base(size_t offset) {
157 std::lock_guard<std::recursive_mutex> locker(m_locker);
158 return unlocked_read_not_remove_base(offset);
160 virtual T locked_read_not_remove_base(size_t offset, bool& success) {
162 std::lock_guard<std::recursive_mutex> locker(m_locker);
163 return unlocked_read_not_remove_base(offset, success);
// Bulk read into a caller-supplied buffer; 'success' reports validity.
165 virtual size_t unlocked_read_to_buffer_base(T* dst, size_t _count, bool& success) {
166 success = check_readable_data_count(dst, _count);
167 __UNLIKELY_IF(!(success)) {
172 virtual size_t locked_read_to_buffer_base(T* dst, size_t _count, bool& success) {
173 std::lock_guard<std::recursive_mutex> locker(m_locker);
174 return unlocked_read_to_buffer_base(dst, _count, success);
// ---- write primitives ----
176 virtual bool unlocked_write_base(T data) {
177 bool success = check_data_writable();
180 virtual bool locked_write_base(T data) {
181 std::lock_guard<std::recursive_mutex> locker(m_locker);
182 return unlocked_write_base(data);
// "not_push" variants: store at an offset without advancing the write pointer.
184 virtual bool unlocked_write_not_push_base(size_t offset, T data) {
185 bool success = check_buffer_available();
186 check_offset(offset);
189 virtual bool locked_write_not_push_base(size_t offset, T data) {
190 std::lock_guard<std::recursive_mutex> locker(m_locker);
191 return unlocked_write_not_push_base(offset, data);
193 virtual size_t unlocked_write_from_buffer_base(T* src, size_t _count, bool& success) {
194 __UNLIKELY_IF(src == nullptr) {
198 success = check_data_writable(_count);
199 __UNLIKELY_IF(!(success)) {
204 virtual size_t locked_write_from_buffer_base(T* src, size_t _count, bool& success) {
205 std::lock_guard<std::recursive_mutex> locker(m_locker);
206 return unlocked_write_from_buffer_base(src, _count, success);
// Constructor: allocates _size elements.  On failure the warning levels are
// set to inverted extremes (presumably so high_warn()/low_warn() always
// fire) — TODO confirm intent against the full file.
209 FIFO_INTERFACE(size_t _size) :
210 m_rptr(0), m_wptr(0), m_dataCount(0),
211 m_high_warning(SSIZE_MAX - 1), m_low_warning(SSIZE_MIN + 1),
212 m_is_ringbuffer(false)
214 bool is_legal = true;
216 ssize_t bsize = realloc_buffer_size(_size, true);
// NOTE(review): 'm.buf' is a typo — should be 'm_buf'.
217 if((bsize <= 0) || (bsize != _size) || (m.buf.get() == nullptr)) {
221 m_high_warning = SSIZE_MIN;
222 m_low_warning = SSIZE_MAX;
231 virtual void initialize() {}
232 virtual void release() {}
// Clears the buffer contents under the lock.
233 virtual void clear() {
234 std::lock_guard<std::recursive_mutex> locker(m_locker);
238 __UNLIKELY_IF(!(check_buffer_available())) {
// NOTE(review): 'int i' vs ssize_t m_bufSize — possible truncation for very
// large buffers.
241 for(int i = 0; i < m_bufSize; i++) {
// ---- public wrappers (default to the unlocked primitives) ----
245 virtual T read(bool& success) {
246 return unlocked_read_base(success);
248 virtual T read(void) {
249 return unlocked_read_base();
251 virtual T read_not_remove(size_t offset, bool& success) {
252 return unlocked_read_not_remove_base(offset, success);
254 virtual T read_not_remove(size_t offset) {
255 return unlocked_read_not_remove_base(offset);
257 virtual size_t read_to_buffer(T* dst, size_t _count, bool& success)
259 return unlocked_read_to_buffer_base(dst, _count, success);
261 virtual size_t read_to_buffer(T* dst, size_t _count) {
263 return read_to_buffer(dst, _count, dummy);
266 virtual bool write(T data) {
267 return unlocked_write_base(data);
269 virtual bool write_not_push(int offset, T data) {
270 return unlocked_write_not_push_base(offset, data);
272 virtual size_t write_from_buffer(T* src, size_t _count, bool& success) {
273 return unlocked_write_from_buffer_base(src, _count, success);
276 virtual size_t write_from_buffer(T* src, size_t _count) {
278 return write_from_buffer(src, _count, dummy);
282 return check_buffer_available();
286 return (check_data_available()) ? false : true;
// True when at least _count elements can be read.
288 bool read_ready(size_t _count = 1)
290 return check_data_available(_count);
292 virtual bool write_ready(size_t _count = 1)
294 return check_data_writable(_count);
// full(): a ring buffer never blocks writes; otherwise full = not writable.
298 bool result = ((is_ringbuffer()) || !(check_data_writable()));
// count(): number of queued elements, clamped at zero.
304 ssize_t _count = m_dataCount.load();
305 __UNLIKELY_IF(_count < 0) {
308 return (size_t)_count;
// fifo_size(): allocated element count, sanitized.
312 ssize_t _size = m_bufSize.load();
313 __UNLIKELY_IF(_size < 0) {
316 __UNLIKELY_IF(_size > SSIZE_MAX) {
319 return (size_t)_size;
// left(): remaining writable space (whole capacity for a ring buffer).
323 __UNLIKELY_IF(!(check_buffer_available())) {
326 ssize_t _size = get_buffer_size();
327 if(is_ringbuffer()) {
328 __UNLIKELY_IF(_size == SSIZE_MAX) {
332 ssize_t _count = m_dataCount.load();
333 _size = _size - _count;
335 __UNLIKELY_IF(_size < 0) {
// Watermark accessors: high_warn() fires when the queued count exceeds
// m_high_warning; low_warn() when it drops below m_low_warning.
340 void set_high_warn_value(ssize_t val = SSIZE_MAX - 1)
342 m_high_warning = val;
344 void set_low_warn_value(ssize_t val = SSIZE_MIN + 1)
350 return (m_high_warning.load() < m_dataCount.load()) ? true : false;
353 return (m_low_warning.load() > m_dataCount.load()) ? true : false;
// resize(): reallocate under the lock; on mismatch the warning levels are
// reset to defaults, otherwise the supplied warn levels are applied.
355 bool resize(size_t _size, bool force = false, ssize_t _low_warn = SSIZE_MIN + 1, ssize_t _high_warn = SSIZE_MAX - 1) {
356 std::lock_guard<std::recursive_mutex> locker(m_locker);
357 ssize_t realsize = realloc_buffer_size(_size, force);
359 set_high_warn_value();
360 set_low_warn_value();
363 if((size_t)realsize != _size) {
364 set_high_warn_value();
365 set_low_warn_value();
368 set_high_warn_value(_high_warn);
369 set_low_warn_value(_low_warn);
// Unlocked (single-thread) FIFO built on FIFO_INTERFACE<T>.
// NOTE(review): 'publc' is a typo for 'public' — this cannot compile as-is.
375 class UNLOCKED_FIFO : publc FIFO_INTERFACE<T> {
// Pop one element, discarding the success flag.
377 virtual T unlocked_read_base() override
380 return unlocked_read_base(dummy);
// Pop one element; 'success' reports whether data was available.
// NOTE(review): m_buf is declared std::shared_ptr<T>; operator[] requires a
// shared_ptr to an array type (T[], C++17) — confirm the real declaration.
382 virtual T unlocked_read_base(bool& success) override
386 __LIKELY_IF(check_data_available()) {
387 size_t buf_size = (size_t)get_buffer_size();
388 __LIKELY_IF(m_rptr.load() < buf_size) {
389 tmpval = m_buf[m_rptr++];
// Peek at read-pointer + offset without consuming data.
396 virtual T unlocked_read_not_remove_base(size_t offset, bool& success) override
398 check_offset(offset);
400 success = check_buffer_available();
401 __LIKELY_IF(success) {
402 size_t rptr = m_rptr.load();
// NOTE(review): 'optr' is not declared in the visible lines — likely meant
// 'rptr = rptr + offset;'.  Confirm against the full file.
403 rptr = optr + offset;
404 ssize_t bufsize = get_buffer_size();
405 __UNLIKELY_IF(bufsize <= 0) {
409 __UNLIKELY_IF(rptr >= bufsize) {
410 rptr = rptr % bufsize;
413 __UNLIKELY_IF(p == nullptr) {
422 virtual T unlocked_read_not_remove_base(size_t offset) override
425 return unlocked_read_not_remove_base(offset, dummy);
// Bulk pop into dst; stops at the buffer end or when the queue drains.
427 virtual size_t unlocked_read_to_buffer_base(T* dst, size_t _count, bool& success) override
430 __UNLIKELY_IF(dst == nullptr) {
433 success = check_buffer_available();
434 __LIKELY_IF(success) {
437 for(; words < _count; words++) {
438 __UNLIKELY_IF((m_rptr.load() >= bufsize) || (m_dataCount.load() <= 0)) {
441 dst[words] = p[m_rptr++];
444 __UNLIKELY_IF(words <= 0) {
452 virtual size_t unlocked_read_to_buffer_base(T* dst, size_t _count) override
455 return unlocked_read_to_buffer_base(dst, _count, dummy);
458 UNLOCKED_FIFO(size_t _size) : FIFO_INTERFACE<T>
// Push one element; fails when the buffer is missing or already full.
465 virtual bool write(T data)
467 __UNLIKELY_IF((m_buf == nullptr) || (m_dataCount >= (int)m_bufSize)
468 || (m_bufSize == 0)) {
471 __UNLIKELY_IF(m_dataCount < 0) {
472 m_dataCount = 0; // OK?
474 m_buf[m_wptr++] = data;
476 __UNLIKELY_IF(m_wptr >= m_bufSize) {
479 __UNLIKELY_IF(m_dataCount >= (int)m_bufSize) {
480 m_dataCount = (int)m_bufSize;
// Store at write-pointer + offset (wrapped) without advancing the pointer.
484 virtual bool write_not_push(int offset, T data)
486 __UNLIKELY_IF((m_buf == nullptr) ||
487 (m_bufSize == 0) || (offset < 0)) {
490 unsigned int wp = m_wptr + offset;
491 __UNLIKELY_IF(wp >= (int)m_bufSize) {
// Bulk push from src: clamps _count to the free space, splitting the copy
// when it would run past the end of the buffer (wrap-around).
497 virtual int write_from_buffer(T* src, int _count, bool& success)
499 __UNLIKELY_IF((src == nullptr) || (_count <= 0) ||
500 (m_buf == nullptr) || (m_bufSize == 0) ||
501 (m_dataCount >= (int)m_bufSize)) {
505 __UNLIKELY_IF(m_dataCount < 0) {
506 m_dataCount = 0; // OK?
508 __UNLIKELY_IF(_count > (int)m_bufSize) {
509 _count = (int)m_bufSize;
511 __UNLIKELY_IF((_count + m_dataCount) >= (int)m_bufSize) {
512 _count = (int)m_bufSize - m_dataCount;
518 __UNLIKELY_IF(m_wptr >= m_bufSize) {
519 m_wptr = m_wptr % m_bufSize;
// Wrap-around path: copy to the buffer end first, then from the start.
523 if((xptr + (unsigned int)_count) > m_bufSize) {
524 int count1 = (int)(m_bufSize - (xptr % m_bufSize));
525 int count2 = _count - count1;
527 for(int i = 0; i < count1; i++) {
528 m_buf[xptr++] = src[rp++];
531 for(int i = 0; i < count2; i++) {
532 m_buf[xptr++] = src[rp++];
536 for(int i = 0; i < _count; i++) {
537 m_buf[xptr++] = src[i];
540 m_wptr = xptr % m_bufSize;
541 m_dataCount += _count;
542 __UNLIKELY_IF(m_dataCount > (int)m_bufSize) {
543 m_dataCount = (int)m_bufSize;
// Mutex-guarded FIFO: every operation takes m_locker, then delegates to the
// corresponding UNLOCKED_FIFO<T> member (see the base-class comment about
// m_locker being intended for the LOCKED_* variants).
552 class LOCKED_FIFO : public UNLOCKED_FIFO<T> {
555 LOCKED_FIFO(int _size) :
556 UNLOCKED_FIFO<T>(_size)
562 virtual void initialize()
564 std::lock_guard<std::recursive_mutex> locker(m_locker);
566 virtual void release()
568 std::lock_guard<std::recursive_mutex> locker(m_locker);
569 UNLOCKED_FIFO<T>::release();
573 std::lock_guard<std::recursive_mutex> locker(m_locker);
574 UNLOCKED_FIFO<T>::clear();
576 virtual T read(bool& success)
578 std::lock_guard<std::recursive_mutex> locker(m_locker);
579 return UNLOCKED_FIFO<T>::read(success);
// NOTE(review): this overload forwards a 'success' variable whose declaration
// is not in the visible lines — presumably a local dummy; confirm.
584 std::lock_guard<std::recursive_mutex> locker(m_locker);
585 return UNLOCKED_FIFO<T>::read(success);
588 virtual T read_not_remove(int offset, bool& success)
590 std::lock_guard<std::recursive_mutex> locker(m_locker);
591 return UNLOCKED_FIFO<T>::read_not_remove(offset, success);
593 virtual T read_not_remove(int offset)
596 std::lock_guard<std::recursive_mutex> locker(m_locker);
597 return UNLOCKED_FIFO<T>::read_not_remove(offset, success);
599 virtual int read_to_buffer(T* dst, int _count, bool& success)
601 std::lock_guard<std::recursive_mutex> locker(m_locker);
602 return UNLOCKED_FIFO<T>::read_to_buffer(dst, _count, success);
604 virtual bool write(T data)
606 std::lock_guard<std::recursive_mutex> locker(m_locker);
607 return UNLOCKED_FIFO<T>::write(data);
609 virtual bool write_not_push(int offset, T data)
611 std::lock_guard<std::recursive_mutex> locker(m_locker);
612 return UNLOCKED_FIFO<T>::write_not_push(offset, data);
614 virtual int write_from_buffer(T* src, int _count, bool& success)
616 std::lock_guard<std::recursive_mutex> locker(m_locker);
617 return UNLOCKED_FIFO<T>::write_from_buffer(src, _count, success);
619 virtual bool available()
621 std::lock_guard<std::recursive_mutex> locker(m_locker);
622 return UNLOCKED_FIFO<T>::available();
626 std::lock_guard<std::recursive_mutex> locker(m_locker);
627 return UNLOCKED_FIFO<T>::empty();
629 virtual bool read_ready()
631 std::lock_guard<std::recursive_mutex> locker(m_locker);
632 return UNLOCKED_FIFO<T>::read_ready();
634 virtual bool write_ready()
636 std::lock_guard<std::recursive_mutex> locker(m_locker);
637 return UNLOCKED_FIFO<T>::write_ready();
642 std::lock_guard<std::recursive_mutex> locker(m_locker);
643 return UNLOCKED_FIFO<T>::full();
647 std::lock_guard<std::recursive_mutex> locker(m_locker);
648 return UNLOCKED_FIFO<T>::count();
650 virtual int fifo_size()
652 std::lock_guard<std::recursive_mutex> locker(m_locker);
653 return UNLOCKED_FIFO<T>::fifo_size();
657 std::lock_guard<std::recursive_mutex> locker(m_locker);
658 return UNLOCKED_FIFO<T>::left();
// NOTE(review): int/INT_MAX here vs ssize_t/SSIZE_MAX in the base class —
// inconsistent warn-value widths; confirm the intended types.
660 virtual void set_high_warn_value(int val = INT_MAX - 1)
662 std::lock_guard<std::recursive_mutex> locker(m_locker);
663 return UNLOCKED_FIFO<T>::set_high_warn_value(val);
665 virtual void set_low_warn_value(int val = INT_MIN + 1)
667 std::lock_guard<std::recursive_mutex> locker(m_locker);
668 return UNLOCKED_FIFO<T>::set_low_warn_value(val);
670 virtual bool high_warn()
672 std::lock_guard<std::recursive_mutex> locker(m_locker);
673 return UNLOCKED_FIFO<T>::high_warn();
675 virtual bool low_warn()
677 std::lock_guard<std::recursive_mutex> locker(m_locker);
678 return UNLOCKED_FIFO<T>::low_warn();
// NOTE(review): the base resize() signature is (size, force, low, high) —
// this call passes _low_warn where 'force' is expected; verify argument order.
680 virtual bool resize(int _size, int _low_warn = INT_MIN + 1, int _high_warn = INT_MAX - 1)
682 std::lock_guard<std::recursive_mutex> locker(m_locker);
683 return UNLOCKED_FIFO<T>::resize(_size, _low_warn, _high_warn);
// Ring-buffer variant of the unlocked FIFO: writes never fail for "full";
// the write pointer wraps and the oldest data is overwritten instead.
688 class UNLOCKED_RINGBUFFER : public UNLOCKED_FIFO<T> {
690 UNLOCKED_RINGBUFFER(int _size) :
691 UNLOCKED_FIFO<T>(_size)
694 ~UNLOCKED_RINGBUFFER()
697 virtual void initialize()
699 UNLOCKED_FIFO<T>::initialize();
701 virtual void release()
703 UNLOCKED_FIFO<T>::release();
705 // RINGBUFFER : Even write to buffer when full.
706 virtual bool write(T data)
708 __UNLIKELY_IF((UNLOCKED_FIFO<T>::m_buf == nullptr) || (UNLOCKED_FIFO<T>::m_bufSize == 0)) {
712 __UNLIKELY_IF(UNLOCKED_FIFO<T>::m_dataCount < 0) {
713 UNLOCKED_FIFO<T>::m_dataCount = 0; // OK?
// Store, advance and wrap the write pointer; clamp the count at capacity.
715 UNLOCKED_FIFO<T>::m_buf[UNLOCKED_FIFO<T>::m_wptr++] = data;
716 UNLOCKED_FIFO<T>::m_dataCount++;
717 __UNLIKELY_IF(UNLOCKED_FIFO<T>::m_wptr >= UNLOCKED_FIFO<T>::m_bufSize) {
718 UNLOCKED_FIFO<T>::m_wptr = UNLOCKED_FIFO<T>::m_wptr % UNLOCKED_FIFO<T>::m_bufSize;
720 __UNLIKELY_IF(UNLOCKED_FIFO<T>::m_dataCount > (int)UNLOCKED_FIFO<T>::m_bufSize) {
721 UNLOCKED_FIFO<T>::m_dataCount = (int)UNLOCKED_FIFO<T>::m_bufSize;
// Store at write-pointer + offset (wrapped) without advancing the pointer.
725 virtual bool write_not_push(int offset, T data)
727 __UNLIKELY_IF((UNLOCKED_FIFO<T>::m_buf == nullptr) ||
728 (UNLOCKED_FIFO<T>::m_bufSize == 0) || (offset < 0)) {
731 unsigned int wp = UNLOCKED_FIFO<T>::m_wptr + offset;
732 __UNLIKELY_IF(wp >= (int)UNLOCKED_FIFO<T>::m_bufSize) {
733 wp = wp % UNLOCKED_FIFO<T>::m_bufSize;
735 UNLOCKED_FIFO<T>::m_buf[wp] = data;
// Bulk write: unlike the FIFO version, clamps _count to total capacity (not
// free space) since overwriting is allowed; wraps with a split copy.
738 virtual int write_from_buffer(T* src, int _count, bool& success)
740 __UNLIKELY_IF((src == nullptr) || (_count <= 0) ||
741 (UNLOCKED_FIFO<T>::m_buf == nullptr) || (UNLOCKED_FIFO<T>::m_bufSize == 0)) {
745 __UNLIKELY_IF(UNLOCKED_FIFO<T>::m_dataCount < 0) {
746 UNLOCKED_FIFO<T>::m_dataCount = 0; // OK?
748 __UNLIKELY_IF(_count > (int)UNLOCKED_FIFO<T>::m_bufSize) {
749 _count = (int)UNLOCKED_FIFO<T>::m_bufSize;
750 __UNLIKELY_IF(_count <= 0) {
755 __UNLIKELY_IF(UNLOCKED_FIFO<T>::m_wptr >= UNLOCKED_FIFO<T>::m_bufSize) {
756 UNLOCKED_FIFO<T>::m_wptr = UNLOCKED_FIFO<T>::m_wptr % UNLOCKED_FIFO<T>::m_bufSize;
759 unsigned int xptr = UNLOCKED_FIFO<T>::m_wptr;
// Split copy when the run would pass the end of the buffer.
760 if((xptr + (unsigned int)_count) >= UNLOCKED_FIFO<T>::m_bufSize) {
761 int count1 = (int)(UNLOCKED_FIFO<T>::m_bufSize - xptr);
762 int count2 = _count - count1;
764 for(int i = 0; i < count1; i++) {
765 UNLOCKED_FIFO<T>::m_buf[xptr++] = src[rp++];
768 for(int i = 0; i < count2; i++) {
769 UNLOCKED_FIFO<T>::m_buf[xptr++] = src[rp++];
773 for(int i = 0; i < _count; i++) {
774 UNLOCKED_FIFO<T>::m_buf[xptr++] = src[i];
777 UNLOCKED_FIFO<T>::m_dataCount += _count;
778 UNLOCKED_FIFO<T>::m_wptr = (xptr % UNLOCKED_FIFO<T>::m_bufSize);
779 __UNLIKELY_IF(UNLOCKED_FIFO<T>::m_dataCount >= (int)UNLOCKED_FIFO<T>::m_bufSize) {
// NOTE(review): missing the (int) cast used on the equivalent line in the
// FIFO version — minor inconsistency.
780 UNLOCKED_FIFO<T>::m_dataCount = UNLOCKED_FIFO<T>::m_bufSize;
// A ring buffer is writable whenever the buffer itself is available.
785 virtual bool write_ready()
787 bool f = UNLOCKED_FIFO<T>::available();
796 __UNLIKELY_IF((UNLOCKED_FIFO<T>::m_bufSize == 0) || (UNLOCKED_FIFO<T>::m_buf == nullptr)) {
799 return (int)UNLOCKED_FIFO<T>::m_bufSize;
// Mutex-guarded ring buffer: each operation locks this class's own m_locker,
// then delegates to UNLOCKED_RINGBUFFER<T>.  (The class runs past the end of
// the visible chunk; its closing brace is not shown.)
804 class LOCKED_RINGBUFFER : public UNLOCKED_RINGBUFFER<T> {
// NOTE(review): this declares its own m_locker, separate from the base-class
// FIFO_INTERFACE::m_locker — confirm that shadowing is intentional.
806 std::recursive_mutex m_locker;
808 LOCKED_RINGBUFFER(int _size) :
809 UNLOCKED_RINGBUFFER<T>(_size)
815 virtual void initialize()
817 std::lock_guard<std::recursive_mutex> locker(m_locker);
818 UNLOCKED_RINGBUFFER<T>::initialize();
820 virtual void release()
822 std::lock_guard<std::recursive_mutex> locker(m_locker);
823 UNLOCKED_RINGBUFFER<T>::release();
827 std::lock_guard<std::recursive_mutex> locker(m_locker);
828 UNLOCKED_RINGBUFFER<T>::clear();
831 virtual T read(bool& success)
833 std::lock_guard<std::recursive_mutex> locker(m_locker);
834 return UNLOCKED_RINGBUFFER<T>::read(success);
// NOTE(review): forwards a 'success' whose declaration is not in the visible
// lines — presumably a local dummy; confirm.
839 std::lock_guard<std::recursive_mutex> locker(m_locker);
840 return UNLOCKED_RINGBUFFER<T>::read(success);
842 virtual T read_not_remove(int offset, bool& success)
844 std::lock_guard<std::recursive_mutex> locker(m_locker);
845 return UNLOCKED_RINGBUFFER<T>::read_not_remove(offset, success);
847 virtual T read_not_remove(int offset)
850 std::lock_guard<std::recursive_mutex> locker(m_locker);
851 return UNLOCKED_RINGBUFFER<T>::read_not_remove(offset, success);
854 virtual int read_to_buffer(T* dst, int _count, bool& success)
856 std::lock_guard<std::recursive_mutex> locker(m_locker);
857 return UNLOCKED_RINGBUFFER<T>::read_to_buffer(dst, _count, success);
860 virtual bool write(T data)
862 std::lock_guard<std::recursive_mutex> locker(m_locker);
863 return UNLOCKED_RINGBUFFER<T>::write(data);
865 virtual bool write_not_push(int offset, T data)
867 std::lock_guard<std::recursive_mutex> locker(m_locker);
868 return UNLOCKED_RINGBUFFER<T>::write_not_push(offset, data);
870 virtual int write_from_buffer(T* src, int _count, bool& success)
872 std::lock_guard<std::recursive_mutex> locker(m_locker);
873 return UNLOCKED_RINGBUFFER<T>::write_from_buffer(src, _count, success);
875 virtual bool available()
877 std::lock_guard<std::recursive_mutex> locker(m_locker);
878 return UNLOCKED_RINGBUFFER<T>::available();
882 std::lock_guard<std::recursive_mutex> locker(m_locker);
883 return UNLOCKED_RINGBUFFER<T>::empty();
885 virtual bool read_ready()
887 std::lock_guard<std::recursive_mutex> locker(m_locker);
888 return UNLOCKED_RINGBUFFER<T>::read_ready();
890 virtual bool write_ready()
892 std::lock_guard<std::recursive_mutex> locker(m_locker);
893 return UNLOCKED_RINGBUFFER<T>::write_ready();
897 std::lock_guard<std::recursive_mutex> locker(m_locker);
898 return UNLOCKED_RINGBUFFER<T>::full();
902 std::lock_guard<std::recursive_mutex> locker(m_locker);
903 return UNLOCKED_RINGBUFFER<T>::count();
905 virtual int fifo_size()
907 std::lock_guard<std::recursive_mutex> locker(m_locker);
908 return UNLOCKED_RINGBUFFER<T>::fifo_size();
912 std::lock_guard<std::recursive_mutex> locker(m_locker);
913 return UNLOCKED_RINGBUFFER<T>::left();
// NOTE(review): int/INT_MAX here vs ssize_t/SSIZE_MAX in the base class —
// inconsistent warn-value widths; confirm the intended types.
916 virtual void set_high_warn_value(int val = INT_MAX - 1)
918 std::lock_guard<std::recursive_mutex> locker(m_locker);
919 return UNLOCKED_RINGBUFFER<T>::set_high_warn_value(val);
921 virtual void set_low_warn_value(int val = INT_MIN + 1)
923 std::lock_guard<std::recursive_mutex> locker(m_locker);
924 return UNLOCKED_RINGBUFFER<T>::set_low_warn_value(val);
926 virtual bool high_warn()
928 std::lock_guard<std::recursive_mutex> locker(m_locker);
929 return UNLOCKED_RINGBUFFER<T>::high_warn();
931 virtual bool low_warn()
933 std::lock_guard<std::recursive_mutex> locker(m_locker);
934 return UNLOCKED_RINGBUFFER<T>::low_warn();
// NOTE(review): the base resize() signature is (size, force, low, high) —
// this call passes _low_warn where 'force' is expected; verify argument order.
936 virtual bool resize(int _size, int _low_warn = INT_MIN + 1, int _high_warn = INT_MAX - 1)
938 std::lock_guard<std::recursive_mutex> locker(m_locker);
939 return UNLOCKED_RINGBUFFER<T>::resize(_size, _low_warn, _high_warn);