A more RCU-friendly referenced base class, and misc ref.hh clean up.

referenced includes a "tryinc" member function that returns false if the reference count is 0; otherwise it increments the reference count and returns true. As a bonus, the implementation does not use cmpxchg loops/retries.
上级 355502fe
#include "atomic.hh"
template <class T>
class sref {
public:
......@@ -40,65 +38,36 @@ private:
T *ptr_;
};
template <class T>
class lref {
class referenced {
public:
lref(T* p = nullptr) : ptr_(p) {
if (ptr_)
ptr_->inc();
}
// Start with 1 reference
referenced() { ref_.v = 0; }
lref(const lref<T>& pr) : ptr_(pr.ptr_) {
if (ptr_)
ptr_->inc();
}
// The number of valid references is:
// ref_.invalid ? 0 : ref_.count+1;
~lref() {
if (ptr_)
ptr_->dec();
inline bool valid() const {
return ref_.invalid == 0;
}
bool operator==(const lref<T>& pr) const { return ptr_ == pr.ptr_; }
bool operator!=(const lref<T>& pr) const { return ptr_ != pr.ptr_; }
bool operator==(T* p) const { return ptr_ == p; }
bool operator!=(T* p) const { return ptr_ != p; }
const T * operator->() const { return ptr_; }
T * operator->() { return ptr_; }
T * ptr() const { return ptr_; }
lref<T>& operator=(const lref<T>& pr) {
const T* save = ptr_;
ptr_ = pr.ptr_;
if (ptr_)
ptr_->inc();
if (save)
save->dec();
return *this;
}
private:
lref<T>& operator=( lref<T>& mp );
lref<T>& operator=( T* p );
T *ptr_;
};
class referenced {
public:
referenced() : ref_(0) {}
u64 ref() const {
return ref_;
inline void inc() const {
// If the reference count is 0 (i.e. ref_.count is 0xffffffff), a 32-bit
// increment will increase ref_.count to 0, but ref_.invalid
// will remain unchanged (non-zero), so valid() still reports false.
asm volatile("lock; incl %0" : "+m" (ref_.count));
}
inline const referenced* inc() const {
++ref_;
return this;
inline bool tryinc() const {
inc();
return valid();
}
inline void dec() const {
if (--ref_ == 0)
unsigned char c;
// If the reference count is 1 (i.e. ref_.v is 0), a 64-bit decrement will
// underflow ref_.invalid to 0xffffffff (and ref_.count to 0xffffffff).
asm volatile("lock; decq %0; sets %1" : "+m" (ref_.v), "=qm" (c));
if (c)
onzero();
}
......@@ -109,5 +78,11 @@ protected:
virtual void onzero() const { delete this; }
private:
mutable std::atomic<u64> ref_;
mutable union {
volatile u64 v;
struct {
volatile u32 count;
volatile u32 invalid;
};
} ref_;
};
......@@ -19,7 +19,6 @@ file::file(void)
: type(file::FD_NONE), readable(0), writable(0),
socket(0), pipe(nullptr), ip(nullptr), off(0)
{
inc();
}
void
......
......@@ -84,13 +84,13 @@ long
uwq_worker::wait(void)
{
acquire(&lock_);
if (uwq_->ref() == 0)
if (!uwq_->valid())
this->exit();
running_ = false;
cv_sleep(&cv_, &lock_);
if (uwq_->ref() == 0)
if (!uwq_->valid())
this->exit();
release(&lock_);
return 0;
......@@ -119,7 +119,6 @@ uwq::alloc(vmap* vmap, filetable *ftable)
ksfree(slab_userwq, len);
return nullptr;
}
u->inc();
if (mapkva(vmap->pml4, (char*)len, USERWQ, USERWQSIZE)) {
ftable->decref();
......@@ -172,7 +171,7 @@ uwq::tryworker(void)
// Try to start a worker thread
scoped_acquire lock0(&lock_);
if (ref() == 0)
if (!valid())
return false;
int slot = -1;
......
您添加了 0 到此讨论。请谨慎行事。
请先完成此评论的编辑!
注册 或者 后发表评论