A more RCU-friendly referenced base class, and misc ref.hh clean up.

referenced includes a "tryinc" member function that returns false if the reference count is 0, otherwise it increments the ref. count and returns true. As a bonus, the implementation does not use cmpxchg loops/retries.
Parent 355502fe
#include "atomic.hh"
template <class T> template <class T>
class sref { class sref {
public: public:
...@@ -40,65 +38,36 @@ private: ...@@ -40,65 +38,36 @@ private:
T *ptr_; T *ptr_;
}; };
template <class T> class referenced {
class lref {
public: public:
lref(T* p = nullptr) : ptr_(p) { // Start with 1 reference
if (ptr_) referenced() { ref_.v = 0; }
ptr_->inc();
}
lref(const lref<T>& pr) : ptr_(pr.ptr_) { // The number of valid references is:
if (ptr_) // ref_.invalid ? 0 : ref_.count+1;
ptr_->inc();
}
~lref() { inline bool valid() const {
if (ptr_) return ref_.invalid == 0;
ptr_->dec();
} }
bool operator==(const lref<T>& pr) const { return ptr_ == pr.ptr_; } inline void inc() const {
bool operator!=(const lref<T>& pr) const { return ptr_ != pr.ptr_; } // If references is 0 (i.e. ref_.count is 0xffffffff) a 32-bit
bool operator==(T* p) const { return ptr_ == p; }	// increment will increase ref_.count to 0, but ref_.invalid
bool operator!=(T* p) const { return ptr_ != p; } // will remain unchanged.
asm volatile("lock; incl %0" : "+m" (ref_.count));
const T * operator->() const { return ptr_; }
T * operator->() { return ptr_; }
T * ptr() const { return ptr_; }
lref<T>& operator=(const lref<T>& pr) {
const T* save = ptr_;
ptr_ = pr.ptr_;
if (ptr_)
ptr_->inc();
if (save)
save->dec();
return *this;
}
private:
lref<T>& operator=( lref<T>& mp );
lref<T>& operator=( T* p );
T *ptr_;
};
class referenced {
public:
referenced() : ref_(0) {}
u64 ref() const {
return ref_;
} }
inline const referenced* inc() const { inline bool tryinc() const {
++ref_; inc();
return this; return valid();
} }
inline void dec() const { inline void dec() const {
if (--ref_ == 0) unsigned char c;
// If references is 1 (i.e. ref_.v is 0), a 64-bit decrement will
// underflow ref_.invalid to 0xffffffff (and ref_.count to 0xffffffff).
asm volatile("lock; decq %0; sets %1" : "+m" (ref_.v), "=qm" (c));
if (c)
onzero(); onzero();
} }
...@@ -109,5 +78,11 @@ protected: ...@@ -109,5 +78,11 @@ protected:
virtual void onzero() const { delete this; } virtual void onzero() const { delete this; }
private: private:
mutable std::atomic<u64> ref_; mutable union {
volatile u64 v;
struct {
volatile u32 count;
volatile u32 invalid;
};
} ref_;
}; };
...@@ -19,7 +19,6 @@ file::file(void) ...@@ -19,7 +19,6 @@ file::file(void)
: type(file::FD_NONE), readable(0), writable(0), : type(file::FD_NONE), readable(0), writable(0),
socket(0), pipe(nullptr), ip(nullptr), off(0) socket(0), pipe(nullptr), ip(nullptr), off(0)
{ {
inc();
} }
void void
......
...@@ -84,13 +84,13 @@ long ...@@ -84,13 +84,13 @@ long
uwq_worker::wait(void) uwq_worker::wait(void)
{ {
acquire(&lock_); acquire(&lock_);
if (uwq_->ref() == 0) if (!uwq_->valid())
this->exit(); this->exit();
running_ = false; running_ = false;
cv_sleep(&cv_, &lock_); cv_sleep(&cv_, &lock_);
if (uwq_->ref() == 0) if (!uwq_->valid())
this->exit(); this->exit();
release(&lock_); release(&lock_);
return 0; return 0;
...@@ -119,7 +119,6 @@ uwq::alloc(vmap* vmap, filetable *ftable) ...@@ -119,7 +119,6 @@ uwq::alloc(vmap* vmap, filetable *ftable)
ksfree(slab_userwq, len); ksfree(slab_userwq, len);
return nullptr; return nullptr;
} }
u->inc();
if (mapkva(vmap->pml4, (char*)len, USERWQ, USERWQSIZE)) { if (mapkva(vmap->pml4, (char*)len, USERWQ, USERWQSIZE)) {
ftable->decref(); ftable->decref();
...@@ -172,7 +171,7 @@ uwq::tryworker(void) ...@@ -172,7 +171,7 @@ uwq::tryworker(void)
// Try to start a worker thread // Try to start a worker thread
scoped_acquire lock0(&lock_); scoped_acquire lock0(&lock_);
if (ref() == 0) if (!valid())
return false; return false;
int slot = -1; int slot = -1;
......
You have added 0 people to this discussion. Please proceed with caution.
Please finish editing this comment first!
Register or sign in to post a comment