[TIPC]: Optimized initialization of TIPC reference table
This patch modifies TIPC's reference table code to delay initializing
table entries until they are actually needed by applications.

Signed-off-by: Allan Stephens <allan.stephens@windriver.com>
Signed-off-by: David S. Miller <davem@davemloft.net>
parent 4784b7c348
commit 0089509826
net/tipc/ref.c | 123
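
For orientation before the diff, here is a minimal userspace sketch of the lazy-initialization idea described above: the table is zero-filled when it is allocated, and an individual entry is only set up the first time it is handed out, either by recycling a previously freed slot or by advancing an "initialization point". All sketch_* names are hypothetical, and the real code's locking and reference-value encoding are omitted; this is not the TIPC implementation itself.

#include <stdio.h>
#include <stdlib.h>

struct sketch_entry {
	void *object;        /* NULL while unused or never initialized */
	unsigned int next;   /* next recycled index; 0 terminates the list */
};

struct sketch_table {
	struct sketch_entry *entries;
	unsigned int capacity;    /* first unusable index */
	unsigned int init_point;  /* first never-initialized index */
	unsigned int first_free;  /* head of recycled-entry list, 0 = empty */
};

static int sketch_init(struct sketch_table *t, unsigned int size)
{
	/* calloc() zeroes every entry, so no per-entry setup loop is needed */
	t->entries = calloc(size, sizeof(*t->entries));
	if (!t->entries)
		return -1;
	t->capacity = size;
	t->init_point = 1;   /* index 0 is reserved as the "no entry" marker */
	t->first_free = 0;
	return 0;
}

static unsigned int sketch_acquire(struct sketch_table *t, void *object)
{
	unsigned int index;

	if (t->first_free) {                        /* reuse a recycled entry */
		index = t->first_free;
		t->first_free = t->entries[index].next;
	} else if (t->init_point < t->capacity) {   /* lazily set up a new one */
		index = t->init_point++;
	} else {
		return 0;                           /* table exhausted */
	}
	t->entries[index].object = object;
	return index;
}

int main(void)
{
	struct sketch_table t;
	int obj = 42;

	if (sketch_init(&t, 16))
		return 1;
	printf("acquired index %u\n", sketch_acquire(&t, &obj));
	free(t.entries);
	return 0;
}
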
@@ -50,7 +50,7 @@
  * struct reference - TIPC object reference entry
  * @object: pointer to object associated with reference entry
  * @lock: spinlock controlling access to object
- * @data: reference value associated with object (or link to next unused entry)
+ * @data: reference value for object (combines instance & array index info)
  */
 
 struct reference {
@@ -65,31 +65,40 @@ struct reference {
 /**
  * struct tipc_ref_table - table of TIPC object reference entries
  * @entries: pointer to array of reference entries
- * @index_mask: bitmask for array index portion of reference values
+ * @capacity: array index of first unusable entry
+ * @init_point: array index of first uninitialized entry
  * @first_free: array index of first unused object reference entry
  * @last_free: array index of last unused object reference entry
+ * @index_mask: bitmask for array index portion of reference values
+ * @start_mask: initial value for instance value portion of reference values
  */
 
 struct ref_table {
 	struct reference *entries;
-	u32 index_mask;
+	u32 capacity;
+	u32 init_point;
 	u32 first_free;
 	u32 last_free;
+	u32 index_mask;
+	u32 start_mask;
 };
 
 /*
  * Object reference table consists of 2**N entries.
  *
- * A used entry has object ptr != 0, reference == XXXX|own index
- *                                   (XXXX changes each time entry is acquired)
- * A free entry has object ptr == 0, reference == YYYY|next free index
- *                                   (YYYY is one more than last used XXXX)
+ * State         Object ptr      Reference
+ * -----         ----------      ---------
+ * In use        non-NULL        XXXX|own index
+ *                               (XXXX changes each time entry is acquired)
+ * Free          NULL            YYYY|next free index
+ *                               (YYYY is one more than last used XXXX)
+ * Uninitialized NULL            0
  *
- * Free list is initially chained from entry (2**N)-1 to entry 1.
- * Entry 0 is not used to allow index 0 to indicate the end of the free list.
+ * Entry 0 is not used; this allows index 0 to denote the end of the free list.
  *
- * Note: Any accidental reference of the form XXXX|0--0 won't match entry 0
- * because entry 0's reference field has the form XXXX|1--1.
+ * Note that a reference value of 0 does not necessarily indicate that an
+ * entry is uninitialized, since the last entry in the free list could also
+ * have a reference value of 0 (although this is unlikely).
  */
 
 static struct ref_table tipc_ref_table = { NULL };
@@ -103,29 +112,29 @@ static DEFINE_RWLOCK(ref_table_lock);
 int tipc_ref_table_init(u32 requested_size, u32 start)
 {
 	struct reference *table;
-	u32 sz = 1 << 4;
-	u32 index_mask;
-	int i;
+	u32 actual_size;
 
-	while (sz < requested_size) {
-		sz <<= 1;
-	}
-	table = vmalloc(sz * sizeof(*table));
+	/* account for unused entry, then round up size to a power of 2 */
+
+	requested_size++;
+	for (actual_size = 16; actual_size < requested_size; actual_size <<= 1)
+		/* do nothing */ ;
+
+	/* allocate table & mark all entries as uninitialized */
+
+	table = __vmalloc(actual_size * sizeof(struct reference),
+			  GFP_KERNEL | __GFP_HIGHMEM | __GFP_ZERO, PAGE_KERNEL);
 	if (table == NULL)
 		return -ENOMEM;
 
-	write_lock_bh(&ref_table_lock);
-	index_mask = sz - 1;
-	for (i = sz - 1; i >= 0; i--) {
-		table[i].object = NULL;
-		spin_lock_init(&table[i].lock);
-		table[i].data.next_plus_upper = (start & ~index_mask) + i - 1;
-	}
 	tipc_ref_table.entries = table;
-	tipc_ref_table.index_mask = index_mask;
-	tipc_ref_table.first_free = sz - 1;
-	tipc_ref_table.last_free = 1;
-	write_unlock_bh(&ref_table_lock);
+	tipc_ref_table.capacity = requested_size;
+	tipc_ref_table.init_point = 1;
+	tipc_ref_table.first_free = 0;
+	tipc_ref_table.last_free = 0;
+	tipc_ref_table.index_mask = actual_size - 1;
+	tipc_ref_table.start_mask = start & ~tipc_ref_table.index_mask;
+
 	return TIPC_OK;
 }
 
@@ -156,7 +165,7 @@ u32 tipc_ref_acquire(void *object, spinlock_t **lock)
 	u32 index;
 	u32 index_mask;
 	u32 next_plus_upper;
-	u32 reference = 0;
+	u32 reference;
 
 	if (!object) {
 		err("Attempt to acquire reference to non-existent object\n");
@@ -167,6 +176,8 @@ u32 tipc_ref_acquire(void *object, spinlock_t **lock)
 		return 0;
 	}
 
+	/* take a free entry, if available; otherwise initialize a new entry */
+
 	write_lock_bh(&ref_table_lock);
 	if (tipc_ref_table.first_free) {
 		index = tipc_ref_table.first_free;
@@ -179,11 +190,23 @@ u32 tipc_ref_acquire(void *object, spinlock_t **lock)
 		reference = (next_plus_upper & ~index_mask) + index;
 		entry->data.reference = reference;
 		entry->object = object;
-		if (lock != NULL)
-			*lock = &entry->lock;
 		spin_unlock_bh(&entry->lock);
+		*lock = &entry->lock;
+	}
+	else if (tipc_ref_table.init_point < tipc_ref_table.capacity) {
+		index = tipc_ref_table.init_point++;
+		entry = &(tipc_ref_table.entries[index]);
+		spin_lock_init(&entry->lock);
+		reference = tipc_ref_table.start_mask + index;
+		entry->data.reference = reference;
+		entry->object = object;
+		*lock = &entry->lock;
+	}
+	else {
+		reference = 0;
 	}
 	write_unlock_bh(&ref_table_lock);
+
 	return reference;
 }
 
@@ -200,20 +223,17 @@ void tipc_ref_discard(u32 ref)
 	u32 index;
 	u32 index_mask;
 
-	if (!ref) {
-		err("Attempt to discard reference 0\n");
-		return;
-	}
 	if (!tipc_ref_table.entries) {
 		err("Reference table not found during discard attempt\n");
 		return;
 	}
 
-	write_lock_bh(&ref_table_lock);
 	index_mask = tipc_ref_table.index_mask;
 	index = ref & index_mask;
 	entry = &(tipc_ref_table.entries[index]);
 
+	write_lock_bh(&ref_table_lock);
+
 	if (!entry->object) {
 		err("Attempt to discard reference to non-existent object\n");
 		goto exit;
@@ -223,18 +243,23 @@ void tipc_ref_discard(u32 ref)
 		goto exit;
 	}
 
-	/* mark entry as unused */
+	/*
+	 * mark entry as unused; increment upper bits of entry's data field
+	 * to invalidate any subsequent references
+	 */
+
 	entry->object = NULL;
+	entry->data.next_plus_upper = (ref & ~index_mask) + (index_mask + 1);
+
+	/* append entry to free entry list */
+
 	if (tipc_ref_table.first_free == 0)
 		tipc_ref_table.first_free = index;
 	else
-		/* next_plus_upper is always XXXX|0--0 for last free entry */
 		tipc_ref_table.entries[tipc_ref_table.last_free].
 			data.next_plus_upper |= index;
 	tipc_ref_table.last_free = index;
 
-	/* increment upper bits of entry to invalidate subsequent references */
-	entry->data.next_plus_upper = (ref & ~index_mask) + (index_mask + 1);
 exit:
 	write_unlock_bh(&ref_table_lock);
 }
@@ -249,10 +274,13 @@ void *tipc_ref_lock(u32 ref)
 		struct reference *r;
 
 		r = &tipc_ref_table.entries[ref & tipc_ref_table.index_mask];
-		spin_lock_bh(&r->lock);
-		if (likely(r->data.reference == ref))
-			return r->object;
-		spin_unlock_bh(&r->lock);
+
+		if (likely(r->data.reference != 0)) {
+			spin_lock_bh(&r->lock);
+			if (likely((r->data.reference == ref) && (r->object)))
+				return r->object;
+			spin_unlock_bh(&r->lock);
+		}
 	}
 	return NULL;
 }
@@ -267,11 +295,12 @@ void tipc_ref_unlock(u32 ref)
 		struct reference *r;
 
 		r = &tipc_ref_table.entries[ref & tipc_ref_table.index_mask];
-		if (likely(r->data.reference == ref))
+
+		if (likely((r->data.reference == ref) && (r->object)))
 			spin_unlock_bh(&r->lock);
 		else
 			err("tipc_ref_unlock() invoked using "
-			    "obsolete reference\n");
+			    "invalid reference\n");
 	}
 }
 
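As the reworked comment block in the diff explains, a reference value packs an instance count into the upper bits and the entry's array index into the lower bits, and tipc_ref_discard() invalidates old handles by adding index_mask + 1 to the stored value, which bumps the instance field by one while resetting the index bits. A small self-contained sketch of that validation idea, using a hypothetical 8-bit index field (not TIPC code):

#include <stdint.h>
#include <stdio.h>

#define INDEX_MASK 0xFFu  /* low 8 bits select the array slot (assumed width) */

/* Combine an instance value (upper bits) with an array index (lower bits). */
static uint32_t make_ref(uint32_t instance_bits, uint32_t index)
{
	return (instance_bits & ~INDEX_MASK) + (index & INDEX_MASK);
}

int main(void)
{
	uint32_t ref = make_ref(0x100, 5);   /* handed out to the caller */
	uint32_t stored = ref;               /* kept in the entry's data field */

	/* a live reference matches the stored value */
	printf("valid: %d\n", stored == ref);                  /* prints 1 */

	/* discarding bumps the upper bits, so the old handle no longer matches */
	stored = (ref & ~INDEX_MASK) + (INDEX_MASK + 1);
	printf("valid: %d\n", stored == ref);                  /* prints 0 */
	return 0;
}

This mismatch is exactly what the reworked tipc_ref_lock() and tipc_ref_unlock() checks rely on to reject stale or never-issued references.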