Commit 05fd9909 authored by Lucian Grijincu's avatar Lucian Grijincu Committed by Jordan DeLong

folly: speed up fastpath of StaticMeta::get()

Summary:
A smaller function is more likely to be inlined by the compiler
(this fast path was not inlined before this change; it is now).

Test Plan: n/a

Reviewed By: tudorb@fb.com

FB internal diff: D760000
parent c246dd22
......@@ -260,12 +260,15 @@ struct StaticMeta {
}
}
static ElementWrapper& get(int id) {
/**
* Reserve enough space in the threadEntry_.elements for the item
* @id to fit in.
*/
static void reserve(int id) {
size_t prevSize = threadEntry_.elementsCapacity;
if (prevSize <= id) {
size_t newSize = static_cast<size_t>((id + 5) * 1.7);
auto & meta = instance();
ElementWrapper* ptr = NULL;
auto& meta = instance();
ElementWrapper* ptr = nullptr;
// Rely on jemalloc to zero the memory if possible -- maybe it knows
// it's already zeroed and saves us some work.
if (!usingJEMalloc() ||
......@@ -287,9 +290,8 @@ struct StaticMeta {
// and potentially faster when dealing with a lot of memory, as
// it can get already-zeroed pages from the kernel.
if ((ptr = static_cast<ElementWrapper*>(
calloc(newSize, sizeof(ElementWrapper)))) != NULL) {
memcpy(ptr, threadEntry_.elements,
sizeof(ElementWrapper) * prevSize);
calloc(newSize, sizeof(ElementWrapper)))) != nullptr) {
memcpy(ptr, threadEntry_.elements, sizeof(ElementWrapper) * prevSize);
} else {
throw std::bad_alloc();
}
......@@ -314,6 +316,11 @@ struct StaticMeta {
pthread_setspecific(meta.pthreadKey_, &meta);
}
}
static ElementWrapper& get(int id) {
  // Deliberately tiny fast path so the compiler inlines this at call
  // sites; all of the allocation/growth logic lives in reserve().
  const bool mustGrow = threadEntry_.elementsCapacity <= id;
  if (UNLIKELY(mustGrow)) {
    // Slow path: grow threadEntry_.elements until index `id` is valid.
    reserve(id);
  }
  return threadEntry_.elements[id];
}
};
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment