@@ -77,38 +77,9 @@ namespace bg
     void* ShmManager::GetSingleton(const bg::detail::TypeName& type, size_t bytes, bool* first_call)
     {
-        char index;
-        bg::detail::ShmManager::SingletonMap* v12; // r12
-        unsigned __int8 v14; // r13
-        __int64 v15; // r10
-        __int64 v16; // r11
-        bg::detail::ChunkCache::ClassCache* v17; // r12
-        bg::detail::Span* v18; // r8
-        void** m_chunk_list; // rax
-        bg::detail::Span* m_prev; // rdx
-        bool v21; // zf
-        bg::detail::PageHeap* v24; // rdi
-        bg::detail::Span* v25; // rax
-        size_t m_start_page; // rcx
-        size_t m_page_count; // rdi
-        void** p_m_chunk_list; // rax
-        void** v29; // rcx
-        void** v30; // rdi
-        void** v31; // rdx
-        void** v32; // rsi
-        bg::detail::ChunkCache::ClassCache* v33; // rax
-        bg::detail::Span* m_next; // rcx
-        __int64 v35; // [rsp+0h] [rbp-460h]
-        bg::detail::Span* v36; // [rsp+8h] [rbp-458h]
-        bg::detail::PageHeap* p_m_page_heap; // [rsp+10h] [rbp-450h]
-        unsigned __int64 v40; // [rsp+18h] [rbp-448h]
-        //std::pair<bg::detail::TypeName, void*> __v{}; // [rsp+20h] [rbp-440h] BYREF
-        bg::detail::Span* span;
-
         if(!m_singletons)
         {
-            m_singletons = (bg::detail::ShmManager::SingletonMap*)bg::ShmMalloc(
-                sizeof(bg::detail::ShmManager::SingletonMap));
+            m_singletons = (bg::detail::ShmManager::SingletonMap*)bg::ShmMalloc(sizeof(bg::detail::ShmManager::SingletonMap));
             if(!m_singletons)
             {
                 m_singletons = nullptr;
@@ -117,298 +88,210 @@ namespace bg
             new(m_singletons) SingletonMap;
         }
-        v12 = nullptr;
+
         auto itor = m_singletons->find(type);
         if(itor != m_singletons->end())
         {
             return itor->second;
         }
-        if(bytes <= 0x400)
+        uint8_t index{};
+        if(bytes <= BYTES_COMP_VALUE)
         {
-            index = this->m_size_map.m_index_to_class[(bytes + 7) >> 3];
+            index = m_size_map.m_index_to_class[SMALL_BYTES_TO_INDEX(bytes)];
         }
         else
         {
-            if(bytes > 0x40000)
+            if(bytes > CLASS_MAX_BYTES)
             {
-                span = m_page_heap.AllocateSpan((bytes + 0x1FFF) >> 13);
+                bg::detail::Span* span = m_page_heap.AllocateSpan(BYTES_TO_PAGES(bytes + PAGE_BYTES - 1));
                 if(span)
                 {
-                    if(!m_singletons)
+                    void* chunk_span = (void*)PAGES_TO_BYTES(span->m_start_page);
+                    if(chunk_span)
                     {
-                        return nullptr;
+                        m_singletons->operator[](type) = chunk_span;
+                        if(first_call)
+                        {
+                            *first_call = 1;
+                            SHM_INFO("singleton(%s: %p) created.", type.c_str(), chunk_span);
+                        }
+                        return chunk_span;
                     }
-                    goto LABEL_25;
                 }
-                return 0LL;
+                return nullptr;
             }
-            index = this->m_size_map.m_index_to_class[(bytes + 15487) >> 7];
+            index = m_size_map.m_index_to_class[BIG_BYTES_TO_INDEX(bytes)];
         }
-        v14 = index;
-        if(index > 0x56u)
+
+        if(index > CLASS_MAX_COUNT)
         {
-            v16 = index;
-            v17 = &this->m_chunk_cache.m_caches[index];
-            v40 = this->m_size_map.m_class_to_size[index];
-            if(v17 != (bg::detail::ChunkCache::ClassCache*)v17->free_list.m_next)
-                goto LABEL_21;
-            p_m_page_heap = &this->m_page_heap;
-
-            v16 = v14;
-            v15 = v14;
-            v24 = &this->m_page_heap;
-            goto LABEL_35;
+            return nullptr;
         }
-        v15 = v14;
-        v16 = v14;
-        v17 = &this->m_chunk_cache.m_caches[(unsigned __int64)v14];
-        v40 = this->m_size_map.m_class_to_size[v14];
-        if(v17 == (bg::detail::ChunkCache::ClassCache*)v17->free_list.m_next)
+
+        ChunkCache::ClassCache* cache = &this->m_chunk_cache.m_caches[index];
+        size_t size = this->m_size_map.ClassToSize(index);
+        if(cache == (bg::detail::ChunkCache::ClassCache*)cache->free_list.m_next)
         {
-            p_m_page_heap = &this->m_page_heap;
-            v24 = &this->m_page_heap;
-LABEL_35:
-            v35 = v16;
-            v25 = (bg::detail::Span*)v24->AllocateSpan(this->m_size_map.m_class_to_pages[v15]);
-            if(v25)
+            bg::detail::Span* span = m_page_heap.AllocateSpan(m_size_map.m_class_to_pages[index]);
+            if(span)
             {
-                v36 = v25;
-                p_m_page_heap->RegisterSpan(v25);
-                v18 = v36;
-                v16 = v35;
-                m_start_page = v36->m_start_page;
-                m_page_count = v36->m_page_count;
-                p_m_chunk_list = &v36->m_chunk_list;
-                v36->m_size_class = v14;
-                v29 = (void**)(m_start_page << 13);
-                v30 = &v29[1024 * m_page_count];
-                v31 = (void**)((char*)v29 + v40);
-                if((void**)((char*)v29 + v40) <= v30)
+                m_page_heap.RegisterSpan(span);
+                span->m_size_class = index;
+
+                span->m_chunk_list = (void*)PAGES_TO_BYTES(span->m_start_page);
+
+                uintptr_t end_memory = (uintptr_t)PAGES_TO_BYTES(span->m_start_page + span->m_page_count);
+
+                uintptr_t end_ptr = (uintptr_t)span->m_chunk_list + size;
+
+                uintptr_t ptr = (uintptr_t)span->m_chunk_list;
+
+                for(; end_ptr < end_memory; )
                 {
-                    while(1)
+                    *((uintptr_t*)ptr) = end_ptr;
+                    auto temp = end_ptr + size;
+                    if(temp > end_memory)
                     {
-                        v32 = v31;
-                        v31 = (void**)((char*)v31 + v40);
-                        *p_m_chunk_list = v29;
-                        p_m_chunk_list = v29;
-                        if(v30 < v31)
-                            break;
-                        v29 = v32;
+                        break;
                     }
+                    ptr = end_ptr;
+                    end_ptr = temp;
                 }
-                else
-                {
-                    v29 = &v36->m_chunk_list;
-                }
-                *v29 = 0LL;
-                v33 = &this->m_chunk_cache.m_caches[v35];
-                m_next = v33->free_list.m_next;
-                v36->m_prev = &v17->free_list;
-                v36->m_next = m_next;
-                v33->free_list.m_next->m_prev = v36;
-                v33->free_list.m_next = v36;
-                ++v33->span_count;
-                goto LABEL_22;
+
+                *(uintptr_t*)ptr = 0ll;
+                cache->free_list.InstertNextSpan(span);
+                ++cache->span_count;
+
             }
-            return 0LL;
+
         }
-LABEL_21:
-        v18 = v17->free_list.m_next;
-LABEL_22:
-        m_chunk_list = (void**)v18->m_chunk_list;
-        ++v18->m_used_count;
-        m_prev = (bg::detail::Span*)*m_chunk_list;
-        v21 = *m_chunk_list == 0LL;
-        v18->m_chunk_list = *m_chunk_list;
-        if(v21)
+
+        if(cache == (bg::detail::ChunkCache::ClassCache*)cache->free_list.m_next)
         {
-            m_prev = v18->m_prev;
-            m_prev->m_next = v18->m_next;
-            v18->m_next->m_prev = m_prev;
-            v18->m_prev = 0LL;
-            v18->m_next = 0LL;
-            --this->m_chunk_cache.m_caches[v16].span_count;
+            return nullptr;
         }
-        v12 = (bg::detail::ShmManager::SingletonMap*)m_chunk_list;
-LABEL_25:
-        (*m_singletons)[type] = v12;
+
+        auto next_span = cache->free_list.m_next;
+        ++next_span->m_used_count;
+        auto chunk_span = next_span->m_chunk_list;
+        next_span->m_chunk_list = *(void**)chunk_span;
+        if(*(void**)chunk_span == nullptr)
+        {
+            cache->free_list.RemoveNextSpan();
+            --this->m_chunk_cache.m_caches[index].span_count;
+        }
+
+        m_singletons->operator[](type) = chunk_span;
         if(first_call)
+        {
             *first_call = 1;
-
-        return v12;
+            SHM_INFO("singleton(%s: %p) created.", type.c_str(), chunk_span);
+        }
+        return chunk_span;
     }
 
     void ShmManager::FreeSingleton(const bg::detail::TypeName& type)
     {
-        unsigned __int64 v7; // rax
-        bg::detail::RadixTree<10ul, 10ul, 15ul>::NodeV1* v8; // rsi
-        bg::detail::RadixTree<10ul, 10ul, 15ul>::NodeV2* v9; // rcx
-        bg::detail::Span* v10; // rsi
-        __int64 m_size_class; // rdx
-        size_t m_used_count; // rcx
-        void* m_chunk_list; // r8
-        size_t v14; // rcx
-        bg::detail::PageHeap* p_m_page_heap; // rdi
-        bool v16; // cl
-        __int64 v17; // rdx
-        char* v18; // rax
-        bg::detail::Span* m_prev; // rdx
-        bg::detail::ShmManager::SingletonMap* v20; // rbx
-        bg::detail::ChunkCache::ClassCache* v23; // rax
-        bg::detail::Span* v24; // rcx
-        bg::detail::Span* m_next; // rdi
-
         if(m_singletons)
         {
            auto itor = m_singletons->find(type);
-            if(itor != m_singletons->end())
+            if(itor == m_singletons->end())
+            {
+                return;
+            }
+            SHM_INFO("singleton(%s: %p) destroyed.", type.c_str(), itor->second);
+
+            bg::detail::Span* span = m_page_heap.GetSpanMap(BYTES_TO_PAGES((uintptr_t)itor->second));
+            if(span)
             {
-                v7 = *(uintptr_t*)&itor->second;
-                if(v7)
+                do
                 {
-                    if(v7 >> 13 > 0x7FFFFFFFFLL)
-                    {
+                    if(span->m_size_class == 0xff)
+                    {
+                        m_page_heap.DeallocateSpan(span);
                     }
                     else
                     {
-                        v8 = this->m_page_heap.m_span_map.lv0[v7 >> 38];
-                        if(v8)
+                        if(span->m_used_count && span->m_size_class <= CLASS_MAX_COUNT)
                         {
-                            v9 = v8->lv1[(v7 >> 28) & 0x3FF];
-                            if(v9)
+                            span->m_used_count -= 1;
+                            auto chunk_list = span->m_chunk_list;
+                            *(uintptr_t*)itor->second = (uintptr_t)span->m_chunk_list;
+                            span->m_chunk_list = itor->second;
+
+                            if(chunk_list)
                             {
-                                v10 = v9->lv2[(v7 >> 13) & 0x7FFF];
-                                if(v10)
+                                if(span->m_used_count == 0)
                                 {
-                                    m_size_class = v10->m_size_class;
-                                    if((char)m_size_class == 0xFF)
+                                    if(m_chunk_cache.m_caches[span->m_size_class].span_count > 1)
                                     {
-                                        m_page_heap.DeallocateSpan(v10);
+                                        span->m_prev->RemoveNextSpan();
+                                        span->m_in_use = false;
+                                        span->m_used_count = 0LL;
+                                        span->m_size_class = -1;
+                                        span->m_chunk_list = 0LL;
+                                        m_page_heap.DeallocateSpan(span);
                                     }
-                                    else
+                                }
+                            }
+                            else
+                            {
+                                if(span->m_used_count == 0)
+                                {
+                                    if(m_chunk_cache.m_caches[span->m_size_class].span_count)
                                     {
-                                        m_used_count = v10->m_used_count;
-                                        if(m_used_count && (unsigned __int8)m_size_class <= 0x56u)
-                                        {
-                                            m_chunk_list = v10->m_chunk_list;
-                                            v14 = m_used_count - 1;
-                                            p_m_page_heap = &this->m_page_heap;
-                                            *(uintptr_t*)v7 = (uintptr_t)m_chunk_list;
-                                            v10->m_used_count = v14;
-                                            v16 = v14 == 0;
-                                            v10->m_chunk_list = (void*)v7;
-                                            if(m_chunk_list)
-                                            {
-                                                if(v16)
-                                                {
-                                                    v17 = m_size_class << 6;
-                                                    v18 = (char*)&this->m_chunk_cache.m_caches[0].free_list.m_chunk_list + v17;
-                                                    if(*(size_t*)((char*)&this->m_chunk_cache.m_caches[0].span_count + v17) > 1)
-                                                    {
-                                                        m_prev = v10->m_prev;
-                                                        m_prev->m_next = v10->m_next;
-                                                        v10->m_next->m_prev = m_prev;
-                                                        v10->m_prev = 0LL;
-                                                        v10->m_next = 0LL;
-                                                        --* ((uintptr_t*)v18 + 1);
-                                                        v10->m_used_count = 0LL;
-                                                        v10->m_size_class = -1;
-                                                        v10->m_chunk_list = 0LL;
-                                                        p_m_page_heap->DeallocateSpan(v10);
-                                                    }
-                                                }
-                                            }
-                                            else
-                                            {
-                                                v23 = &this->m_chunk_cache.m_caches[m_size_class];
-                                                if(v16)
-                                                {
-                                                    if(v23->span_count)
-                                                    {
-                                                        v10->m_used_count = 0LL;
-                                                        v10->m_size_class = -1;
-                                                        v10->m_chunk_list = 0LL;
-                                                        p_m_page_heap->DeallocateSpan(v10);
-                                                    }
-                                                    else
-                                                    {
-                                                        m_next = v23->free_list.m_next;
-                                                        v10->m_prev = &v23->free_list;
-                                                        v10->m_next = m_next;
-                                                        v23->free_list.m_next->m_prev = v10;
-                                                        v23->free_list.m_next = v10;
-                                                        ++v23->span_count;
-                                                    }
-                                                }
-                                                else
-                                                {
-                                                    v24 = v23->free_list.m_next;
-                                                    v10->m_prev = &v23->free_list;
-                                                    v10->m_next = v24;
-                                                    v23->free_list.m_next->m_prev = v10;
-                                                    v23->free_list.m_next = v10;
-                                                    ++v23->span_count;
-                                                }
-                                            }
-                                        }
-                                        else
-                                        {
-                                        }
+                                        span->m_used_count = 0LL;
+                                        span->m_size_class = -1;
+                                        span->m_chunk_list = 0LL;
+                                        m_page_heap.DeallocateSpan(span);
+                                        break;
                                     }
-                                    goto LABEL_21;
+
                                 }
+                                m_chunk_cache.m_caches[span->m_size_class].free_list.InstertNextSpan(span);
+                                m_chunk_cache.m_caches[span->m_size_class].span_count++;
                             }
                         }
-                            }
-
-                    }
-LABEL_21:
-        v20 = this->m_singletons;
-        v20->erase(itor);
+                    }
+                } while(false);
             }
+            m_singletons->erase(itor);
+
         }
+
+        return;
+
     }
 
     bool ShmManager::OnResume(const char* identifier)
     {
-        int v2; // eax
-        size_t v3; // rbx
-        unsigned int v4; // eax
-        unsigned int v5; // r13d
         char magic[64]; // [rsp+0h] [rbp-60h] BYREF
 
-        v2 = snprintf(magic, 0x40uLL, "ByteGame/ShmManager-%s", identifier);
-        if(!strncmp(magic, this->m_magic, v2))
+        int size = snprintf(magic, 0x40uLL, "ByteGame/ShmManager-%s", identifier);
+        if(!strncmp(magic, this->m_magic, size))
         {
-            v3 = 0LL;
-            if(this->m_block_count)
+            size_t index = 0;
+            for(; index < m_block_count; ++index)
            {
-                while(1)
+                if(!AttachBlock(index))
                 {
-                    v4 = AttachBlock(v3);
-                    v5 = v4;
-                    if(!(char)v4)
-                        break;
-                    if(this->m_block_count <= ++v3)
-                        goto LABEL_7;
+                    SHM_ERROR("failed to attach block(%lu), address(%#lx), used size(%#lx), real size(%#lx), mmap size(%#lx).", index, m_blocks[index].addr, m_blocks[index].used_size, m_blocks[index].real_size, m_blocks[index].mmap_size);
+                    return false;
                 }
-                return v5;
             }
-            else
+            if(m_block_count <= index)
             {
-LABEL_7:
-                ++this->m_version;
+                m_version++;
                 return true;
             }
         }
-        else
-        {
-            return false;
-        }
+
+        SHM_ERROR("magic mismatch, expected: %s, actual: %s.", magic, this->m_magic);
+        return false;
     }
 
     bool ShmManager::OnCreate(void)
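
// Illustrative aside, not part of the patch: the chunk-threading loop in the new
// GetSingleton() above carves a freshly allocated span into fixed-size chunks and
// links them through their first word. A minimal standalone sketch of the same
// pattern; the function name and signature are invented for illustration, and it
// assumes chunk_size >= sizeof(uintptr_t) and total >= chunk_size.

#include <cstddef>
#include <cstdint>

void* thread_chunks(void* base, std::size_t total, std::size_t chunk_size)
{
    std::uintptr_t ptr  = (std::uintptr_t)base;        // chunk currently being linked
    std::uintptr_t end  = ptr + total;                 // one past the usable region
    std::uintptr_t next = ptr + chunk_size;            // start of the following chunk
    while(next + chunk_size <= end)                    // stop once the next chunk would not fit
    {
        *(std::uintptr_t*)ptr = next;                  // first word of the chunk points at the next one
        ptr  = next;
        next += chunk_size;
    }
    *(std::uintptr_t*)ptr = 0;                         // terminate the singly linked free list
    return base;                                       // head of the chunk list
}
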
@@ -418,34 +301,21 @@ LABEL_7:
     bool ShmManager::DeleteBlock(size_t index)
     {
-        unsigned int v3; // r14d
-        char* v4; // rbx
-        unsigned __int64 v5; // rsi
-        char path[256]; // [rsp+0h] [rbp-130h] BYREF
-
-        if(this->m_block_count <= index)
+        SHM_ASSERT_RETURN_FALSE(index < m_block_count);
+        char path[256];
+        if(m_blocks[index].addr)
         {
-            v3 = 0;
+            m_options.AddFixedAddress((uintptr_t)m_blocks[index].addr);
+            snprintf(path, 0x100uLL, "%s-%03zu.mmap", this->m_options.identifier, index);
+            bg::detail::ShmObjectDelete(m_blocks[index].addr, m_blocks[index].mmap_size, path);
+            SHM_INFO("block(%lu) deleted, address(%#lx), used size(%#lx), real size(%#lx), mmap size(%#lx), path(%s).",
+                index, m_blocks[index].addr, m_blocks[index].used_size, m_blocks[index].real_size, m_blocks[index].mmap_size, path);
+            new (&m_blocks[index]) ShmBlock();
         }
-        else
-        {
-            v3 = 1;
-            v4 = (char*)this + 32 * index;
-            v5 = *((uintptr_t*)v4 + 3113);
-            if(v5)
-            {
-                m_options.AddFixedAddress(v5);
-                snprintf(path, 0x100uLL, "%s-%03zu.mmap", this->m_options.identifier, index);
-                bg::detail::ShmObjectDelete(*((void**)v4 + 3113), (uintptr_t) * ((void**)v4 + 3116), path);
-                *((uintptr_t*)v4 + 3113) = 0LL;
-                *((uintptr_t*)v4 + 3114) = 0LL;
-                *((uintptr_t*)v4 + 3115) = 0LL;
-                *((uintptr_t*)v4 + 3116) = 0LL;
-            }
-        }
-        return v3;
+
+        return true;
     }
 
     void* ShmManager::AllocateInBlock(size_t index, size_t bytes, size_t alignment)
@@ -607,33 +477,33 @@ LABEL_13:
         return result;
     }
 
-    bool ShmManager::ResizeBlock(size_t index, const char* new_size)
+    bool ShmManager::ResizeBlock(size_t index, size_t new_size)
     {
-        bool v3; // cc
-        unsigned int v5; // r13d
-        size_t new_sizea; // [rsp+8h] [rbp-128h] BYREF
         char path[256]; // [rsp+10h] [rbp-120h] BYREF
 
-        v3 = this->m_block_count <= index;
-        new_sizea = (size_t)new_size;
-        if(v3)
-        {
-            return false;
-        }
-        else if(this->m_blocks[index].mmap_size < (unsigned __int64)new_size)
+        SHM_ASSERT_RETURN_FALSE(index < m_block_count);
+
+        if(m_blocks[index].mmap_size < new_size)
         {
+            SHM_ERROR("new size(%#lx) too large, block(%lu), mmap size(%#lx).", new_size, index, m_blocks[index].mmap_size);
             return false;
         }
         else
         {
             snprintf(path, 0x100uLL, "%s-%03zu.mmap", this->m_options.identifier, index);
-            v5 = bg::detail::ShmObjectResize(path, &new_sizea);
-            if((char)v5)
+            if(bg::detail::ShmObjectResize(path, &new_size))
             {
+                SHM_INFO("block(%lu) resized, old size(%#lx), new size(%#lx), used size(%#lx), mmap size(%#lx).",
+                    index, m_blocks[index].real_size, new_size, m_blocks[index].used_size, m_blocks[index].mmap_size);
-                this->m_blocks[index].real_size = new_sizea;
+                this->m_blocks[index].real_size = new_size;
+                return true;
             }
-            return v5;
+
         }
+        return false;
     }
 
 }
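
// Reference aside, not part of the patch: plausible definitions for the new macro
// names, reconstructed from the magic numbers in the deleted decompiled lines
// (8 KiB pages via << 13 / >> 13, 0x400 and 0x40000 thresholds, 0x56 classes).
// The project's real header may spell these differently.
#define PAGE_SHIFT              13                          // spans are managed in 8 KiB (0x2000) pages
#define PAGE_BYTES              (1ull << PAGE_SHIFT)
#define BYTES_TO_PAGES(b)       ((b) >> PAGE_SHIFT)
#define PAGES_TO_BYTES(p)       ((p) << PAGE_SHIFT)
#define BYTES_COMP_VALUE        0x400                       // small/large request threshold (1 KiB)
#define CLASS_MAX_BYTES         0x40000                     // larger requests go straight to the page heap
#define CLASS_MAX_COUNT         0x56                        // 86 size classes
#define SMALL_BYTES_TO_INDEX(b) (((b) + 7) >> 3)            // was: (bytes + 7) >> 3
#define BIG_BYTES_TO_INDEX(b)   (((b) + 15487) >> 7)        // was: (bytes + 15487) >> 7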