shm_manager.cc 51 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
7127812791280128112821283128412851286128712881289129012911292129312941295129612971298129913001301130213031304130513061307130813091310131113121313131413151316131713181319132013211322132313241325132613271328132913301331133213331334133513361337133813391340134113421343134413451346134713481349135013511352135313541355135613571358135913601361136213631364136513661367136813691370137113721373137413751376137713781379138013811382138313841385
  1. #include "shm_manager.h"
  2. #include "shm_object.h"
  3. #include "shm_config.h"
  4. extern bg::detail::ShmContext g_shm_ctx;
  5. namespace bg
  6. {
  7. namespace detail
  8. {
// Construct a manager bound to `options` and stamp the magic header
// ("ByteGame/ShmManager-<identifier>") that OnResume() later compares to
// verify a re-attached region was created under the same identifier.
ShmManager::ShmManager(const bg::ShmOptions& options) : m_options(options)
{
    // 0x40 bytes written at most; snprintf always NUL-terminates.
    // assumes m_magic is a 64-byte char array -- TODO confirm in shm_manager.h
    snprintf(this->m_magic, 0x40uLL, "ByteGame/ShmManager-%s", this->m_options.identifier);
}
  13. ShmManager::~ShmManager()
  14. {
  15. if(m_singletons)
  16. {
  17. m_singletons->clear();
  18. bg::ShmFree(m_singletons);
  19. this->m_singletons = nullptr;
  20. }
  21. for(size_t i = 0LL; this->m_block_count > i; ++i)
  22. {
  23. DeleteBlock(i);
  24. }
  25. }
  26. void* ShmManager::AllocateRawMemory(size_t* bytes, size_t alignment)
  27. {
  28. void* result = nullptr;
  29. if((alignment & (alignment - 1)) != 0)
  30. {
  31. SHM_ERROR("invalid alignment(%#lx).", (const char*)alignment);
  32. return result;
  33. }
  34. *bytes = PALIGN_UP(*bytes, alignment);
  35. if(m_block_count)
  36. {
  37. result = AllocateInBlock(m_block_count - 1, *bytes, alignment);
  38. if(result)
  39. {
  40. return result;
  41. }
  42. }
  43. if(CreateBlock(m_block_count, *bytes))
  44. {
  45. size_t index = m_block_count > 1 ? m_block_count - 1 : 0;
  46. result = AllocateInBlock(index, *bytes, alignment);
  47. if(!result)
  48. {
  49. SHM_ERROR("newly created block(%lu) still cannot satisfy requirement, addr(%#lx), used size(%#lx), real size(%#lx), real bytes(%#lx), alignment(%#lx).",
  50. m_block_count, m_blocks[index].addr, m_blocks[index].used_size, m_blocks[index].real_size);
  51. return result;
  52. }
  53. }
  54. else
  55. {
  56. SHM_ERROR("failed to create block(%lu), real bytes(%#lx).", this->m_block_count, *bytes);
  57. }
  58. return result;
  59. }
  60. bool ShmManager::HasSingleton(const bg::detail::TypeName& type)
  61. {
  62. auto itor = m_singletons->find(type);
  63. if(itor == m_singletons->end())
  64. {
  65. return false;
  66. }
  67. return true;
  68. }
// Look up (or create) the singleton registered under `type`.  `bytes` is the
// instance size, used only on the creation path.  When a new instance is
// carved out, *first_call (if non-null) is set so the caller knows to run the
// object's constructor.  Returns nullptr on exhaustion.
void* ShmManager::GetSingleton(const bg::detail::TypeName& type, size_t bytes, bool* first_call)
{
    // The singleton map itself lives in shared memory and is created lazily.
    if(!m_singletons)
    {
        m_singletons = (bg::detail::ShmManager::SingletonMap*)bg::ShmMalloc(sizeof(bg::detail::ShmManager::SingletonMap));
        if(!m_singletons)
        {
            m_singletons = nullptr;
            return nullptr;
        }
        new(m_singletons) SingletonMap;
    }
    // Fast path: instance already registered.
    auto itor = m_singletons->find(type);
    if(itor != m_singletons->end())
    {
        return itor->second;
    }
    // Map the requested size onto a size-class index.
    uint8_t index{};
    if(bytes <= BYTES_COMP_VALUE)
    {
        index = m_size_map.m_index_to_class[SMALL_BYTES_TO_INDEX(bytes)];
    }
    else
    {
        if(bytes > CLASS_MAX_BYTES)
        {
            // Too big for any size class: allocate whole pages directly.
            // NOTE(review): BYTES_TO_PAGES(bytes + PAGE_BYTES) rounds up one
            // page more than ShmMalloc's bytes + 0x1FFF — confirm intended.
            bg::detail::Span* span = m_page_heap.AllocateSpan(BYTES_TO_PAGES(bytes + PAGE_BYTES));
            if(span)
            {
                void* chunk_span = (void*)PAGES_TO_BYTES(span->m_start_page);
                if(chunk_span)
                {
                    m_singletons->operator[](type) = chunk_span;
                    if(first_call)
                    {
                        *first_call = 1;
                        SHM_INFO("singleton(%s: %p) created.", type.c_str(), chunk_span);
                    }
                    return chunk_span;
                }
            }
            return nullptr;
        }
        index = m_size_map.m_index_to_class[BIG_BYTES_TO_INDEX(bytes)];
    }
    if(index > CLASS_MAX_COUNT)
    {
        return nullptr;
    }
    // Per-class chunk cache ("cheche" sic).  The circular free list points
    // back at the cache head itself when it is empty.
    ChunkCache::ClassCache* cheche = &this->m_chunk_cache.m_caches[index];
    size_t size = this->m_size_map.ClassToSize(index);
    if(cheche == (bg::detail::ChunkCache::ClassCache*)cheche->free_list.m_next)
    {
        // Cache empty: fetch a fresh span and thread its pages into a singly
        // linked free list of `size`-byte chunks (each chunk's first word
        // points at the next chunk).
        bg::detail::Span* span = m_page_heap.AllocateSpan(m_size_map.m_class_to_pages[index]);
        if(span)
        {
            m_page_heap.RegisterSpan(span);
            span->m_size_class = index;
            span->m_chunk_list = (void*)PAGES_TO_BYTES(span->m_start_page);
            uintptr_t end_memory = (uintptr_t)PAGES_TO_BYTES(span->m_start_page + span->m_page_count);
            uintptr_t end_ptr = (uintptr_t)span->m_chunk_list + size;
            uintptr_t ptr = (uintptr_t)span->m_chunk_list;
            for(; end_ptr < end_memory; )
            {
                *((uintptr_t*)ptr) = end_ptr;
                auto temp = end_ptr + size;
                if(temp > end_memory)
                {
                    break;
                }
                ptr = end_ptr;
                end_ptr = temp;
            }
            // Terminate the chunk list at the last chunk that fully fits.
            *(uintptr_t*)ptr = 0ll;
            cheche->free_list.InstertNextSpan(span);
            ++cheche->span_count;
        }
    }
    // Still empty: the page heap is exhausted.
    if(cheche == (bg::detail::ChunkCache::ClassCache*)cheche->free_list.m_next)
    {
        return nullptr;
    }
    // Pop one chunk off the first span in the cache.
    auto next_span = cheche->free_list.m_next;
    ++next_span->m_used_count;
    auto chunk_span = next_span->m_chunk_list;
    next_span->m_chunk_list = *(void**)chunk_span;
    if(*(void**)chunk_span == nullptr)
    {
        // That was the span's last free chunk: drop the span from the cache.
        cheche->free_list.RemoveNextSpan();
        --this->m_chunk_cache.m_caches[index].span_count;
    }
    m_singletons->operator[](type) = chunk_span;
    if(first_call)
    {
        *first_call = 1;
        SHM_INFO("singleton(%s: %p) created.", type.c_str(), chunk_span);
    }
    return chunk_span;
}
  168. void ShmManager::FreeSingleton(const bg::detail::TypeName& type)
  169. {
  170. if(m_singletons)
  171. {
  172. auto itor = m_singletons->find(type);
  173. if(itor == m_singletons->end())
  174. {
  175. return;
  176. }
  177. SHM_INFO("singleton(%s: %p) destroyed.", type.c_str());
  178. bg::detail::Span* span = m_page_heap.GetSpanMap(BYTES_TO_PAGES((uintptr_t)itor->second));
  179. if(span)
  180. {
  181. do
  182. {
  183. if(span->m_size_class == 0xff)
  184. {
  185. m_page_heap.DeallocateSpan(span);
  186. }
  187. else
  188. {
  189. if(span->m_used_count && span->m_size_class <= CLASS_MAX_COUNT)
  190. {
  191. span->m_used_count -= 1;
  192. auto chunk_list = span->m_chunk_list;
  193. *(uintptr_t*)itor->second = (uintptr_t)span->m_chunk_list;
  194. span->m_chunk_list = itor->second;
  195. if(chunk_list)
  196. {
  197. if(span->m_used_count == 0)
  198. {
  199. if(m_chunk_cache.m_caches[span->m_size_class].span_count > 1)
  200. {
  201. span->m_prev->RemoveNextSpan();
  202. span->m_in_use = false;
  203. span->m_used_count = 0LL;
  204. span->m_size_class = -1;
  205. span->m_chunk_list = 0LL;
  206. m_page_heap.DeallocateSpan(span);
  207. }
  208. }
  209. }
  210. else
  211. {
  212. if(span->m_used_count == 0)
  213. {
  214. if(m_chunk_cache.m_caches[span->m_size_class].span_count)
  215. {
  216. span->m_used_count = 0LL;
  217. span->m_size_class = -1;
  218. span->m_chunk_list = 0LL;
  219. m_page_heap.DeallocateSpan(span);
  220. break;
  221. }
  222. }
  223. m_chunk_cache.m_caches[span->m_size_class].free_list.m_next->InstertNextSpan(span);
  224. m_chunk_cache.m_caches[span->m_size_class].span_count++;
  225. }
  226. }
  227. }
  228. } while(false);
  229. }
  230. m_singletons->erase(itor);
  231. }
  232. return;
  233. }
  234. bool ShmManager::OnResume(const char* identifier)
  235. {
  236. char magic[64]; // [rsp+0h] [rbp-60h] BYREF
  237. int size = snprintf(magic, 0x40uLL, "ByteGame/ShmManager-%s", identifier);
  238. if(!strncmp(magic, this->m_magic, size))
  239. {
  240. size_t index = 0;
  241. for(; index < m_block_count; ++index)
  242. {
  243. if(!AttachBlock(index))
  244. {
  245. SHM_ERROR("failed to attach block(%lu), address(%#lx), used size(%#lx), real size(%#lx), mmap size(%#lx).", index, m_blocks[index].addr, m_blocks[index].used_size, m_blocks[index].real_size, m_blocks[index].mmap_size);
  246. return false;
  247. }
  248. }
  249. if(m_block_count <= index)
  250. {
  251. m_version++;
  252. return true;
  253. }
  254. }
  255. SHM_ERROR("magic mismatch, expected: %s, actual: %s.", magic, this->m_magic);
  256. return false;
  257. }
// First-time initialization hook: build the size-class lookup tables.
// (OnResume is the counterpart used when re-attaching existing memory.)
bool ShmManager::OnCreate(void)
{
    return m_size_map.Init();
}
  262. bool ShmManager::DeleteBlock(size_t index)
  263. {
  264. SHM_ASSERT_RETURN_FALSE(index > m_block_count);
  265. char path[256];
  266. if(m_blocks[index].addr)
  267. {
  268. m_options.AddFixedAddress((uintptr_t)m_blocks[index].addr);
  269. snprintf(path, 0x100uLL, "%s-%03zu.mmap", this->m_options.identifier, index);
  270. bg::detail::ShmObjectDelete(m_blocks[index].addr, m_blocks[index].mmap_size, path);
  271. SHM_INFO("block(%lu) deleted, address(%#lx), used size(%#lx), real size(%#lx), mmap size(%#lx), path(%s).",
  272. index, m_blocks[index].addr, m_blocks[index].used_size, m_blocks[index].real_size, m_blocks[index].mmap_size, path);
  273. new (m_blocks[index].addr) ShmBlock();
  274. }
  275. return true;
  276. }
// Bump-pointer allocate `bytes` (aligned to `alignment`) inside block
// `index`, growing the block's committed size via ResizeBlock() on demand.
// Decompiled body: fields are reached through raw offsets from `this`;
// *((uintptr_t*)v8 + 3113) / +3114 appear to be m_blocks[index].addr and
// m_blocks[index].used_size (32-byte block entries) -- TODO confirm against
// the ShmBlock layout in shm_manager.h.
void* ShmManager::AllocateInBlock(size_t index, size_t bytes, size_t alignment)
{
    size_t v7; // r8 — padding inserted to reach the alignment
    char* v8; // r12 — &m_blocks[index] as a raw byte pointer
    __int64 v9; // rdx — block base address (addr)
    __int64 v10; // rax — bytes already used (used_size)
    __int64 v11; // rbx — candidate result address
    size_t v12; // rcx — end of the candidate allocation
    size_t mmap_size; // rsi
    size_t real_size; // rdi
    size_t shm_block_grow_size; // r10
    const char* v16; // r15 — target size handed to ResizeBlock
    const char* v17; // rcx
    size_t v18; // rdx
    size_t v20; // [rsp+0h] [rbp-40h]
    char* v21; // [rsp+8h] [rbp-38h]
    if(this->m_block_count <= index)
    {
        return 0LL;
    }
    else
    {
        v7 = 0LL;
        v8 = (char*)this + 32 * index;
        v9 = *((uintptr_t*)v8 + 3113);
        v10 = *((uintptr_t*)v8 + 3114);
        v11 = v10 + v9;
        // Round the bump pointer up to the requested alignment.
        if(((v10 + v9) & (alignment - 1)) != 0)
        {
            v7 = alignment - ((v10 + v9) & (alignment - 1));
            v11 += v7;
        }
        v12 = v11 + bytes;
        mmap_size = this->m_blocks[index].mmap_size;
        // Can never fit inside the reserved mapping: give up.
        if(v11 + bytes > mmap_size + v9)
        {
            return 0LL;
        }
        else
        {
            real_size = this->m_blocks[index].real_size;
            // Fits in already-committed memory: take it.
            if(v12 <= real_size + v9)
            {
LABEL_13:
                *((uintptr_t*)v8 + 3114) = v7 + bytes + v10;
                return (void*)v11;
            }
            // Need to grow: by one grow step, or enough to cover the request.
            shm_block_grow_size = this->m_options.shm_block_grow_size;
            v16 = (const char*)(shm_block_grow_size + real_size);
            if(v12 > shm_block_grow_size + real_size + v9)
            {
                v17 = (const char*)(v12 - v9);
                v16 = v17;
                // NOTE(review): rounding of the grow target to a multiple of
                // shm_block_grow_size was disabled (v18 forced to 0) — the
                // original expression is kept below; confirm intent.
                //v18 = (uintptr_t)&v17[-real_size] % shm_block_grow_size;
                v18 = 0;
                if(v18)
                    v16 = &v17[shm_block_grow_size - v18];
            }
            v21 = (char*)index;
            // Never grow past the reserved mapping size.
            if((unsigned __int64)v16 > mmap_size)
                v16 = (const char*)mmap_size;
            v20 = v7;
            if(ResizeBlock(index, v16))
            {
                // Re-read used_size and retry the fast path.
                v10 = *((uintptr_t*)v8 + 3114);
                v7 = v20;
                goto LABEL_13;
            }
            return nullptr;
        }
    }
}
  349. bool ShmManager::CreateBlock(size_t index, size_t min_size)
  350. {
  351. size_t shm_block_grow_size; // rcx
  352. size_t shm_block_mmap_size; // rax
  353. __int64 v8; // rax
  354. uintptr_t fixed_addr; // [rsp+8h] [rbp-138h] BYREF
  355. size_t real_size; // [rsp+10h] [rbp-130h] BYREF
  356. size_t mmap_size; // [rsp+18h] [rbp-128h] BYREF
  357. char path[256]; // [rsp+20h] [rbp-120h] BYREF
  358. if(index > 0xFF)
  359. {
  360. return 0LL;
  361. }
  362. else
  363. {
  364. shm_block_grow_size = this->m_options.shm_block_grow_size;
  365. shm_block_mmap_size = this->m_options.shm_block_mmap_size;
  366. fixed_addr = 0LL;
  367. real_size = shm_block_grow_size;
  368. mmap_size = shm_block_mmap_size;
  369. if(shm_block_grow_size < min_size)
  370. real_size = min_size;
  371. if(shm_block_mmap_size < min_size)
  372. mmap_size = min_size;
  373. m_options.PopFixedAddress(&fixed_addr);
  374. snprintf(path, 0x100uLL, "%s-%03zu.mmap", this->m_options.identifier, index);
  375. v8 = (uintptr_t)bg::detail::ShmObjectCreate(path, fixed_addr, &real_size, &mmap_size);
  376. if(!v8)
  377. {
  378. if(fixed_addr)
  379. m_options.AddFixedAddress(fixed_addr);
  380. return false;
  381. }
  382. m_blocks[index].addr = (void*)v8;
  383. m_blocks[index].real_size = real_size;
  384. m_blocks[index].mmap_size = mmap_size;
  385. m_blocks[index].used_size = 0;
  386. if(this->m_block_count != index)
  387. return true;
  388. this->m_block_count = index + 1;
  389. }
  390. return true;
  391. }
// Re-map block `index` at its recorded address after a process restart.
// Decompiled body: *((uintptr_t*)v3 + 3113) / +3116 appear to be
// m_blocks[index].addr and m_blocks[index].mmap_size (32-byte entries) --
// TODO confirm against the ShmBlock layout.
bool ShmManager::AttachBlock(size_t index)
{
    __int64 result; // rax
    char* v3; // rbx — &m_blocks[index] as a raw byte pointer
    size_t v4; // rax — recorded mmap size
    __int64 v6; // rdx — address returned by ShmObjectAttach
    size_t real_size; // [rsp+0h] [rbp-130h] BYREF
    size_t mmap_size; // [rsp+8h] [rbp-128h] BYREF
    char path[256]; // [rsp+10h] [rbp-120h] BYREF
    if(this->m_block_count <= index)
    {
        return 0LL;
    }
    else
    {
        // A block that never recorded an address needs no attach: succeed.
        result = 1LL;
        v3 = (char*)this + 32 * index;
        if(*((uintptr_t*)v3 + 3113))
        {
            v4 = *((uintptr_t*)v3 + 3116);
            real_size = 0LL;
            mmap_size = v4;
            snprintf(path, 0x100uLL, "%s-%03zu.mmap", this->m_options.identifier, index);
            // Attach must land at the previously recorded fixed address.
            v6 = (uintptr_t)bg::detail::ShmObjectAttach(path, (uintptr_t) * ((const char**)v3 + 3113), &real_size, &mmap_size);
            result = 0LL;
            if(v6)
            {
                return 1LL;
            }
        }
    }
    return result;
}
  425. bool ShmManager::ResizeBlock(size_t index, size_t new_size)
  426. {
  427. bool v3; // cc
  428. unsigned int v5; // r13d
  429. size_t new_sizea; // [rsp+8h] [rbp-128h] BYREF
  430. char path[256]; // [rsp+10h] [rbp-120h] BYREF
  431. SHM_ASSERT_RETURN_FALSE(index < m_block_count);
  432. if(m_blocks[index].mmap_size < new_size)
  433. {
  434. SHM_ERROR("new size(%#lx) too large, block(%lu), mmap size(%#lx).", new_size, index, m_blocks[index].mmap_size);
  435. return false;
  436. }
  437. else
  438. {
  439. snprintf(path, 0x100uLL, "%s-%03zu.mmap", this->m_options.identifier, index);
  440. if(bg::detail::ShmObjectResize(path, &new_size))
  441. {
  442. SHM_INFO("block(%lu) resized, old size(%#lx), new size(%#lx), used size(%#lx), mmap size(%#lx).",
  443. index, m_blocks[index].addr, m_blocks[index].real_size, new_size, m_blocks[index].used_size, m_blocks[index].mmap_size);
  444. this->m_blocks[index].real_size = new_sizea;
  445. return true;
  446. }
  447. }
  448. return false;
  449. }
  450. }
// Allocate `bytes` from the shared-memory heap (decompiled body).
// Small/medium requests are served from per-size-class chunk caches backed by
// page spans; requests above CLASS_MAX_BYTES receive whole page spans.
// Returns nullptr when no manager is attached or memory is exhausted.
// NOTE(review): the `index > CLASS_MAX_COUNT` branch performs the same work
// as the normal path (likely decompiler control-flow duplication), whereas
// GetSingleton returns nullptr for that case — confirm which is intended.
void* ShmMalloc(size_t bytes)
{
    int64_t v6; // rbx — g_shm_ctx.mgr as an integer base for raw offsets
    uint8_t index; // r15 — size-class index
    __int64 v9; // r14 — chunk size for the class
    __int64 v10; // r13 — class index widened
    bg::detail::Span* v11; // r12 — class cache free-list head
    bg::detail::Span* m_next; // r8 — span chunks are taken from
    void** result; // rax
    bool v14; // zf — span became empty after this chunk
    bg::detail::Span* Span; // rax
    bg::detail::PageHeap* v16; // rdi
    bg::detail::Span* v17; // rax
    size_t m_start_page; // rcx
    size_t m_page_count; // rdi
    void** p_m_chunk_list; // rax
    void** v21; // rcx
    void** v22; // rdx
    void** v23; // rdi
    void** v24; // rsi
    __int64 v25; // rax
    bg::detail::Span* v26; // rcx
    bg::detail::Span* m_prev; // rdx
    bg::detail::Span* v28; // [rsp+0h] [rbp-40h]
    bg::detail::PageHeap* v29; // [rsp+8h] [rbp-38h]
    v6 = (int64_t)g_shm_ctx.mgr;
    if(!v6)
    {
        return nullptr;
    }
    // Map the request size onto a size-class index.
    if(bytes <= BYTES_COMP_VALUE)
    {
        index = g_shm_ctx.mgr->m_size_map.m_index_to_class[SMALL_BYTES_TO_INDEX(bytes)];
    }
    else
    {
        if(bytes > CLASS_MAX_BYTES)
        {
            // Huge request: whole page span (0x1FFF = page size 0x2000 - 1,
            // i.e. round up to whole pages).
            Span = g_shm_ctx.mgr->m_page_heap.AllocateSpan(BYTES_TO_PAGES(bytes + 0x1FFF));
            if(Span)
            {
                // NOTE(review): `Span + 16` is Span* arithmetic in this
                // decompiled rendering but the intent is clearly the field at
                // byte offset 16 (m_start_page), shifted into a byte address
                // (<< 13 = * PAGE_BYTES) — confirm against the Span layout.
                return (void**)(*(uintptr_t*)(Span + 16) << 13);
            }
            return nullptr;
        }
        auto temp1 = BIG_BYTES_TO_INDEX(bytes); // unused decompiler residue
        index = g_shm_ctx.mgr->m_size_map.m_index_to_class[BIG_BYTES_TO_INDEX(bytes)];
    }
    if(index > CLASS_MAX_COUNT)
    {
        // Duplicated path — identical to the code below (see NOTE above).
        v9 = g_shm_ctx.mgr->m_size_map.m_class_to_size[index];
        v10 = index;
        v11 = &g_shm_ctx.mgr->m_chunk_cache.m_caches[index].free_list;
        if(v11 != v11->m_next)
            goto LABEL_7;
        v29 = &g_shm_ctx.mgr->m_page_heap;
        v16 = &g_shm_ctx.mgr->m_page_heap;
        goto LABEL_18;
    }
    v9 = g_shm_ctx.mgr->m_size_map.m_class_to_size[index];
    v10 = index;
    v11 = &g_shm_ctx.mgr->m_chunk_cache.m_caches[index].free_list;
    // Empty cache (circular list points at its own head): refill with a span.
    if(v11 == v11->m_next)
    {
        v29 = &g_shm_ctx.mgr->m_page_heap;
        v16 = &g_shm_ctx.mgr->m_page_heap;
LABEL_18:
        v17 = v16->AllocateSpan(g_shm_ctx.mgr->m_size_map.m_class_to_pages[index]);
        if(v17)
        {
            v28 = v17;
            v29->RegisterSpan(v17);
            m_next = v28;
            m_start_page = v28->m_start_page;
            m_page_count = v28->m_page_count;
            p_m_chunk_list = &v28->m_chunk_list;
            v28->m_size_class = index;
            // Thread the span's pages into a singly linked list of v9-byte
            // chunks; each chunk's first word points at the next chunk.
            v21 = (void**)(m_start_page << 13);
            v22 = (void**)((char*)v21 + v9);
            v23 = &v21[1024 * m_page_count]; // span end (8192 bytes / page)
            if((void**)((char*)v21 + v9) <= v23)
            {
                while(1)
                {
                    v24 = v22;
                    v22 = (void**)((char*)v22 + v9);
                    *p_m_chunk_list = v21;
                    p_m_chunk_list = v21;
                    if(v23 < v22)
                        break;
                    v21 = v24;
                }
            }
            else
            {
                v21 = &v28->m_chunk_list;
            }
            // Terminate the chunk list.
            *v21 = 0LL;
            // Link the new span at the head of the class cache
            // (v25 + 40 = list-head next pointer, +56 = span count).
            v25 = v6 + (v10 << 6);
            v26 = *(bg::detail::Span**)(v25 + 40);
            v28->m_prev = v11;
            v28->m_next = v26;
            *(uintptr_t*)(*(uintptr_t*)(v25 + 40) + 32LL) = (uintptr_t)v28;
            *(uintptr_t*)(v25 + 40) = (uintptr_t)v28;
            ++* (uintptr_t*)(v25 + 56);
            goto LABEL_8;
        }
        return 0LL;
    }
LABEL_7:
    m_next = v11->m_next;
LABEL_8:
    // Pop one chunk off the first span in the cache.
    result = (void**)m_next->m_chunk_list;
    ++m_next->m_used_count;
    v14 = *result == 0LL;
    m_next->m_chunk_list = *result;
    if(v14)
    {
        // Last free chunk taken: unlink the span from the cache.
        m_prev = m_next->m_prev;
        m_prev->m_next = m_next->m_next;
        m_next->m_next->m_prev = m_prev;
        m_next->m_prev = 0LL;
        m_next->m_next = 0LL;
        --* (uintptr_t*)(v6 + (v10 << 6) + 56);
    }
    return result;
}
  578. void ShmFree(void* ptr)
  579. {
  580. __int64 v6; // rcx
  581. __int64 v7; // rsi
  582. __int64 v8; // rax
  583. bg::detail::Span* v9; // rsi
  584. __int64 m_size_class; // rax
  585. size_t m_used_count; // rdx
  586. void* m_chunk_list; // r9
  587. size_t v13; // rdx
  588. bool v14; // dl
  589. __int64 v15; // rax
  590. bg::detail::Span* m_prev; // rdx
  591. __int64 v17; // rax
  592. bg::detail::Span* v18; // rdi
  593. bg::detail::Span* v19; // rcx
  594. v6 = (int64_t)g_shm_ctx.mgr;
  595. if(!v6)
  596. {
  597. return;
  598. }
  599. if(ptr)
  600. {
  601. if((uintptr_t)ptr >> 13 > 0x7FFFFFFFFLL)
  602. {
  603. }
  604. else
  605. {
  606. v7 = *(uintptr_t*)(v6 + 8 * ((uintptr_t)ptr >> 38) + 9136);
  607. if(v7)
  608. {
  609. v8 = *(uintptr_t*)(v7 + 8 * (((uintptr_t)ptr >> 28) & 0x3FF));
  610. if(v8)
  611. {
  612. v9 = *(bg::detail::Span**)(v8 + 8 * (((uintptr_t)ptr >> 13) & 0x7FFF));
  613. if(v9)
  614. {
  615. m_size_class = v9->m_size_class;
  616. if((char)m_size_class == 0xFF)
  617. {
  618. ((bg::detail::PageHeap*)(v6 + 9136))->DeallocateSpan(v9);
  619. }
  620. else
  621. {
  622. m_used_count = v9->m_used_count;
  623. if(m_used_count && (unsigned __int8)m_size_class <= 0x56u)
  624. {
  625. m_chunk_list = v9->m_chunk_list;
  626. v13 = m_used_count - 1;
  627. *(uintptr_t*)ptr = (uintptr_t)m_chunk_list;
  628. v9->m_used_count = v13;
  629. v14 = v13 == 0;
  630. v9->m_chunk_list = (void*)ptr;
  631. if(m_chunk_list)
  632. {
  633. if(v14)
  634. {
  635. v15 = v6 + (m_size_class << 6) + 48;
  636. if(*(uintptr_t*)(v15 + 8) > 1uLL)
  637. {
  638. m_prev = v9->m_prev;
  639. m_prev->m_next = v9->m_next;
  640. v9->m_next->m_prev = m_prev;
  641. v9->m_prev = 0LL;
  642. v9->m_next = 0LL;
  643. --* (uintptr_t*)(v15 + 8);
  644. LABEL_14:
  645. v9->m_used_count = 0LL;
  646. v9->m_size_class = -1;
  647. v9->m_chunk_list = 0LL;
  648. ((bg::detail::PageHeap*)(v6 + 9136))->DeallocateSpan(v9);
  649. return;
  650. }
  651. }
  652. }
  653. else
  654. {
  655. v17 = v6 + (m_size_class << 6);
  656. if(v14)
  657. {
  658. if(*(uintptr_t*)(v17 + 56))
  659. goto LABEL_14;
  660. v18 = *(bg::detail::Span**)(v17 + 40);
  661. v9->m_prev = (bg::detail::Span*)v17;
  662. v9->m_next = v18;
  663. *(uintptr_t*)(*(uintptr_t*)(v17 + 40) + 32LL) = (uintptr_t)v9;
  664. *(uintptr_t*)(v17 + 40) = (uintptr_t)v9;
  665. ++* (uintptr_t*)(v17 + 56);
  666. }
  667. else
  668. {
  669. v19 = *(bg::detail::Span**)(v17 + 40);
  670. v9->m_prev = (bg::detail::Span*)v17;
  671. v9->m_next = v19;
  672. *(uintptr_t*)(*(uintptr_t*)(v17 + 40) + 32LL) = (uintptr_t)v9;
  673. ++* (uintptr_t*)(v17 + 56);
  674. *(uintptr_t*)(v17 + 40) = (uintptr_t)v9;
  675. }
  676. }
  677. }
  678. else
  679. {
  680. }
  681. }
  682. return;
  683. }
  684. }
  685. }
  686. }
  687. }
  688. }
  689. void* ShmRealloc(void* old_ptr, size_t new_bytes)
  690. {
  691. __int64 v6; // r14
  692. unsigned __int64 v9; // r15
  693. unsigned __int64 v10; // r10
  694. __int64 v11; // r11
  695. __int64 v12; // rax
  696. __int64 v13; // rax
  697. __int64 v14; // rax
  698. __int64 v15; // rbx
  699. size_t v16; // rbx
  700. size_t v17; // rdx
  701. void** result; // rax
  702. bg::detail::Span* v19; // rax
  703. bg::detail::Span* v20; // rax
  704. void** m_chunk_list; // rdi
  705. void* v22; // rdx
  706. size_t v23; // rdx
  707. __int64 v24; // rax
  708. __int64 v25; // rax
  709. bg::detail::Span* v26; // rsi
  710. uint8_t m_size_class; // al
  711. size_t m_used_count; // rcx
  712. __int64 v29; // rdx
  713. void* v30; // rax
  714. size_t v31; // rcx
  715. bg::detail::PageHeap* v32; // rdi
  716. bool v33; // cl
  717. __int64 v34; // rdx
  718. bg::detail::Span* v35; // rcx
  719. bg::detail::Span* v36; // rax
  720. bg::detail::Span* m_next; // rax
  721. void** v38; // rdi
  722. void* v39; // rdx
  723. bg::detail::Span* v40; // rdx
  724. __int64 Span; // rax
  725. __int64 v42; // r14
  726. bg::detail::Span* v43; // rcx
  727. bg::detail::Span* v44; // rdx
  728. __int64 v45; // rax
  729. bg::detail::PageHeap* v46; // rdi
  730. bg::detail::Span* v47; // rax
  731. size_t v48; // r8
  732. void** v49; // rsi
  733. size_t v50; // rdi
  734. unsigned __int64 v51; // rdx
  735. size_t v52; // r8
  736. void** v53; // rcx
  737. __int64 v54; // rdx
  738. bg::detail::Span* v55; // rsi
  739. bg::detail::PageHeap* v56; // rdi
  740. bg::detail::Span* v57; // rax
  741. size_t m_start_page; // rcx
  742. size_t m_page_count; // r8
  743. void** p_m_chunk_list; // rsi
  744. void** v61; // rcx
  745. void** v62; // rdx
  746. void** v63; // r8
  747. void** v64; // rdi
  748. __int64 v65; // rdx
  749. bg::detail::Span* v66; // rsi
  750. bg::detail::Span* m_prev; // rdx
  751. __int64 v68; // [rsp+0h] [rbp-70h]
  752. __int64 v69; // [rsp+0h] [rbp-70h]
  753. __int64 v70; // [rsp+0h] [rbp-70h]
  754. __int64 v71; // [rsp+8h] [rbp-68h]
  755. unsigned __int64 v72; // [rsp+8h] [rbp-68h]
  756. unsigned __int64 v73; // [rsp+8h] [rbp-68h]
  757. __int64 v74; // [rsp+8h] [rbp-68h]
  758. unsigned __int64 v75; // [rsp+8h] [rbp-68h]
  759. unsigned __int64 v76; // [rsp+8h] [rbp-68h]
  760. bg::detail::PageHeap* v77; // [rsp+10h] [rbp-60h]
  761. bg::detail::PageHeap* v78; // [rsp+10h] [rbp-60h]
  762. bg::detail::PageHeap* v79; // [rsp+10h] [rbp-60h]
  763. bg::detail::PageHeap* v80; // [rsp+10h] [rbp-60h]
  764. __int64 v81; // [rsp+18h] [rbp-58h]
  765. __int64 v82; // [rsp+18h] [rbp-58h]
  766. bg::detail::Span* v83; // [rsp+18h] [rbp-58h]
  767. __int64 v84; // [rsp+20h] [rbp-50h]
  768. __int64 v85; // [rsp+20h] [rbp-50h]
  769. bg::detail::Span* v86; // [rsp+20h] [rbp-50h]
  770. __int64 v87; // [rsp+28h] [rbp-48h]
  771. __int64 v88; // [rsp+28h] [rbp-48h]
  772. __int64 v89; // [rsp+28h] [rbp-48h]
  773. bg::detail::Span* v90; // [rsp+30h] [rbp-40h]
  774. unsigned __int64 v91; // [rsp+30h] [rbp-40h]
  775. bg::detail::Span* v92; // [rsp+30h] [rbp-40h]
  776. __int64 v93; // [rsp+30h] [rbp-40h]
  777. __int64 v94; // [rsp+30h] [rbp-40h]
  778. uint8_t dest; // [rsp+38h] [rbp-38h]
  779. void** desta; // [rsp+38h] [rbp-38h]
  780. uint8_t destb; // [rsp+38h] [rbp-38h]
  781. void* destc; // [rsp+38h] [rbp-38h]
  782. void* destd; // [rsp+38h] [rbp-38h]
  783. v6 = (int64_t)g_shm_ctx.mgr;
  784. if(!v6)
  785. {
  786. return 0LL;
  787. }
  788. if(!old_ptr)
  789. return bg::ShmMalloc(new_bytes);
  790. if(!new_bytes)
  791. {
  792. bg::ShmFree((void*)old_ptr);
  793. return 0LL;
  794. }
  795. if((uintptr_t)old_ptr >> 13 > 0x7FFFFFFFFLL)
  796. {
  797. LABEL_51:
  798. return 0LL;
  799. }
  800. v9 = ((uintptr_t)old_ptr >> 13) & 0x7FFF;
  801. v10 = ((uintptr_t)old_ptr >> 28) & 0x3FF;
  802. v11 = v6 + 8 * ((uintptr_t)old_ptr >> 38);
  803. v12 = *(uintptr_t*)(v11 + 9136);
  804. if(!v12)
  805. goto LABEL_51;
  806. v13 = *(uintptr_t*)(v12 + 8 * v10);
  807. if(!v13)
  808. goto LABEL_51;
  809. v14 = *(uintptr_t*)(v13 + 8 * v9);
  810. if(!v14)
  811. goto LABEL_51;
  812. v15 = *(unsigned __int8*)(v14 + 1);
  813. if((char)v15 == 0xFF)
  814. {
  815. v16 = *(uintptr_t*)(v14 + 24) << 13;
  816. }
  817. else
  818. {
  819. if((unsigned __int8)v15 > 0x56u)
  820. {
  821. v11 = v6 + 8 * ((uintptr_t)old_ptr >> 38);
  822. v10 = ((uintptr_t)old_ptr >> 28) & 0x3FF;
  823. }
  824. v16 = *(uintptr_t*)(v6 + 8 * v15 + 7744);
  825. }
  826. v17 = v16 + (v16 >> 2);
  827. if(v16 > v16 >> 2)
  828. v17 = -1LL;
  829. if(new_bytes < v16 >> 1 || (result = (void**)old_ptr, new_bytes > v16))
  830. {
  831. if(new_bytes >= v17 || new_bytes <= v16)
  832. goto LABEL_19;
  833. if(v17 <= 0x400)
  834. {
  835. destb = *(char*)(v6 + ((v17 + 7) >> 3) + 5568);
  836. }
  837. else
  838. {
  839. if(v17 > 0x40000)
  840. {
  841. v93 = v11;
  842. destc = (void*)v10;
  843. Span = (uintptr_t)((bg::detail::PageHeap*)(v6 + 9136))->AllocateSpan((v17 + 0x1FFF) >> 13);
  844. v10 = (unsigned __int64)destc;
  845. v11 = v93;
  846. if(Span)
  847. {
  848. desta = (void**)(*(uintptr_t*)(Span + 16) << 13);
  849. if(desta)
  850. goto LABEL_27;
  851. }
  852. goto LABEL_19;
  853. }
  854. destb = *(char*)(v6 + ((v17 + 15487) >> 7) + 5568);
  855. }
  856. if(destb > 0x56u)
  857. {
  858. v71 = v11;
  859. v77 = (bg::detail::PageHeap*)v10;
  860. v10 = (unsigned __int64)v77;
  861. v11 = v71;
  862. v85 = destb;
  863. v82 = *(uintptr_t*)(v6 + 8LL * destb + 7744);
  864. v89 = destb;
  865. v36 = (bg::detail::Span*)(v6 + ((unsigned __int64)destb << 6));
  866. v92 = v36;
  867. if(v36 != v36->m_next)
  868. goto LABEL_47;
  869. v78 = (bg::detail::PageHeap*)(v6 + 9136);
  870. v69 = v71;
  871. v72 = v10;
  872. v10 = v72;
  873. v11 = v69;
  874. v46 = (bg::detail::PageHeap*)(v6 + 9136);
  875. }
  876. else
  877. {
  878. v85 = destb;
  879. v82 = *(uintptr_t*)(v6 + 8LL * destb + 7744);
  880. v89 = destb;
  881. v36 = (bg::detail::Span*)(v6 + ((unsigned __int64)destb << 6));
  882. v92 = v36;
  883. if(v36 != v36->m_next)
  884. {
  885. LABEL_47:
  886. m_next = v36->m_next;
  887. goto LABEL_48;
  888. }
  889. v78 = (bg::detail::PageHeap*)(v6 + 9136);
  890. v46 = (bg::detail::PageHeap*)(v6 + 9136);
  891. }
  892. v68 = v11;
  893. v73 = v10;
  894. v47 = (bg::detail::Span*)v46->bg::detail::PageHeap::AllocateSpan(*(uintptr_t*)(v6 + 8 * v85 + 8440));
  895. v10 = v73;
  896. v11 = v68;
  897. if(!v47)
  898. {
  899. LABEL_19:
  900. if(new_bytes <= 0x400)
  901. {
  902. dest = *(char*)(v6 + ((new_bytes + 7) >> 3) + 5568);
  903. goto LABEL_22;
  904. }
  905. if(new_bytes <= 0x40000)
  906. {
  907. dest = *(char*)(v6 + ((new_bytes + 15487) >> 7) + 5568);
  908. LABEL_22:
  909. if(dest > 0x56u)
  910. {
  911. v74 = v11;
  912. v79 = (bg::detail::PageHeap*)v10;
  913. v10 = (unsigned __int64)v79;
  914. v11 = v74;
  915. v81 = dest;
  916. v87 = *(char*)(v6 + 8LL * dest + 7744);
  917. v84 = dest;
  918. v19 = (bg::detail::Span*)(v6 + ((unsigned __int64)dest << 6));
  919. v90 = v19;
  920. if(v19 != v19->m_next)
  921. goto LABEL_24;
  922. v80 = (bg::detail::PageHeap*)(v6 + 9136);
  923. v70 = v74;
  924. v75 = v10;
  925. v10 = v75;
  926. v11 = v70;
  927. v56 = (bg::detail::PageHeap*)(v6 + 9136);
  928. }
  929. else
  930. {
  931. v81 = dest;
  932. v87 = *(char*)(v6 + 8LL * dest + 7744);
  933. v84 = dest;
  934. v19 = (bg::detail::Span*)(v6 + ((unsigned __int64)dest << 6));
  935. v90 = v19;
  936. if(v19 != v19->m_next)
  937. {
  938. LABEL_24:
  939. v20 = v19->m_next;
  940. goto LABEL_25;
  941. }
  942. v80 = (bg::detail::PageHeap*)(v6 + 9136);
  943. v56 = (bg::detail::PageHeap*)(v6 + 9136);
  944. }
  945. v68 = v11;
  946. v76 = v10;
  947. v57 = v56->AllocateSpan(*(uintptr_t*)(v6 + 8 * v81 + 8440));
  948. if(v57)
  949. {
  950. v83 = v57;
  951. v80->RegisterSpan(v57);
  952. v20 = v83;
  953. v10 = v76;
  954. v11 = v68;
  955. m_start_page = v83->m_start_page;
  956. m_page_count = v83->m_page_count;
  957. p_m_chunk_list = &v83->m_chunk_list;
  958. v83->m_size_class = dest;
  959. v61 = (void**)(m_start_page << 13);
  960. v62 = (void**)((char*)v61 + v87);
  961. v63 = &v61[1024 * m_page_count];
  962. if((void**)((char*)v61 + v87) <= v63)
  963. {
  964. while(1)
  965. {
  966. v64 = v62;
  967. v62 = (void**)((char*)v62 + v87);
  968. *p_m_chunk_list = v61;
  969. p_m_chunk_list = v61;
  970. if(v63 < v62)
  971. break;
  972. v61 = v64;
  973. }
  974. }
  975. else
  976. {
  977. v61 = &v83->m_chunk_list;
  978. }
  979. *v61 = 0LL;
  980. v65 = v6 + (v84 << 6);
  981. v66 = *(bg::detail::Span**)(v65 + 40);
  982. v83->m_prev = v90;
  983. v83->m_next = v66;
  984. *(uintptr_t*)(*(uintptr_t*)(v65 + 40) + 32LL) = (uintptr_t)v83;
  985. *(uintptr_t*)(v65 + 40) = (uintptr_t)v83;
  986. ++* (uintptr_t*)(v65 + 56);
  987. LABEL_25:
  988. m_chunk_list = (void**)v20->m_chunk_list;
  989. ++v20->m_used_count;
  990. v22 = *m_chunk_list;
  991. desta = m_chunk_list;
  992. v20->m_chunk_list = *m_chunk_list;
  993. if(!v22)
  994. {
  995. m_prev = v20->m_prev;
  996. m_prev->m_next = v20->m_next;
  997. v20->m_next->m_prev = m_prev;
  998. v20->m_prev = 0LL;
  999. v20->m_next = 0LL;
  1000. --* (uintptr_t*)(v6 + (v84 << 6) + 56);
  1001. }
  1002. goto LABEL_27;
  1003. }
  1004. return 0LL;
  1005. }
  1006. v94 = v11;
  1007. destd = (void*)v10;
  1008. v45 = (uintptr_t)((bg::detail::PageHeap*)(v6 + 9136))->AllocateSpan((new_bytes + 0x1FFF) >> 13);
  1009. v10 = (unsigned __int64)destd;
  1010. v11 = v94;
  1011. if(!v45)
  1012. return 0LL;
  1013. result = (void**)(*(uintptr_t*)(v45 + 16) << 13);
  1014. desta = result;
  1015. if(!result)
  1016. return result;
  1017. LABEL_27:
  1018. v23 = new_bytes;
  1019. if(v16 <= new_bytes)
  1020. v23 = v16;
  1021. v88 = v11;
  1022. v91 = v10;
  1023. memcpy(desta, (const void*)old_ptr, v23);
  1024. v24 = *(uintptr_t*)(v88 + 9136);
  1025. if(v24 && (v25 = *(uintptr_t*)(v24 + 8 * v91)) != 0 && (v26 = *(bg::detail::Span**)(v25 + 8 * v9)) != 0LL)
  1026. {
  1027. m_size_class = v26->m_size_class;
  1028. if(m_size_class == 0xFF)
  1029. {
  1030. ((bg::detail::PageHeap*)(v6 + 9136))->DeallocateSpan(v26);
  1031. return desta;
  1032. }
  1033. m_used_count = v26->m_used_count;
  1034. if(m_used_count && m_size_class <= 0x56u)
  1035. {
  1036. v29 = m_size_class;
  1037. v30 = v26->m_chunk_list;
  1038. v31 = m_used_count - 1;
  1039. v32 = (bg::detail::PageHeap*)(v6 + 9136);
  1040. *(uintptr_t*)old_ptr = (uintptr_t)v30;
  1041. v26->m_used_count = v31;
  1042. v33 = v31 == 0;
  1043. v26->m_chunk_list = (void*)old_ptr;
  1044. if(v30)
  1045. {
  1046. result = desta;
  1047. if(!v33)
  1048. return result;
  1049. v34 = v6 + (v29 << 6) + 48;
  1050. if(*(uintptr_t*)(v34 + 8) <= 1uLL)
  1051. return result;
  1052. v35 = v26->m_prev;
  1053. v35->m_next = v26->m_next;
  1054. v26->m_next->m_prev = v35;
  1055. v26->m_prev = 0LL;
  1056. v26->m_next = 0LL;
  1057. --* (uintptr_t*)(v34 + 8);
  1058. }
  1059. else
  1060. {
  1061. v42 = (v29 << 6) + v6;
  1062. if(!v33)
  1063. {
  1064. v44 = *(bg::detail::Span**)(v42 + 40);
  1065. result = desta;
  1066. v26->m_prev = (bg::detail::Span*)v42;
  1067. v26->m_next = v44;
  1068. *(uintptr_t*)(*(uintptr_t*)(v42 + 40) + 32LL) = (uintptr_t)v26;
  1069. *(uintptr_t*)(v42 + 40) = (uintptr_t)v26;
  1070. ++* (uintptr_t*)(v42 + 56);
  1071. return result;
  1072. }
  1073. if(!*(uintptr_t*)(v42 + 56))
  1074. {
  1075. v43 = *(bg::detail::Span**)(v42 + 40);
  1076. v26->m_prev = (bg::detail::Span*)v42;
  1077. v26->m_next = v43;
  1078. *(uintptr_t*)(*(uintptr_t*)(v42 + 40) + 32LL) = (uintptr_t)v26;
  1079. *(uintptr_t*)(v42 + 40) = (uintptr_t)v26;
  1080. ++* (uintptr_t*)(v42 + 56);
  1081. return desta;
  1082. }
  1083. }
  1084. v26->m_used_count = 0LL;
  1085. v26->m_size_class = -1;
  1086. v26->m_chunk_list = 0LL;
  1087. v32->DeallocateSpan(v26);
  1088. return desta;
  1089. }
  1090. }
  1091. else
  1092. {
  1093. }
  1094. return desta;
  1095. }
  1096. v86 = v47;
  1097. v78->RegisterSpan(v47);
  1098. m_next = v86;
  1099. v10 = v73;
  1100. v11 = v68;
  1101. v48 = v86->m_page_count;
  1102. v49 = &v86->m_chunk_list;
  1103. v86->m_size_class = destb;
  1104. v50 = v86->m_start_page << 13;
  1105. v51 = v50 + v82;
  1106. v52 = v50 + (v48 << 13);
  1107. if(v50 + v82 > v52)
  1108. {
  1109. v53 = &v86->m_chunk_list;
  1110. }
  1111. else
  1112. {
  1113. do
  1114. {
  1115. v53 = (void**)(v51 - v82);
  1116. v51 += v82;
  1117. *v49 = v53;
  1118. v49 = v53;
  1119. } while(v52 >= v51);
  1120. }
  1121. *v53 = 0LL;
  1122. v54 = v6 + (v89 << 6);
  1123. v55 = *(bg::detail::Span**)(v54 + 40);
  1124. v86->m_prev = v92;
  1125. v86->m_next = v55;
  1126. *(uintptr_t*)(*(uintptr_t*)(v54 + 40) + 32LL) = (uintptr_t)v86;
  1127. *(uintptr_t*)(v54 + 40) = (uintptr_t)v86;
  1128. ++* (uintptr_t*)(v54 + 56);
  1129. LABEL_48:
  1130. v38 = (void**)m_next->m_chunk_list;
  1131. ++m_next->m_used_count;
  1132. v39 = *v38;
  1133. desta = v38;
  1134. m_next->m_chunk_list = *v38;
  1135. if(!v39)
  1136. {
  1137. v40 = m_next->m_prev;
  1138. v40->m_next = m_next->m_next;
  1139. m_next->m_next->m_prev = v40;
  1140. m_next->m_prev = 0LL;
  1141. m_next->m_next = 0LL;
  1142. --* (uintptr_t*)(v6 + (v89 << 6) + 56);
  1143. }
  1144. goto LABEL_27;
  1145. }
  1146. return result;
  1147. }
  1148. void* ShmCalloc(size_t n, size_t bytes)
  1149. {
  1150. __int64 v6; // r12
  1151. size_t v7; // rbx
  1152. void** v8; // r12
  1153. uint8_t v10; // r14
  1154. __int64 v11; // r11
  1155. __int64 v12; // r15
  1156. __int64 v13; // r10
  1157. bg::detail::Span* v14; // r13
  1158. bg::detail::Span* m_next; // rax
  1159. void** m_chunk_list; // rdx
  1160. bool v17; // zf
  1161. __int64 Span; // rax
  1162. bg::detail::PageHeap* v19; // rdi
  1163. bg::detail::Span* v20; // rax
  1164. size_t m_start_page; // rcx
  1165. size_t m_page_count; // r8
  1166. void** p_m_chunk_list; // rsi
  1167. void** v24; // rcx
  1168. void** v25; // rdx
  1169. void** v26; // r8
  1170. void** v27; // rdi
  1171. __int64 v28; // rdx
  1172. bg::detail::Span* v29; // rsi
  1173. bg::detail::Span* m_prev; // rcx
  1174. bg::detail::Span* v31; // [rsp+8h] [rbp-48h]
  1175. __int64 v32; // [rsp+10h] [rbp-40h]
  1176. bg::detail::PageHeap* v33; // [rsp+18h] [rbp-38h]
  1177. v6 = (int64_t)g_shm_ctx.mgr;
  1178. if(!v6)
  1179. {
  1180. return nullptr;
  1181. }
  1182. v7 = bytes * n;
  1183. if(bytes && n != v7 / bytes)
  1184. return 0LL;
  1185. if(v7 <= 0x400)
  1186. {
  1187. v10 = *(char*)(v6 + ((v7 + 7) >> 3) + 5568);
  1188. }
  1189. else
  1190. {
  1191. if(v7 > 0x40000)
  1192. {
  1193. Span = (uintptr_t)((bg::detail::PageHeap*)(v6 + 9136))->AllocateSpan((v7 + 0x1FFF) >> 13);
  1194. if(Span)
  1195. {
  1196. v8 = (void**)(*(uintptr_t*)(Span + 16) << 13);
  1197. if(v8)
  1198. {
  1199. LABEL_16:
  1200. memset(v8, 0, v7);
  1201. return v8;
  1202. }
  1203. }
  1204. return 0LL;
  1205. }
  1206. v10 = *(char*)(v6 + ((v7 + 15487) >> 7) + 5568);
  1207. }
  1208. if(v10 > 0x56u)
  1209. {
  1210. v12 = *(uintptr_t*)(v6 + 8LL * v10 + 7744);
  1211. v13 = v10;
  1212. v14 = (bg::detail::Span*)(v6 + ((unsigned __int64)v10 << 6));
  1213. if(v14 != v14->m_next)
  1214. goto LABEL_12;
  1215. v33 = (bg::detail::PageHeap*)(v6 + 9136);
  1216. v13 = v10;
  1217. v11 = v10;
  1218. v19 = (bg::detail::PageHeap*)(v6 + 9136);
  1219. }
  1220. else
  1221. {
  1222. v11 = v10;
  1223. v12 = *(uintptr_t*)(v6 + 8LL * v10 + 7744);
  1224. v13 = v10;
  1225. v14 = (bg::detail::Span*)(v6 + ((unsigned __int64)v10 << 6));
  1226. if(v14 != v14->m_next)
  1227. {
  1228. LABEL_12:
  1229. m_next = v14->m_next;
  1230. goto LABEL_13;
  1231. }
  1232. v33 = (bg::detail::PageHeap*)(v6 + 9136);
  1233. v19 = (bg::detail::PageHeap*)(v6 + 9136);
  1234. }
  1235. v32 = v13;
  1236. v20 = (bg::detail::Span*)v19->AllocateSpan(*(uintptr_t*)(v6 + 8 * v11 + 8440));
  1237. if(v20)
  1238. {
  1239. v31 = v20;
  1240. v33->RegisterSpan(v20);
  1241. m_next = v31;
  1242. v13 = v32;
  1243. m_start_page = v31->m_start_page;
  1244. m_page_count = v31->m_page_count;
  1245. p_m_chunk_list = &v31->m_chunk_list;
  1246. v31->m_size_class = v10;
  1247. v24 = (void**)(m_start_page << 13);
  1248. v25 = (void**)((char*)v24 + v12);
  1249. v26 = &v24[1024 * m_page_count];
  1250. if((void**)((char*)v24 + v12) <= v26)
  1251. {
  1252. while(1)
  1253. {
  1254. v27 = v25;
  1255. v25 = (void**)((char*)v25 + v12);
  1256. *p_m_chunk_list = v24;
  1257. p_m_chunk_list = v24;
  1258. if(v26 < v25)
  1259. break;
  1260. v24 = v27;
  1261. }
  1262. }
  1263. else
  1264. {
  1265. v24 = &v31->m_chunk_list;
  1266. }
  1267. *v24 = 0LL;
  1268. v28 = v6 + (v32 << 6);
  1269. v29 = *(bg::detail::Span**)(v28 + 40);
  1270. v31->m_prev = v14;
  1271. v31->m_next = v29;
  1272. *(uintptr_t*)(*(uintptr_t*)(v28 + 40) + 32LL) = (uintptr_t)v31;
  1273. *(uintptr_t*)(v28 + 40) = (uintptr_t)v31;
  1274. ++* (uintptr_t*)(v28 + 56);
  1275. LABEL_13:
  1276. m_chunk_list = (void**)m_next->m_chunk_list;
  1277. ++m_next->m_used_count;
  1278. v17 = *m_chunk_list == 0LL;
  1279. m_next->m_chunk_list = *m_chunk_list;
  1280. if(v17)
  1281. {
  1282. m_prev = m_next->m_prev;
  1283. m_prev->m_next = m_next->m_next;
  1284. m_next->m_next->m_prev = m_prev;
  1285. m_next->m_prev = 0LL;
  1286. m_next->m_next = 0LL;
  1287. --* (uintptr_t*)(v6 + (v13 << 6) + 56);
  1288. }
  1289. v8 = m_chunk_list;
  1290. goto LABEL_16;
  1291. }
  1292. return 0LL;
  1293. }
  1294. }