shm_manager.cc 57 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515
  1. #include "shm_manager.h"
  2. #include "shm_object.h"
  3. #include "shm_config.h"
  4. extern bg::detail::ShmContext g_shm_ctx;
  5. namespace bg
  6. {
  7. namespace detail
  8. {
  9. ShmManager::ShmManager(const bg::ShmOptions& options) : m_options(options)
  10. {
  11. snprintf(this->m_magic, 0x40uLL, "ByteGame/ShmManager-%s", this->m_options.identifier);
  12. }
  13. ShmManager::~ShmManager()
  14. {
  15. if(m_singletons)
  16. {
  17. m_singletons->clear();
  18. bg::ShmFree(m_singletons);
  19. this->m_singletons = nullptr;
  20. }
  21. for(size_t i = 0LL; this->m_block_count > i; ++i)
  22. {
  23. DeleteBlock(i);
  24. }
  25. }
  26. void* ShmManager::AllocateRawMemory(size_t* bytes, size_t alignment)
  27. {
  28. void* result = nullptr;
  29. if((alignment & (alignment - 1)) != 0)
  30. {
  31. SHM_ERROR("invalid alignment(%#lx).", (const char*)alignment);
  32. return result;
  33. }
  34. *bytes = PALIGN_UP(*bytes, alignment);
  35. if(m_block_count)
  36. {
  37. result = AllocateInBlock(m_block_count - 1, *bytes, alignment);
  38. if(result)
  39. {
  40. return result;
  41. }
  42. }
  43. if(CreateBlock(m_block_count, *bytes))
  44. {
  45. size_t index = m_block_count > 1 ? m_block_count - 1 : 0;
  46. result = AllocateInBlock(index, *bytes, alignment);
  47. if(!result)
  48. {
  49. SHM_ERROR("newly created block(%lu) still cannot satisfy requirement, addr(%#lx), used size(%#lx), real size(%#lx), real bytes(%#lx), alignment(%#lx).",
  50. m_block_count, m_blocks[index].addr, m_blocks[index].used_size, m_blocks[index].real_size);
  51. return result;
  52. }
  53. }
  54. else
  55. {
  56. SHM_ERROR("failed to create block(%lu), real bytes(%#lx).", this->m_block_count, *bytes);
  57. }
  58. return result;
  59. }
  60. bool ShmManager::HasSingleton(const bg::detail::TypeName& type)
  61. {
  62. auto itor = m_singletons->find(type);
  63. if(itor == m_singletons->end())
  64. {
  65. return false;
  66. }
  67. return true;
  68. }
  69. void* ShmManager::GetSingleton(const bg::detail::TypeName& type, size_t bytes, bool* first_call)
  70. {
// Return (creating on first use) the singleton storage for `type`.
// Decompiled body: the allocation path mirrors bg::ShmMalloc — pick a size
// class, pop a chunk from the chunk cache, refill from the page heap when
// the class free list is empty. Register-named temporaries (v12, v17, ...)
// are decompiler artifacts; do not assume they map 1:1 to source variables.
  71. char index;
  72. bg::detail::ShmManager::SingletonMap* v12; // r12
  73. unsigned __int8 v14; // r13
  74. __int64 v15; // r10
  75. __int64 v16; // r11
  76. bg::detail::ChunkCache::ClassCache* v17; // r12
  77. bg::detail::Span* v18; // r8
  78. void** m_chunk_list; // rax
  79. bg::detail::Span* m_prev; // rdx
  80. bool v21; // zf
  81. bg::detail::PageHeap* v24; // rdi
  82. bg::detail::Span* v25; // rax
  83. size_t m_start_page; // rcx
  84. size_t m_page_count; // rdi
  85. void** p_m_chunk_list; // rax
  86. void** v29; // rcx
  87. void** v30; // rdi
  88. void** v31; // rdx
  89. void** v32; // rsi
  90. bg::detail::ChunkCache::ClassCache* v33; // rax
  91. bg::detail::Span* m_next; // rcx
  92. __int64 v35; // [rsp+0h] [rbp-460h]
  93. bg::detail::Span* v36; // [rsp+8h] [rbp-458h]
  94. bg::detail::PageHeap* p_m_page_heap; // [rsp+10h] [rbp-450h]
  95. unsigned __int64 v40; // [rsp+18h] [rbp-448h]
  96. //std::pair<bg::detail::TypeName, void*> __v{}; // [rsp+20h] [rbp-440h] BYREF
  97. bg::detail::Span* span;
// Lazily create the registry itself inside shared memory.
  98. if(!m_singletons)
  99. {
  100. m_singletons = (bg::detail::ShmManager::SingletonMap*)bg::ShmMalloc(
  101. sizeof(bg::detail::ShmManager::SingletonMap));
  102. if(!m_singletons)
  103. {
  104. m_singletons = nullptr;
  105. return nullptr;
  106. }
  107. new(m_singletons) SingletonMap;
  108. }
  109. v12 = nullptr;
// Already registered? Return the existing storage (first_call stays unset).
  110. auto itor = m_singletons->find(type);
  111. if(itor != m_singletons->end())
  112. {
  113. return itor->second;
  114. }
// Size-class selection: <= 1 KiB uses the small index table (8-byte steps);
// > 256 KiB bypasses the size classes and takes whole pages (8 KiB pages).
  115. if(bytes <= 0x400)
  116. {
  117. index = this->m_size_map.m_index_to_class[(bytes + 7) >> 3];
  118. }
  119. else
  120. {
  121. if(bytes > 0x40000)
  122. {
// Large allocation: grab pages directly from the page heap.
// NOTE(review): on this path v12 is still nullptr when jumping to
// LABEL_25, so a null pointer would be stored/returned for the type —
// the span's address appears never to be materialized; looks like a
// decompilation gap, verify against the original binary/source.
  123. span = m_page_heap.AllocateSpan((bytes + 0x1FFF) >> 13);
  124. if(span)
  125. {
  126. if(!m_singletons)
  127. {
  128. return nullptr;
  129. }
  130. goto LABEL_25;
  131. }
  132. return 0LL;
  133. }
  134. index = this->m_size_map.m_index_to_class[(bytes + 15487) >> 7];
  135. }
  136. v14 = index;
// Both branches below do the same work (cache hit → LABEL_21, else refill
// from the page heap at LABEL_35); the split is a decompiler artifact of
// the `index > 0x56` (max class) test.
  137. if(index > 0x56u)
  138. {
  139. v16 = index;
  140. v17 = &this->m_chunk_cache.m_caches[index];
  141. v40 = this->m_size_map.m_class_to_size[index];
  142. if(v17 != (bg::detail::ChunkCache::ClassCache*)v17->free_list.m_next)
  143. goto LABEL_21;
  144. p_m_page_heap = &this->m_page_heap;
  145. v16 = v14;
  146. v15 = v14;
  147. v24 = &this->m_page_heap;
  148. goto LABEL_35;
  149. }
  150. v15 = v14;
  151. v16 = v14;
  152. v17 = &this->m_chunk_cache.m_caches[(unsigned __int64)v14];
  153. v40 = this->m_size_map.m_class_to_size[v14];
// Empty free list for this class: allocate a fresh span and carve it up.
  154. if(v17 == (bg::detail::ChunkCache::ClassCache*)v17->free_list.m_next)
  155. {
  156. p_m_page_heap = &this->m_page_heap;
  157. v24 = &this->m_page_heap;
  158. LABEL_35:
  159. v35 = v16;
  160. v25 = (bg::detail::Span*)v24->AllocateSpan(this->m_size_map.m_class_to_pages[v15]);
  161. if(v25)
  162. {
  163. v36 = v25;
  164. p_m_page_heap->RegisterSpan(v25);
  165. v18 = v36;
  166. v16 = v35;
  167. m_start_page = v36->m_start_page;
  168. m_page_count = v36->m_page_count;
  169. p_m_chunk_list = &v36->m_chunk_list;
  170. v36->m_size_class = v14;
// Thread every v40-sized chunk of the span (pages start at
// m_start_page << 13) into a singly linked free list.
  171. v29 = (void**)(m_start_page << 13);
  172. v30 = &v29[1024 * m_page_count];
  173. v31 = (void**)((char*)v29 + v40);
  174. if((void**)((char*)v29 + v40) <= v30)
  175. {
  176. while(1)
  177. {
  178. v32 = v31;
  179. v31 = (void**)((char*)v31 + v40);
  180. *p_m_chunk_list = v29;
  181. p_m_chunk_list = v29;
  182. if(v30 < v31)
  183. break;
  184. v29 = v32;
  185. }
  186. }
  187. else
  188. {
  189. v29 = &v36->m_chunk_list;
  190. }
  191. *v29 = 0LL;
// Link the new span at the head of the class's doubly linked span list.
  192. v33 = &this->m_chunk_cache.m_caches[v35];
  193. m_next = v33->free_list.m_next;
  194. v36->m_prev = &v17->free_list;
  195. v36->m_next = m_next;
  196. v33->free_list.m_next->m_prev = v36;
  197. v33->free_list.m_next = v36;
  198. ++v33->span_count;
  199. goto LABEL_22;
  200. }
  201. return 0LL;
  202. }
  203. LABEL_21:
  204. v18 = v17->free_list.m_next;
  205. LABEL_22:
// Pop one chunk off the span's free list; if the span is now exhausted
// (its chunk list went empty), unlink the span from the class list.
  206. m_chunk_list = (void**)v18->m_chunk_list;
  207. ++v18->m_used_count;
  208. m_prev = (bg::detail::Span*)*m_chunk_list;
  209. v21 = *m_chunk_list == 0LL;
  210. v18->m_chunk_list = *m_chunk_list;
  211. if(v21)
  212. {
  213. m_prev = v18->m_prev;
  214. m_prev->m_next = v18->m_next;
  215. v18->m_next->m_prev = m_prev;
  216. v18->m_prev = 0LL;
  217. v18->m_next = 0LL;
  218. --this->m_chunk_cache.m_caches[v16].span_count;
  219. }
  220. v12 = (bg::detail::ShmManager::SingletonMap*)m_chunk_list;
  221. LABEL_25:
// Record the storage for the type and tell the caller it was just created.
  222. (*m_singletons)[type] = v12;
  223. if(first_call)
  224. *first_call = 1;
  225. return v12;
  226. }
  227. void ShmManager::FreeSingleton(const bg::detail::TypeName& type)
  228. {
// Remove `type` from the registry and return its storage to the allocator.
// Decompiled body: the free path mirrors bg::ShmFree — look the address up
// in the page heap's three-level radix tree to find its Span, then either
// return whole pages (large allocations) or push the chunk back onto the
// span's free list, retiring the span when appropriate.
  229. unsigned __int64 v7; // rax
  230. bg::detail::RadixTree<10ul, 10ul, 15ul>::NodeV1* v8; // rsi
  231. bg::detail::RadixTree<10ul, 10ul, 15ul>::NodeV2* v9; // rcx
  232. bg::detail::Span* v10; // rsi
  233. __int64 m_size_class; // rdx
  234. size_t m_used_count; // rcx
  235. void* m_chunk_list; // r8
  236. size_t v14; // rcx
  237. bg::detail::PageHeap* p_m_page_heap; // rdi
  238. bool v16; // cl
  239. __int64 v17; // rdx
  240. char* v18; // rax
  241. bg::detail::Span* m_prev; // rdx
  242. bg::detail::ShmManager::SingletonMap* v20; // rbx
  243. bg::detail::ChunkCache::ClassCache* v23; // rax
  244. bg::detail::Span* v24; // rcx
  245. bg::detail::Span* m_next; // rdi
  246. if(m_singletons)
  247. {
  248. auto itor = m_singletons->find(type);
  249. if(itor != m_singletons->end())
  250. {
  251. v7 = *(uintptr_t*)&itor->second;
  252. if(v7)
  253. {
// Address outside the radix tree's coverage (page id > 35 bits):
// nothing to free; fall through and just drop the map entry.
  254. if(v7 >> 13 > 0x7FFFFFFFFLL)
  255. {
  256. }
  257. else
  258. {
// Three-level radix-tree walk: lv0 (top 10 bits of the page id),
// lv1 (next 10), lv2 (low 15) — yields the owning Span, if any.
  259. v8 = this->m_page_heap.m_span_map.lv0[v7 >> 38];
  260. if(v8)
  261. {
  262. v9 = v8->lv1[(v7 >> 28) & 0x3FF];
  263. if(v9)
  264. {
  265. v10 = v9->lv2[(v7 >> 13) & 0x7FFF];
  266. if(v10)
  267. {
  268. m_size_class = v10->m_size_class;
// NOTE(review): `(char)x == 0xFF` is always false when char is signed
// (-1 != 255 after promotion); the intended test is size_class == 0xFF
// (whole-page span). Decompiler artifact — confirm against the binary.
  269. if((char)m_size_class == 0xFF)
  270. {
  271. m_page_heap.DeallocateSpan(v10);
  272. }
  273. else
  274. {
  275. m_used_count = v10->m_used_count;
  276. if(m_used_count && (unsigned __int8)m_size_class <= 0x56u)
  277. {
// Push the chunk back onto the span's free list (LIFO).
  278. m_chunk_list = v10->m_chunk_list;
  279. v14 = m_used_count - 1;
  280. p_m_page_heap = &this->m_page_heap;
  281. *(uintptr_t*)v7 = (uintptr_t)m_chunk_list;
  282. v10->m_used_count = v14;
  283. v16 = v14 == 0;
  284. v10->m_chunk_list = (void*)v7;
  285. if(m_chunk_list)
  286. {
// Span still linked in its class list: if it just became fully free
// and the class keeps more than one span, unlink and return its pages.
  287. if(v16)
  288. {
// v17/v18: byte-offset addressing of m_caches[size_class]
// (each ClassCache is 64 bytes; +1 word past v18 is span_count).
  289. v17 = m_size_class << 6;
  290. v18 = (char*)&this->m_chunk_cache.m_caches[0].free_list.m_chunk_list + v17;
  291. if(*(size_t*)((char*)&this->m_chunk_cache.m_caches[0].span_count + v17) > 1)
  292. {
  293. m_prev = v10->m_prev;
  294. m_prev->m_next = v10->m_next;
  295. v10->m_next->m_prev = m_prev;
  296. v10->m_prev = 0LL;
  297. v10->m_next = 0LL;
  298. --* ((uintptr_t*)v18 + 1);
  299. v10->m_used_count = 0LL;
  300. v10->m_size_class = -1;
  301. v10->m_chunk_list = 0LL;
  302. p_m_page_heap->DeallocateSpan(v10);
  303. }
  304. }
  305. }
  306. else
  307. {
// Span had no free chunks before (it was unlinked from the class list):
// either retire it (class already has spans) or re-link it at the head.
  308. v23 = &this->m_chunk_cache.m_caches[m_size_class];
  309. if(v16)
  310. {
  311. if(v23->span_count)
  312. {
  313. v10->m_used_count = 0LL;
  314. v10->m_size_class = -1;
  315. v10->m_chunk_list = 0LL;
  316. p_m_page_heap->DeallocateSpan(v10);
  317. }
  318. else
  319. {
  320. m_next = v23->free_list.m_next;
  321. v10->m_prev = &v23->free_list;
  322. v10->m_next = m_next;
  323. v23->free_list.m_next->m_prev = v10;
  324. v23->free_list.m_next = v10;
  325. ++v23->span_count;
  326. }
  327. }
  328. else
  329. {
  330. v24 = v23->free_list.m_next;
  331. v10->m_prev = &v23->free_list;
  332. v10->m_next = v24;
  333. v23->free_list.m_next->m_prev = v10;
  334. v23->free_list.m_next = v10;
  335. ++v23->span_count;
  336. }
  337. }
  338. }
// Empty else: chunk cannot be freed (used_count 0 or class out of range);
// the registry entry is still erased below.
  339. else
  340. {
  341. }
  342. }
  343. goto LABEL_21;
  344. }
  345. }
  346. }
  347. }
  348. }
  349. LABEL_21:
// Always drop the registry entry, even if the memory could not be freed.
  350. v20 = this->m_singletons;
  351. v20->erase(itor);
  352. }
  353. }
  354. }
  355. bool ShmManager::OnResume(const char* identifier)
  356. {
  357. int v2; // eax
  358. size_t v3; // rbx
  359. unsigned int v4; // eax
  360. unsigned int v5; // r13d
  361. char magic[64]; // [rsp+0h] [rbp-60h] BYREF
  362. v2 = snprintf(magic, 0x40uLL, "ByteGame/ShmManager-%s", identifier);
  363. if(!strncmp(magic, this->m_magic, v2))
  364. {
  365. v3 = 0LL;
  366. if(this->m_block_count)
  367. {
  368. while(1)
  369. {
  370. v4 = AttachBlock(v3);
  371. v5 = v4;
  372. if(!(char)v4)
  373. break;
  374. if(this->m_block_count <= ++v3)
  375. goto LABEL_7;
  376. }
  377. return v5;
  378. }
  379. else
  380. {
  381. LABEL_7:
  382. ++this->m_version;
  383. return true;
  384. }
  385. }
  386. else
  387. {
  388. return false;
  389. }
  390. }
  391. bool ShmManager::OnCreate(void)
  392. {
  393. return m_size_map.Init();
  394. }
  395. bool ShmManager::DeleteBlock(size_t index)
  396. {
  397. unsigned int v3; // r14d
  398. char* v4; // rbx
  399. unsigned __int64 v5; // rsi
  400. char path[256]; // [rsp+0h] [rbp-130h] BYREF
  401. if(this->m_block_count <= index)
  402. {
  403. v3 = 0;
  404. }
  405. else
  406. {
  407. v3 = 1;
  408. v4 = (char*)this + 32 * index;
  409. v5 = *((uintptr_t*)v4 + 3113);
  410. if(v5)
  411. {
  412. m_options.AddFixedAddress(v5);
  413. snprintf(path, 0x100uLL, "%s-%03zu.mmap", this->m_options.identifier, index);
  414. bg::detail::ShmObjectDelete(*((void**)v4 + 3113), (uintptr_t) * ((void**)v4 + 3116), path);
  415. *((uintptr_t*)v4 + 3113) = 0LL;
  416. *((uintptr_t*)v4 + 3114) = 0LL;
  417. *((uintptr_t*)v4 + 3115) = 0LL;
  418. *((uintptr_t*)v4 + 3116) = 0LL;
  419. }
  420. }
  421. return v3;
  422. }
  423. void* ShmManager::AllocateInBlock(size_t index, size_t bytes, size_t alignment)
  424. {
// Bump-pointer allocation inside block `index`.
// Decompiled body: `(uintptr_t*)v8 + 3113/3114` appears to address
// m_blocks[index].addr / m_blocks[index].used_size (the same descriptor
// the symbolic m_blocks[index].mmap_size/real_size reads below come
// from) — TODO confirm the field order against the class layout.
// Fails (nullptr) when the request cannot fit even after growing the
// block up to its mmap_size via ResizeBlock.
  425. size_t v7; // r8
  426. char* v8; // r12
  427. __int64 v9; // rdx
  428. __int64 v10; // rax
  429. __int64 v11; // rbx
  430. size_t v12; // rcx
  431. size_t mmap_size; // rsi
  432. size_t real_size; // rdi
  433. size_t shm_block_grow_size; // r10
  434. const char* v16; // r15
  435. const char* v17; // rcx
  436. size_t v18; // rdx
  437. size_t v20; // [rsp+0h] [rbp-40h]
  438. char* v21; // [rsp+8h] [rbp-38h]
  439. if(this->m_block_count <= index)
  440. {
  441. return 0LL;
  442. }
  443. else
  444. {
// v9 = block base address, v10 = bytes already used, v11 = next free
// address; v7 accumulates the padding needed to honor `alignment`.
  445. v7 = 0LL;
  446. v8 = (char*)this + 32 * index;
  447. v9 = *((uintptr_t*)v8 + 3113);
  448. v10 = *((uintptr_t*)v8 + 3114);
  449. v11 = v10 + v9;
  450. if(((v10 + v9) & (alignment - 1)) != 0)
  451. {
  452. v7 = alignment - ((v10 + v9) & (alignment - 1));
  453. v11 += v7;
  454. }
  455. v12 = v11 + bytes;
  456. mmap_size = this->m_blocks[index].mmap_size;
// Request exceeds the block's maximum mapped size: cannot ever fit here.
  457. if(v11 + bytes > mmap_size + v9)
  458. {
  459. return 0LL;
  460. }
  461. else
  462. {
  463. real_size = this->m_blocks[index].real_size;
// Fits in the currently committed size: just advance used_size.
  464. if(v12 <= real_size + v9)
  465. {
  466. LABEL_13:
  467. *((uintptr_t*)v8 + 3114) = v7 + bytes + v10;
  468. return (void*)v11;
  469. }
// Needs growth: default to one grow step; if that is still too small,
// grow to exactly the required end offset.
  470. shm_block_grow_size = this->m_options.shm_block_grow_size;
  471. v16 = (const char*)(shm_block_grow_size + real_size);
  472. if(v12 > shm_block_grow_size + real_size + v9)
  473. {
  474. v17 = (const char*)(v12 - v9);
  475. v16 = v17;
// NOTE(review): the rounding of the target size up to a multiple of
// shm_block_grow_size was deliberately commented out; v18 is forced to
// 0, so the `if(v18)` branch below is dead code.
  476. //v18 = (uintptr_t)&v17[-real_size] % shm_block_grow_size;
  477. v18 = 0;
  478. if(v18)
  479. v16 = &v17[shm_block_grow_size - v18];
  480. }
  481. v21 = (char*)index;
// Never grow past the mapped size.
  482. if((unsigned __int64)v16 > mmap_size)
  483. v16 = (const char*)mmap_size;
  484. v20 = v7;
  485. if(ResizeBlock(index, v16))
  486. {
// Re-read used_size after the resize, then take the fast-path store.
  487. v10 = *((uintptr_t*)v8 + 3114);
  488. v7 = v20;
  489. goto LABEL_13;
  490. }
  491. return nullptr;
  492. }
  493. }
  494. }
  495. bool ShmManager::CreateBlock(size_t index, size_t min_size)
  496. {
  497. size_t shm_block_grow_size; // rcx
  498. size_t shm_block_mmap_size; // rax
  499. __int64 v8; // rax
  500. uintptr_t fixed_addr; // [rsp+8h] [rbp-138h] BYREF
  501. size_t real_size; // [rsp+10h] [rbp-130h] BYREF
  502. size_t mmap_size; // [rsp+18h] [rbp-128h] BYREF
  503. char path[256]; // [rsp+20h] [rbp-120h] BYREF
  504. if(index > 0xFF)
  505. {
  506. return 0LL;
  507. }
  508. else
  509. {
  510. shm_block_grow_size = this->m_options.shm_block_grow_size;
  511. shm_block_mmap_size = this->m_options.shm_block_mmap_size;
  512. fixed_addr = 0LL;
  513. real_size = shm_block_grow_size;
  514. mmap_size = shm_block_mmap_size;
  515. if(shm_block_grow_size < min_size)
  516. real_size = min_size;
  517. if(shm_block_mmap_size < min_size)
  518. mmap_size = min_size;
  519. m_options.PopFixedAddress(&fixed_addr);
  520. snprintf(path, 0x100uLL, "%s-%03zu.mmap", this->m_options.identifier, index);
  521. v8 = (uintptr_t)bg::detail::ShmObjectCreate(path, fixed_addr, &real_size, &mmap_size);
  522. if(!v8)
  523. {
  524. if(fixed_addr)
  525. m_options.AddFixedAddress(fixed_addr);
  526. return false;
  527. }
  528. m_blocks[index].addr = (void*)v8;
  529. m_blocks[index].real_size = real_size;
  530. m_blocks[index].mmap_size = mmap_size;
  531. m_blocks[index].used_size = 0;
  532. if(this->m_block_count != index)
  533. return true;
  534. this->m_block_count = index + 1;
  535. }
  536. return true;
  537. }
  538. bool ShmManager::AttachBlock(size_t index)
  539. {
  540. __int64 result; // rax
  541. char* v3; // rbx
  542. size_t v4; // rax
  543. __int64 v6; // rdx
  544. size_t real_size; // [rsp+0h] [rbp-130h] BYREF
  545. size_t mmap_size; // [rsp+8h] [rbp-128h] BYREF
  546. char path[256]; // [rsp+10h] [rbp-120h] BYREF
  547. if(this->m_block_count <= index)
  548. {
  549. return 0LL;
  550. }
  551. else
  552. {
  553. result = 1LL;
  554. v3 = (char*)this + 32 * index;
  555. if(*((uintptr_t*)v3 + 3113))
  556. {
  557. v4 = *((uintptr_t*)v3 + 3116);
  558. real_size = 0LL;
  559. mmap_size = v4;
  560. snprintf(path, 0x100uLL, "%s-%03zu.mmap", this->m_options.identifier, index);
  561. v6 = (uintptr_t)bg::detail::ShmObjectAttach(path, (uintptr_t) * ((const char**)v3 + 3113), &real_size, &mmap_size);
  562. result = 0LL;
  563. if(v6)
  564. {
  565. return 1LL;
  566. }
  567. }
  568. }
  569. return result;
  570. }
  571. bool ShmManager::ResizeBlock(size_t index, const char* new_size)
  572. {
  573. bool v3; // cc
  574. unsigned int v5; // r13d
  575. size_t new_sizea; // [rsp+8h] [rbp-128h] BYREF
  576. char path[256]; // [rsp+10h] [rbp-120h] BYREF
  577. v3 = this->m_block_count <= index;
  578. new_sizea = (size_t)new_size;
  579. if(v3)
  580. {
  581. return false;
  582. }
  583. else if(this->m_blocks[index].mmap_size < (unsigned __int64)new_size)
  584. {
  585. return false;
  586. }
  587. else
  588. {
  589. snprintf(path, 0x100uLL, "%s-%03zu.mmap", this->m_options.identifier, index);
  590. v5 = bg::detail::ShmObjectResize(path, &new_sizea);
  591. if((char)v5)
  592. {
  593. this->m_blocks[index].real_size = new_sizea;
  594. }
  595. return v5;
  596. }
  597. }
  598. }
  599. void* ShmMalloc(size_t bytes)
  600. {
// Global shared-memory malloc.
// Decompiled body: size-class allocator (tcmalloc-style) driven by the
// global manager in g_shm_ctx. Small/medium requests pop a chunk from the
// per-class chunk cache, refilling from the page heap when empty; requests
// above CLASS_MAX_BYTES take whole 8 KiB pages directly.
// Raw offsets from v6 (the manager base): (v10 << 6) indexes the 64-byte
// ClassCache entries; +40 appears to be free_list.m_next and +56
// span_count — TODO confirm against the class layout.
  601. int64_t v6; // rbx
  602. uint8_t index; // r15
  603. __int64 v9; // r14
  604. __int64 v10; // r13
  605. bg::detail::Span* v11; // r12
  606. bg::detail::Span* m_next; // r8
  607. void** result; // rax
  608. bool v14; // zf
  609. bg::detail::Span* Span; // rax
  610. bg::detail::PageHeap* v16; // rdi
  611. bg::detail::Span* v17; // rax
  612. size_t m_start_page; // rcx
  613. size_t m_page_count; // rdi
  614. void** p_m_chunk_list; // rax
  615. void** v21; // rcx
  616. void** v22; // rdx
  617. void** v23; // rdi
  618. void** v24; // rsi
  619. __int64 v25; // rax
  620. bg::detail::Span* v26; // rcx
  621. bg::detail::Span* m_prev; // rdx
  622. bg::detail::Span* v28; // [rsp+0h] [rbp-40h]
  623. bg::detail::PageHeap* v29; // [rsp+8h] [rbp-38h]
// No manager yet (shm not initialized): allocation is impossible.
  624. v6 = (int64_t)g_shm_ctx.mgr;
  625. if(!v6)
  626. {
  627. return nullptr;
  628. }
// Size-class selection, mirroring ShmManager::GetSingleton.
  629. if(bytes <= BYTES_COMP_VALUE)
  630. {
  631. index = g_shm_ctx.mgr->m_size_map.m_index_to_class[SMALL_BYTES_TO_INDEX(bytes)];
  632. }
  633. else
  634. {
  635. if(bytes > CLASS_MAX_BYTES)
  636. {
// Large allocation: whole pages from the page heap.
// NOTE(review): `Span + 16` is Span-object pointer arithmetic in the
// decompiled text; the intent looks like "field at byte offset 16"
// (the start page, cf. m_start_page use below) — verify.
  637. Span = g_shm_ctx.mgr->m_page_heap.AllocateSpan(BYTES_TO_PAGES(bytes + 0x1FFF));
  638. if(Span)
  639. {
  640. return (void**)(*(uintptr_t*)(Span + 16) << 13);
  641. }
  642. return nullptr;
  643. }
// NOTE(review): temp1 is computed and never used — decompiler residue.
  644. auto temp1 = BIG_BYTES_TO_INDEX(bytes);
  645. index = g_shm_ctx.mgr->m_size_map.m_index_to_class[BIG_BYTES_TO_INDEX(bytes)];
  646. }
// Both branches perform the same lookup; the split on CLASS_MAX_COUNT is
// a decompiler artifact (cache hit → LABEL_7, refill → LABEL_18).
  647. if(index > CLASS_MAX_COUNT)
  648. {
  649. v9 = g_shm_ctx.mgr->m_size_map.m_class_to_size[index];
  650. v10 = index;
  651. v11 = &g_shm_ctx.mgr->m_chunk_cache.m_caches[index].free_list;
  652. if(v11 != v11->m_next)
  653. goto LABEL_7;
  654. v29 = &g_shm_ctx.mgr->m_page_heap;
  655. v16 = &g_shm_ctx.mgr->m_page_heap;
  656. goto LABEL_18;
  657. }
  658. v9 = g_shm_ctx.mgr->m_size_map.m_class_to_size[index];
  659. v10 = index;
  660. v11 = &g_shm_ctx.mgr->m_chunk_cache.m_caches[index].free_list;
// Empty class free list: allocate a fresh span and carve it into chunks.
  661. if(v11 == v11->m_next)
  662. {
  663. v29 = &g_shm_ctx.mgr->m_page_heap;
  664. v16 = &g_shm_ctx.mgr->m_page_heap;
  665. LABEL_18:
  666. v17 = v16->AllocateSpan(g_shm_ctx.mgr->m_size_map.m_class_to_pages[index]);
  667. if(v17)
  668. {
  669. v28 = v17;
  670. v29->RegisterSpan(v17);
  671. m_next = v28;
  672. m_start_page = v28->m_start_page;
  673. m_page_count = v28->m_page_count;
  674. p_m_chunk_list = &v28->m_chunk_list;
  675. v28->m_size_class = index;
// Thread every v9-sized chunk of the span (base = m_start_page << 13)
// into a singly linked free list terminated by null.
  676. v21 = (void**)(m_start_page << 13);
  677. v22 = (void**)((char*)v21 + v9);
  678. v23 = &v21[1024 * m_page_count];
  679. if((void**)((char*)v21 + v9) <= v23)
  680. {
  681. while(1)
  682. {
  683. v24 = v22;
  684. v22 = (void**)((char*)v22 + v9);
  685. *p_m_chunk_list = v21;
  686. p_m_chunk_list = v21;
  687. if(v23 < v22)
  688. break;
  689. v21 = v24;
  690. }
  691. }
  692. else
  693. {
  694. v21 = &v28->m_chunk_list;
  695. }
  696. *v21 = 0LL;
// Link the new span at the head of the class's span list (raw offsets).
  697. v25 = v6 + (v10 << 6);
  698. v26 = *(bg::detail::Span**)(v25 + 40);
  699. v28->m_prev = v11;
  700. v28->m_next = v26;
  701. *(uintptr_t*)(*(uintptr_t*)(v25 + 40) + 32LL) = (uintptr_t)v28;
  702. *(uintptr_t*)(v25 + 40) = (uintptr_t)v28;
  703. ++* (uintptr_t*)(v25 + 56);
  704. goto LABEL_8;
  705. }
  706. return 0LL;
  707. }
  708. LABEL_7:
  709. m_next = v11->m_next;
  710. LABEL_8:
// Pop one chunk off the span; if the span just ran out of free chunks,
// unlink it from the class list and decrement the span count.
  711. result = (void**)m_next->m_chunk_list;
  712. ++m_next->m_used_count;
  713. v14 = *result == 0LL;
  714. m_next->m_chunk_list = *result;
  715. if(v14)
  716. {
  717. m_prev = m_next->m_prev;
  718. m_prev->m_next = m_next->m_next;
  719. m_next->m_next->m_prev = m_prev;
  720. m_next->m_prev = 0LL;
  721. m_next->m_next = 0LL;
  722. --* (uintptr_t*)(v6 + (v10 << 6) + 56);
  723. }
  724. return result;
  725. }
  726. void ShmFree(void* ptr)
  727. {
// Global shared-memory free, the inverse of ShmMalloc.
// Decompiled body: walks the page heap's radix tree (raw offsets from the
// manager base v6; +9136 appears to be the page heap / span map — TODO
// confirm) to find the Span owning `ptr`, then returns whole pages or
// pushes the chunk back onto the span's free list. Silently ignores
// pointers it cannot map (null, out of range, no span).
  728. __int64 v6; // rcx
  729. __int64 v7; // rsi
  730. __int64 v8; // rax
  731. bg::detail::Span* v9; // rsi
  732. __int64 m_size_class; // rax
  733. size_t m_used_count; // rdx
  734. void* m_chunk_list; // r9
  735. size_t v13; // rdx
  736. bool v14; // dl
  737. __int64 v15; // rax
  738. bg::detail::Span* m_prev; // rdx
  739. __int64 v17; // rax
  740. bg::detail::Span* v18; // rdi
  741. bg::detail::Span* v19; // rcx
  742. v6 = (int64_t)g_shm_ctx.mgr;
  743. if(!v6)
  744. {
  745. return;
  746. }
  747. if(ptr)
  748. {
// Page id beyond the radix tree's 35-bit coverage: not ours, ignore.
  749. if((uintptr_t)ptr >> 13 > 0x7FFFFFFFFLL)
  750. {
  751. }
  752. else
  753. {
// Three-level radix-tree walk (top 10 / next 10 / low 15 bits of the
// page id) to the owning Span — same scheme as FreeSingleton.
  754. v7 = *(uintptr_t*)(v6 + 8 * ((uintptr_t)ptr >> 38) + 9136);
  755. if(v7)
  756. {
  757. v8 = *(uintptr_t*)(v7 + 8 * (((uintptr_t)ptr >> 28) & 0x3FF));
  758. if(v8)
  759. {
  760. v9 = *(bg::detail::Span**)(v8 + 8 * (((uintptr_t)ptr >> 13) & 0x7FFF));
  761. if(v9)
  762. {
  763. m_size_class = v9->m_size_class;
// NOTE(review): `(char)x == 0xFF` is always false for signed char
// (-1 != 255 after promotion); intended test is size_class == 0xFF
// (whole-page span). Decompiler artifact — confirm against the binary.
  764. if((char)m_size_class == 0xFF)
  765. {
  766. ((bg::detail::PageHeap*)(v6 + 9136))->DeallocateSpan(v9);
  767. }
  768. else
  769. {
  770. m_used_count = v9->m_used_count;
  771. if(m_used_count && (unsigned __int8)m_size_class <= 0x56u)
  772. {
// Push the chunk back on the span's free list (LIFO).
  773. m_chunk_list = v9->m_chunk_list;
  774. v13 = m_used_count - 1;
  775. *(uintptr_t*)ptr = (uintptr_t)m_chunk_list;
  776. v9->m_used_count = v13;
  777. v14 = v13 == 0;
  778. v9->m_chunk_list = (void*)ptr;
  779. if(m_chunk_list)
  780. {
// Span is linked in its class list: if it just became fully free and
// the class keeps more than one span, unlink it and return its pages.
// (m_size_class << 6) + 48/56: byte offsets into the 64-byte ClassCache.
  781. if(v14)
  782. {
  783. v15 = v6 + (m_size_class << 6) + 48;
  784. if(*(uintptr_t*)(v15 + 8) > 1uLL)
  785. {
  786. m_prev = v9->m_prev;
  787. m_prev->m_next = v9->m_next;
  788. v9->m_next->m_prev = m_prev;
  789. v9->m_prev = 0LL;
  790. v9->m_next = 0LL;
  791. --* (uintptr_t*)(v15 + 8);
  792. LABEL_14:
// Reset the span and hand its pages back to the page heap.
  793. v9->m_used_count = 0LL;
  794. v9->m_size_class = -1;
  795. v9->m_chunk_list = 0LL;
  796. ((bg::detail::PageHeap*)(v6 + 9136))->DeallocateSpan(v9);
  797. return;
  798. }
  799. }
  800. }
  801. else
  802. {
// Span had no free chunks (it was unlinked): retire it if the class
// already has spans, otherwise re-link it at the head of the list.
  803. v17 = v6 + (m_size_class << 6);
  804. if(v14)
  805. {
  806. if(*(uintptr_t*)(v17 + 56))
  807. goto LABEL_14;
  808. v18 = *(bg::detail::Span**)(v17 + 40);
  809. v9->m_prev = (bg::detail::Span*)v17;
  810. v9->m_next = v18;
  811. *(uintptr_t*)(*(uintptr_t*)(v17 + 40) + 32LL) = (uintptr_t)v9;
  812. *(uintptr_t*)(v17 + 40) = (uintptr_t)v9;
  813. ++* (uintptr_t*)(v17 + 56);
  814. }
  815. else
  816. {
  817. v19 = *(bg::detail::Span**)(v17 + 40);
  818. v9->m_prev = (bg::detail::Span*)v17;
  819. v9->m_next = v19;
  820. *(uintptr_t*)(*(uintptr_t*)(v17 + 40) + 32LL) = (uintptr_t)v9;
  821. ++* (uintptr_t*)(v17 + 56);
  822. *(uintptr_t*)(v17 + 40) = (uintptr_t)v9;
  823. }
  824. }
  825. }
// Empty else: chunk not freeable (used_count 0 or class out of range).
  826. else
  827. {
  828. }
  829. }
  830. return;
  831. }
  832. }
  833. }
  834. }
  835. }
  836. }
  837. void* ShmRealloc(void* old_ptr, size_t new_bytes)
  838. {
  839. __int64 v6; // r14
  840. unsigned __int64 v9; // r15
  841. unsigned __int64 v10; // r10
  842. __int64 v11; // r11
  843. __int64 v12; // rax
  844. __int64 v13; // rax
  845. __int64 v14; // rax
  846. __int64 v15; // rbx
  847. size_t v16; // rbx
  848. size_t v17; // rdx
  849. void** result; // rax
  850. bg::detail::Span* v19; // rax
  851. bg::detail::Span* v20; // rax
  852. void** m_chunk_list; // rdi
  853. void* v22; // rdx
  854. size_t v23; // rdx
  855. __int64 v24; // rax
  856. __int64 v25; // rax
  857. bg::detail::Span* v26; // rsi
  858. uint8_t m_size_class; // al
  859. size_t m_used_count; // rcx
  860. __int64 v29; // rdx
  861. void* v30; // rax
  862. size_t v31; // rcx
  863. bg::detail::PageHeap* v32; // rdi
  864. bool v33; // cl
  865. __int64 v34; // rdx
  866. bg::detail::Span* v35; // rcx
  867. bg::detail::Span* v36; // rax
  868. bg::detail::Span* m_next; // rax
  869. void** v38; // rdi
  870. void* v39; // rdx
  871. bg::detail::Span* v40; // rdx
  872. __int64 Span; // rax
  873. __int64 v42; // r14
  874. bg::detail::Span* v43; // rcx
  875. bg::detail::Span* v44; // rdx
  876. __int64 v45; // rax
  877. bg::detail::PageHeap* v46; // rdi
  878. bg::detail::Span* v47; // rax
  879. size_t v48; // r8
  880. void** v49; // rsi
  881. size_t v50; // rdi
  882. unsigned __int64 v51; // rdx
  883. size_t v52; // r8
  884. void** v53; // rcx
  885. __int64 v54; // rdx
  886. bg::detail::Span* v55; // rsi
  887. bg::detail::PageHeap* v56; // rdi
  888. bg::detail::Span* v57; // rax
  889. size_t m_start_page; // rcx
  890. size_t m_page_count; // r8
  891. void** p_m_chunk_list; // rsi
  892. void** v61; // rcx
  893. void** v62; // rdx
  894. void** v63; // r8
  895. void** v64; // rdi
  896. __int64 v65; // rdx
  897. bg::detail::Span* v66; // rsi
  898. bg::detail::Span* m_prev; // rdx
  899. __int64 v68; // [rsp+0h] [rbp-70h]
  900. __int64 v69; // [rsp+0h] [rbp-70h]
  901. __int64 v70; // [rsp+0h] [rbp-70h]
  902. __int64 v71; // [rsp+8h] [rbp-68h]
  903. unsigned __int64 v72; // [rsp+8h] [rbp-68h]
  904. unsigned __int64 v73; // [rsp+8h] [rbp-68h]
  905. __int64 v74; // [rsp+8h] [rbp-68h]
  906. unsigned __int64 v75; // [rsp+8h] [rbp-68h]
  907. unsigned __int64 v76; // [rsp+8h] [rbp-68h]
  908. bg::detail::PageHeap* v77; // [rsp+10h] [rbp-60h]
  909. bg::detail::PageHeap* v78; // [rsp+10h] [rbp-60h]
  910. bg::detail::PageHeap* v79; // [rsp+10h] [rbp-60h]
  911. bg::detail::PageHeap* v80; // [rsp+10h] [rbp-60h]
  912. __int64 v81; // [rsp+18h] [rbp-58h]
  913. __int64 v82; // [rsp+18h] [rbp-58h]
  914. bg::detail::Span* v83; // [rsp+18h] [rbp-58h]
  915. __int64 v84; // [rsp+20h] [rbp-50h]
  916. __int64 v85; // [rsp+20h] [rbp-50h]
  917. bg::detail::Span* v86; // [rsp+20h] [rbp-50h]
  918. __int64 v87; // [rsp+28h] [rbp-48h]
  919. __int64 v88; // [rsp+28h] [rbp-48h]
  920. __int64 v89; // [rsp+28h] [rbp-48h]
  921. bg::detail::Span* v90; // [rsp+30h] [rbp-40h]
  922. unsigned __int64 v91; // [rsp+30h] [rbp-40h]
  923. bg::detail::Span* v92; // [rsp+30h] [rbp-40h]
  924. __int64 v93; // [rsp+30h] [rbp-40h]
  925. __int64 v94; // [rsp+30h] [rbp-40h]
  926. uint8_t dest; // [rsp+38h] [rbp-38h]
  927. void** desta; // [rsp+38h] [rbp-38h]
  928. uint8_t destb; // [rsp+38h] [rbp-38h]
  929. void* destc; // [rsp+38h] [rbp-38h]
  930. void* destd; // [rsp+38h] [rbp-38h]
  931. v6 = (int64_t)g_shm_ctx.mgr;
  932. if(!v6)
  933. {
  934. return 0LL;
  935. }
  936. if(!old_ptr)
  937. return bg::ShmMalloc(new_bytes);
  938. if(!new_bytes)
  939. {
  940. bg::ShmFree((void*)old_ptr);
  941. return 0LL;
  942. }
  943. if((uintptr_t)old_ptr >> 13 > 0x7FFFFFFFFLL)
  944. {
  945. LABEL_51:
  946. return 0LL;
  947. }
  948. v9 = ((uintptr_t)old_ptr >> 13) & 0x7FFF;
  949. v10 = ((uintptr_t)old_ptr >> 28) & 0x3FF;
  950. v11 = v6 + 8 * ((uintptr_t)old_ptr >> 38);
  951. v12 = *(uintptr_t*)(v11 + 9136);
  952. if(!v12)
  953. goto LABEL_51;
  954. v13 = *(uintptr_t*)(v12 + 8 * v10);
  955. if(!v13)
  956. goto LABEL_51;
  957. v14 = *(uintptr_t*)(v13 + 8 * v9);
  958. if(!v14)
  959. goto LABEL_51;
  960. v15 = *(unsigned __int8*)(v14 + 1);
  961. if((char)v15 == 0xFF)
  962. {
  963. v16 = *(uintptr_t*)(v14 + 24) << 13;
  964. }
  965. else
  966. {
  967. if((unsigned __int8)v15 > 0x56u)
  968. {
  969. v11 = v6 + 8 * ((uintptr_t)old_ptr >> 38);
  970. v10 = ((uintptr_t)old_ptr >> 28) & 0x3FF;
  971. }
  972. v16 = *(uintptr_t*)(v6 + 8 * v15 + 7744);
  973. }
  974. v17 = v16 + (v16 >> 2);
  975. if(v16 > v16 >> 2)
  976. v17 = -1LL;
  977. if(new_bytes < v16 >> 1 || (result = (void**)old_ptr, new_bytes > v16))
  978. {
  979. if(new_bytes >= v17 || new_bytes <= v16)
  980. goto LABEL_19;
  981. if(v17 <= 0x400)
  982. {
  983. destb = *(char*)(v6 + ((v17 + 7) >> 3) + 5568);
  984. }
  985. else
  986. {
  987. if(v17 > 0x40000)
  988. {
  989. v93 = v11;
  990. destc = (void*)v10;
  991. Span = (uintptr_t)((bg::detail::PageHeap*)(v6 + 9136))->AllocateSpan((v17 + 0x1FFF) >> 13);
  992. v10 = (unsigned __int64)destc;
  993. v11 = v93;
  994. if(Span)
  995. {
  996. desta = (void**)(*(uintptr_t*)(Span + 16) << 13);
  997. if(desta)
  998. goto LABEL_27;
  999. }
  1000. goto LABEL_19;
  1001. }
  1002. destb = *(char*)(v6 + ((v17 + 15487) >> 7) + 5568);
  1003. }
  1004. if(destb > 0x56u)
  1005. {
  1006. v71 = v11;
  1007. v77 = (bg::detail::PageHeap*)v10;
  1008. v10 = (unsigned __int64)v77;
  1009. v11 = v71;
  1010. v85 = destb;
  1011. v82 = *(uintptr_t*)(v6 + 8LL * destb + 7744);
  1012. v89 = destb;
  1013. v36 = (bg::detail::Span*)(v6 + ((unsigned __int64)destb << 6));
  1014. v92 = v36;
  1015. if(v36 != v36->m_next)
  1016. goto LABEL_47;
  1017. v78 = (bg::detail::PageHeap*)(v6 + 9136);
  1018. v69 = v71;
  1019. v72 = v10;
  1020. v10 = v72;
  1021. v11 = v69;
  1022. v46 = (bg::detail::PageHeap*)(v6 + 9136);
  1023. }
  1024. else
  1025. {
  1026. v85 = destb;
  1027. v82 = *(uintptr_t*)(v6 + 8LL * destb + 7744);
  1028. v89 = destb;
  1029. v36 = (bg::detail::Span*)(v6 + ((unsigned __int64)destb << 6));
  1030. v92 = v36;
  1031. if(v36 != v36->m_next)
  1032. {
  1033. LABEL_47:
  1034. m_next = v36->m_next;
  1035. goto LABEL_48;
  1036. }
  1037. v78 = (bg::detail::PageHeap*)(v6 + 9136);
  1038. v46 = (bg::detail::PageHeap*)(v6 + 9136);
  1039. }
  1040. v68 = v11;
  1041. v73 = v10;
  1042. v47 = (bg::detail::Span*)v46->bg::detail::PageHeap::AllocateSpan(*(uintptr_t*)(v6 + 8 * v85 + 8440));
  1043. v10 = v73;
  1044. v11 = v68;
  1045. if(!v47)
  1046. {
  1047. LABEL_19:
  1048. if(new_bytes <= 0x400)
  1049. {
  1050. dest = *(char*)(v6 + ((new_bytes + 7) >> 3) + 5568);
  1051. goto LABEL_22;
  1052. }
  1053. if(new_bytes <= 0x40000)
  1054. {
  1055. dest = *(char*)(v6 + ((new_bytes + 15487) >> 7) + 5568);
  1056. LABEL_22:
  1057. if(dest > 0x56u)
  1058. {
  1059. v74 = v11;
  1060. v79 = (bg::detail::PageHeap*)v10;
  1061. v10 = (unsigned __int64)v79;
  1062. v11 = v74;
  1063. v81 = dest;
  1064. v87 = *(char*)(v6 + 8LL * dest + 7744);
  1065. v84 = dest;
  1066. v19 = (bg::detail::Span*)(v6 + ((unsigned __int64)dest << 6));
  1067. v90 = v19;
  1068. if(v19 != v19->m_next)
  1069. goto LABEL_24;
  1070. v80 = (bg::detail::PageHeap*)(v6 + 9136);
  1071. v70 = v74;
  1072. v75 = v10;
  1073. v10 = v75;
  1074. v11 = v70;
  1075. v56 = (bg::detail::PageHeap*)(v6 + 9136);
  1076. }
  1077. else
  1078. {
  1079. v81 = dest;
  1080. v87 = *(char*)(v6 + 8LL * dest + 7744);
  1081. v84 = dest;
  1082. v19 = (bg::detail::Span*)(v6 + ((unsigned __int64)dest << 6));
  1083. v90 = v19;
  1084. if(v19 != v19->m_next)
  1085. {
  1086. LABEL_24:
  1087. v20 = v19->m_next;
  1088. goto LABEL_25;
  1089. }
  1090. v80 = (bg::detail::PageHeap*)(v6 + 9136);
  1091. v56 = (bg::detail::PageHeap*)(v6 + 9136);
  1092. }
  1093. v68 = v11;
  1094. v76 = v10;
  1095. v57 = v56->AllocateSpan(*(uintptr_t*)(v6 + 8 * v81 + 8440));
  1096. if(v57)
  1097. {
  1098. v83 = v57;
  1099. v80->RegisterSpan(v57);
  1100. v20 = v83;
  1101. v10 = v76;
  1102. v11 = v68;
  1103. m_start_page = v83->m_start_page;
  1104. m_page_count = v83->m_page_count;
  1105. p_m_chunk_list = &v83->m_chunk_list;
  1106. v83->m_size_class = dest;
  1107. v61 = (void**)(m_start_page << 13);
  1108. v62 = (void**)((char*)v61 + v87);
  1109. v63 = &v61[1024 * m_page_count];
  1110. if((void**)((char*)v61 + v87) <= v63)
  1111. {
  1112. while(1)
  1113. {
  1114. v64 = v62;
  1115. v62 = (void**)((char*)v62 + v87);
  1116. *p_m_chunk_list = v61;
  1117. p_m_chunk_list = v61;
  1118. if(v63 < v62)
  1119. break;
  1120. v61 = v64;
  1121. }
  1122. }
  1123. else
  1124. {
  1125. v61 = &v83->m_chunk_list;
  1126. }
  1127. *v61 = 0LL;
  1128. v65 = v6 + (v84 << 6);
  1129. v66 = *(bg::detail::Span**)(v65 + 40);
  1130. v83->m_prev = v90;
  1131. v83->m_next = v66;
  1132. *(uintptr_t*)(*(uintptr_t*)(v65 + 40) + 32LL) = (uintptr_t)v83;
  1133. *(uintptr_t*)(v65 + 40) = (uintptr_t)v83;
  1134. ++* (uintptr_t*)(v65 + 56);
  1135. LABEL_25:
  1136. m_chunk_list = (void**)v20->m_chunk_list;
  1137. ++v20->m_used_count;
  1138. v22 = *m_chunk_list;
  1139. desta = m_chunk_list;
  1140. v20->m_chunk_list = *m_chunk_list;
  1141. if(!v22)
  1142. {
  1143. m_prev = v20->m_prev;
  1144. m_prev->m_next = v20->m_next;
  1145. v20->m_next->m_prev = m_prev;
  1146. v20->m_prev = 0LL;
  1147. v20->m_next = 0LL;
  1148. --* (uintptr_t*)(v6 + (v84 << 6) + 56);
  1149. }
  1150. goto LABEL_27;
  1151. }
  1152. return 0LL;
  1153. }
  1154. v94 = v11;
  1155. destd = (void*)v10;
  1156. v45 = (uintptr_t)((bg::detail::PageHeap*)(v6 + 9136))->AllocateSpan((new_bytes + 0x1FFF) >> 13);
  1157. v10 = (unsigned __int64)destd;
  1158. v11 = v94;
  1159. if(!v45)
  1160. return 0LL;
  1161. result = (void**)(*(uintptr_t*)(v45 + 16) << 13);
  1162. desta = result;
  1163. if(!result)
  1164. return result;
  1165. LABEL_27:
  1166. v23 = new_bytes;
  1167. if(v16 <= new_bytes)
  1168. v23 = v16;
  1169. v88 = v11;
  1170. v91 = v10;
  1171. memcpy(desta, (const void*)old_ptr, v23);
  1172. v24 = *(uintptr_t*)(v88 + 9136);
  1173. if(v24 && (v25 = *(uintptr_t*)(v24 + 8 * v91)) != 0 && (v26 = *(bg::detail::Span**)(v25 + 8 * v9)) != 0LL)
  1174. {
  1175. m_size_class = v26->m_size_class;
  1176. if(m_size_class == 0xFF)
  1177. {
  1178. ((bg::detail::PageHeap*)(v6 + 9136))->DeallocateSpan(v26);
  1179. return desta;
  1180. }
  1181. m_used_count = v26->m_used_count;
  1182. if(m_used_count && m_size_class <= 0x56u)
  1183. {
  1184. v29 = m_size_class;
  1185. v30 = v26->m_chunk_list;
  1186. v31 = m_used_count - 1;
  1187. v32 = (bg::detail::PageHeap*)(v6 + 9136);
  1188. *(uintptr_t*)old_ptr = (uintptr_t)v30;
  1189. v26->m_used_count = v31;
  1190. v33 = v31 == 0;
  1191. v26->m_chunk_list = (void*)old_ptr;
  1192. if(v30)
  1193. {
  1194. result = desta;
  1195. if(!v33)
  1196. return result;
  1197. v34 = v6 + (v29 << 6) + 48;
  1198. if(*(uintptr_t*)(v34 + 8) <= 1uLL)
  1199. return result;
  1200. v35 = v26->m_prev;
  1201. v35->m_next = v26->m_next;
  1202. v26->m_next->m_prev = v35;
  1203. v26->m_prev = 0LL;
  1204. v26->m_next = 0LL;
  1205. --* (uintptr_t*)(v34 + 8);
  1206. }
  1207. else
  1208. {
  1209. v42 = (v29 << 6) + v6;
  1210. if(!v33)
  1211. {
  1212. v44 = *(bg::detail::Span**)(v42 + 40);
  1213. result = desta;
  1214. v26->m_prev = (bg::detail::Span*)v42;
  1215. v26->m_next = v44;
  1216. *(uintptr_t*)(*(uintptr_t*)(v42 + 40) + 32LL) = (uintptr_t)v26;
  1217. *(uintptr_t*)(v42 + 40) = (uintptr_t)v26;
  1218. ++* (uintptr_t*)(v42 + 56);
  1219. return result;
  1220. }
  1221. if(!*(uintptr_t*)(v42 + 56))
  1222. {
  1223. v43 = *(bg::detail::Span**)(v42 + 40);
  1224. v26->m_prev = (bg::detail::Span*)v42;
  1225. v26->m_next = v43;
  1226. *(uintptr_t*)(*(uintptr_t*)(v42 + 40) + 32LL) = (uintptr_t)v26;
  1227. *(uintptr_t*)(v42 + 40) = (uintptr_t)v26;
  1228. ++* (uintptr_t*)(v42 + 56);
  1229. return desta;
  1230. }
  1231. }
  1232. v26->m_used_count = 0LL;
  1233. v26->m_size_class = -1;
  1234. v26->m_chunk_list = 0LL;
  1235. v32->DeallocateSpan(v26);
  1236. return desta;
  1237. }
  1238. }
  1239. else
  1240. {
  1241. }
  1242. return desta;
  1243. }
  1244. v86 = v47;
  1245. v78->RegisterSpan(v47);
  1246. m_next = v86;
  1247. v10 = v73;
  1248. v11 = v68;
  1249. v48 = v86->m_page_count;
  1250. v49 = &v86->m_chunk_list;
  1251. v86->m_size_class = destb;
  1252. v50 = v86->m_start_page << 13;
  1253. v51 = v50 + v82;
  1254. v52 = v50 + (v48 << 13);
  1255. if(v50 + v82 > v52)
  1256. {
  1257. v53 = &v86->m_chunk_list;
  1258. }
  1259. else
  1260. {
  1261. do
  1262. {
  1263. v53 = (void**)(v51 - v82);
  1264. v51 += v82;
  1265. *v49 = v53;
  1266. v49 = v53;
  1267. } while(v52 >= v51);
  1268. }
  1269. *v53 = 0LL;
  1270. v54 = v6 + (v89 << 6);
  1271. v55 = *(bg::detail::Span**)(v54 + 40);
  1272. v86->m_prev = v92;
  1273. v86->m_next = v55;
  1274. *(uintptr_t*)(*(uintptr_t*)(v54 + 40) + 32LL) = (uintptr_t)v86;
  1275. *(uintptr_t*)(v54 + 40) = (uintptr_t)v86;
  1276. ++* (uintptr_t*)(v54 + 56);
  1277. LABEL_48:
  1278. v38 = (void**)m_next->m_chunk_list;
  1279. ++m_next->m_used_count;
  1280. v39 = *v38;
  1281. desta = v38;
  1282. m_next->m_chunk_list = *v38;
  1283. if(!v39)
  1284. {
  1285. v40 = m_next->m_prev;
  1286. v40->m_next = m_next->m_next;
  1287. m_next->m_next->m_prev = v40;
  1288. m_next->m_prev = 0LL;
  1289. m_next->m_next = 0LL;
  1290. --* (uintptr_t*)(v6 + (v89 << 6) + 56);
  1291. }
  1292. goto LABEL_27;
  1293. }
  1294. return result;
  1295. }
// NOTE(review): Decompiled (Hex-Rays-style) output — the leading "NNNN." tokens are
// artifacts of the capture and are preserved verbatim. Structure offsets below are
// raw byte offsets into the shared-memory manager object (g_shm_ctx.mgr); the
// symbolic meanings given in comments are inferred and should be confirmed against
// the original bg::detail::PageHeap / bg::detail::Span definitions.
//
// ShmCalloc: allocate a zero-initialized block of n * bytes from shared memory.
// Returns nullptr/0 when the manager is absent, on multiplication overflow, or
// when the underlying span/chunk allocation fails.
//
// Apparent layout constants (TODO confirm):
//   pages are 8 KiB (every page<->address conversion shifts by 13);
//   v6 + 5568 : byte table mapping rounded size -> size class;
//   v6 + 7744 : per-class chunk size table (8-byte entries);
//   v6 + 8440 : per-class pages-per-span table (8-byte entries);
//   v6 + 9136 : embedded bg::detail::PageHeap;
//   v6 + (class << 6) : per-class doubly-linked Span list head (sentinel).
1296. void* ShmCalloc(size_t n, size_t bytes)
1297. {
1298. __int64 v6; // r12
1299. size_t v7; // rbx
1300. void** v8; // r12
1301. uint8_t v10; // r14
1302. __int64 v11; // r11
1303. __int64 v12; // r15
1304. __int64 v13; // r10
1305. bg::detail::Span* v14; // r13
1306. bg::detail::Span* m_next; // rax
1307. void** m_chunk_list; // rdx
1308. bool v17; // zf
1309. __int64 Span; // rax
1310. bg::detail::PageHeap* v19; // rdi
1311. bg::detail::Span* v20; // rax
1312. size_t m_start_page; // rcx
1313. size_t m_page_count; // r8
1314. void** p_m_chunk_list; // rsi
1315. void** v24; // rcx
1316. void** v25; // rdx
1317. void** v26; // r8
1318. void** v27; // rdi
1319. __int64 v28; // rdx
1320. bg::detail::Span* v29; // rsi
1321. bg::detail::Span* m_prev; // rcx
1322. bg::detail::Span* v31; // [rsp+8h] [rbp-48h]
1323. __int64 v32; // [rsp+10h] [rbp-40h]
1324. bg::detail::PageHeap* v33; // [rsp+18h] [rbp-38h]
// No shared-memory manager attached -> nothing to allocate from.
1325. v6 = (int64_t)g_shm_ctx.mgr;
1326. if(!v6)
1327. {
1328. return nullptr;
1329. }
// Total request size; reject if n * bytes overflowed size_t
// (division check only valid when bytes != 0).
1330. v7 = bytes * n;
1331. if(bytes && n != v7 / bytes)
1332. return 0LL;
// Small sizes (<= 1 KiB): class index from (size+7)/8 in the byte table.
1333. if(v7 <= 0x400)
1334. {
1335. v10 = *(char*)(v6 + ((v7 + 7) >> 3) + 5568);
1336. }
1337. else
1338. {
// Large sizes (> 256 KiB): bypass size classes entirely and take whole
// pages straight from the PageHeap, rounding up to 8 KiB pages.
1339. if(v7 > 0x40000)
1340. {
1341. Span = (uintptr_t)((bg::detail::PageHeap*)(v6 + 9136))->AllocateSpan((v7 + 0x1FFF) >> 13);
1342. if(Span)
1343. {
// Span+16 presumably holds the start page; page << 13 yields the address.
1344. v8 = (void**)(*(uintptr_t*)(Span + 16) << 13);
1345. if(v8)
1346. {
1347. LABEL_16:
// calloc contract: zero the block before returning it.
1348. memset(v8, 0, v7);
1349. return v8;
1350. }
1351. }
1352. return 0LL;
1353. }
// Mid sizes (1 KiB .. 256 KiB): coarser 128-byte-granularity table index.
1354. v10 = *(char*)(v6 + ((v7 + 15487) >> 7) + 5568);
1355. }
// The two arms below are a decompiler artifact: both load the class's chunk
// size (v12), locate the class's Span list head (v14), and either pop from a
// non-empty list (LABEL_12) or fall through to allocate a fresh span.
1356. if(v10 > 0x56u)
1357. {
1358. v12 = *(uintptr_t*)(v6 + 8LL * v10 + 7744);
1359. v13 = v10;
1360. v14 = (bg::detail::Span*)(v6 + ((unsigned __int64)v10 << 6));
// Head != head->m_next means the class free list has a span with chunks.
1361. if(v14 != v14->m_next)
1362. goto LABEL_12;
1363. v33 = (bg::detail::PageHeap*)(v6 + 9136);
1364. v13 = v10;
1365. v11 = v10;
1366. v19 = (bg::detail::PageHeap*)(v6 + 9136);
1367. }
1368. else
1369. {
1370. v11 = v10;
1371. v12 = *(uintptr_t*)(v6 + 8LL * v10 + 7744);
1372. v13 = v10;
1373. v14 = (bg::detail::Span*)(v6 + ((unsigned __int64)v10 << 6));
1374. if(v14 != v14->m_next)
1375. {
1376. LABEL_12:
1377. m_next = v14->m_next;
1378. goto LABEL_13;
1379. }
1380. v33 = (bg::detail::PageHeap*)(v6 + 9136);
1381. v19 = (bg::detail::PageHeap*)(v6 + 9136);
1382. }
// List was empty: allocate a new span of the class's configured page count
// (table at v6+8440), register it in the page map, then carve it into chunks.
1383. v32 = v13;
1384. v20 = (bg::detail::Span*)v19->AllocateSpan(*(uintptr_t*)(v6 + 8 * v11 + 8440));
1385. if(v20)
1386. {
1387. v31 = v20;
1388. v33->RegisterSpan(v20);
1389. m_next = v31;
1390. v13 = v32;
1391. m_start_page = v31->m_start_page;
1392. m_page_count = v31->m_page_count;
1393. p_m_chunk_list = &v31->m_chunk_list;
1394. v31->m_size_class = v10;
// Thread every v12-sized chunk in [start, start + pages*8KiB] into the
// span's singly-linked chunk list (each chunk's first word = next chunk).
1395. v24 = (void**)(m_start_page << 13);
1396. v25 = (void**)((char*)v24 + v12);
1397. v26 = &v24[1024 * m_page_count];
1398. if((void**)((char*)v24 + v12) <= v26)
1399. {
1400. while(1)
1401. {
1402. v27 = v25;
1403. v25 = (void**)((char*)v25 + v12);
1404. *p_m_chunk_list = v24;
1405. p_m_chunk_list = v24;
1406. if(v26 < v25)
1407. break;
1408. v24 = v27;
1409. }
1410. }
1411. else
1412. {
1413. v24 = &v31->m_chunk_list;
1414. }
// Null-terminate the chunk list.
1415. *v24 = 0LL;
// Link the new span at the tail of the class list (offset +40 appears to be
// the head's m_next / tail pointer; +56 a span counter — TODO confirm).
1416. v28 = v6 + (v32 << 6);
1417. v29 = *(bg::detail::Span**)(v28 + 40);
1418. v31->m_prev = v14;
1419. v31->m_next = v29;
1420. *(uintptr_t*)(*(uintptr_t*)(v28 + 40) + 32LL) = (uintptr_t)v31;
1421. *(uintptr_t*)(v28 + 40) = (uintptr_t)v31;
1422. ++* (uintptr_t*)(v28 + 56);
1423. LABEL_13:
// Pop one chunk off the span's free chunk list and bump its used count.
1424. m_chunk_list = (void**)m_next->m_chunk_list;
1425. ++m_next->m_used_count;
1426. v17 = *m_chunk_list == 0LL;
1427. m_next->m_chunk_list = *m_chunk_list;
// That was the last free chunk: unlink the span from the class list so it
// is no longer offered for allocation.
1428. if(v17)
1429. {
1430. m_prev = m_next->m_prev;
1431. m_prev->m_next = m_next->m_next;
1432. m_next->m_next->m_prev = m_prev;
1433. m_next->m_prev = 0LL;
1434. m_next->m_next = 0LL;
1435. --* (uintptr_t*)(v6 + (v13 << 6) + 56);
1436. }
1437. v8 = m_chunk_list;
// Jump back to the common zero-and-return epilogue above.
1438. goto LABEL_16;
1439. }
1440. return 0LL;
1441. }
  1442. }