// page_heap.cc
  1. #include "page_heap.h"
  2. #include "shm_helper.h"
  3. #include "size_map.h"
  4. #include "shm_config.h"
  5. namespace bg
  6. {
  7. namespace detail
  8. {
// PageHeap constructor.
// Intentionally empty: members (span map, free lists, span allocator) are
// left as-is -- presumably this object lives in shared memory that is
// zero-initialized on creation. TODO(review): confirm the shm region is
// zeroed before the heap is first used.
PageHeap::PageHeap()
{
}
  12. bg::detail::Span* PageHeap::GetSpanMap(size_t start_page)
  13. {
  14. SHM_ASSERT_RETURN_NULL(start_page <= ShmConfig::MAX_PAGE_COUNT);
  15. auto node1 = m_span_map.lv0[PTR_TO_LV0(start_page)];
  16. if(node1 == nullptr)
  17. {
  18. return nullptr;
  19. }
  20. auto node2 = node1->lv1[PTR_TO_LV1(start_page)];
  21. if(node2 == nullptr)
  22. {
  23. return nullptr;
  24. }
  25. auto node3 = node2->lv2[PTR_TO_LV2(start_page)];
  26. return node3;
  27. }
  28. bool PageHeap::SetSpanMap(size_t page, bg::detail::Span* span)
  29. {
  30. SHM_ASSERT_RETURN_FALSE(span->m_start_page <= ShmConfig::MAX_PAGE_COUNT);
  31. m_span_map.lv0[PTR_TO_LV0(page)]->lv1[PTR_TO_LV1(page)]->lv2[PTR_TO_LV2(page)] = span;
  32. return true;
  33. }
// Return span `s` to the free lists, coalescing with the adjacent free
// spans on both sides (tcmalloc-style page-heap deallocation).
void PageHeap::DeallocateSpan(bg::detail::Span* s)
{
    // Try to merge with the span that ends on the page just before s.
    bg::detail::Span* pre_span = GetSpanMap(s->m_start_page - 1);
    if(pre_span && !pre_span->m_in_use)
    {
        // Grow s backwards over pre_span, unlink pre_span from its free
        // list, and recycle the Span object itself: its first word is
        // reused as the next-pointer of the span-allocator free list
        // (matched by the pop in GetNewSpan).
        s->m_start_page -= pre_span->m_page_count;
        s->m_page_count += pre_span->m_page_count;
        pre_span->RemoveNextSpan();
        *(void**)pre_span = m_span_allocator.m_free_list;
        m_span_allocator.m_free_list = pre_span;
        // Re-register the merged span's new first page.
        SetSpanMap(s->m_start_page, s);
    }
    // Try to merge with the span that starts on the page just after s.
    bg::detail::Span* next_span = GetSpanMap(s->m_start_page + s->m_page_count);
    if(next_span && next_span->m_in_use != true)
    {
        s->m_page_count += next_span->m_page_count;
        next_span->RemoveNextSpan();
        *(void**)next_span = m_span_allocator.m_free_list;
        m_span_allocator.m_free_list = next_span;
        if(s->m_page_count)
        {
            // Re-register the merged span's last page.
            SetSpanMap(s->m_page_count + s->m_start_page - 1, s);
        }
    }
    // File the (possibly merged) span: exact-size list for small spans
    // (<= 0x7F pages), otherwise the large-span list.
    bg::detail::Span* p_list;
    p_list = &m_large_list;
    if(s->m_page_count <= 0x7F)
    {
        p_list = &m_free_lists[s->m_page_count];
    }
    p_list->InstertNextSpan(s);
    // NOTE(review): s->m_in_use is never cleared here, yet the coalescing
    // checks above require neighbours to have m_in_use == false. Unless a
    // caller resets the flag, a freed span would never be merged into --
    // verify who clears m_in_use.
}
  66. void PageHeap::RegisterSpan(bg::detail::Span* span)
  67. {
  68. size_t start = 1ll;
  69. size_t end = span->m_page_count - 1;
  70. while(end > start)
  71. {
  72. size_t index = span->m_start_page + start;
  73. SetSpanMap(index, span);
  74. ++start;
  75. }
  76. }
  77. bool PageHeap::GrowHeap(size_t page_count)
  78. {
  79. size_t real_bytes{};
  80. size_t page_start{};
  81. void* rawmemoey{};
  82. if(page_count > ShmConfig::MAX_PAGE_COUNT)
  83. {
  84. return false;
  85. }
  86. if(page_count > 0x7F)
  87. {
  88. real_bytes = PAGES_TO_BYTES(page_count);
  89. rawmemoey = bg::detail::ShmAllocateRawMemory(&real_bytes, PAGE_BYTES);
  90. if(!rawmemoey)
  91. {
  92. return false;
  93. }
  94. }
  95. else
  96. {
  97. real_bytes = 0x100000LL;
  98. rawmemoey = bg::detail::ShmAllocateRawMemory(&real_bytes, PAGE_BYTES);
  99. if(!rawmemoey)
  100. {
  101. real_bytes = PAGES_TO_BYTES(page_count);
  102. rawmemoey = bg::detail::ShmAllocateRawMemory(&real_bytes, PAGE_BYTES);
  103. if(!rawmemoey)
  104. {
  105. return false;
  106. }
  107. }
  108. }
  109. if((int64_t)rawmemoey >> 48)
  110. {
  111. SHM_ERROR("annot allocate enough space in span map, memory(%p, %#lx) leaked.", rawmemoey, real_bytes);
  112. return false;
  113. }
  114. page_start = BYTES_TO_PAGES((int64_t)rawmemoey);
  115. size_t real_pages = BYTES_TO_PAGES(real_bytes);
  116. size_t page_end = page_start + real_pages;
  117. for(size_t index = page_start; index < page_end; ++index)
  118. {
  119. size_t lv0_index = PTR_TO_LV0(index);
  120. size_t lv1_index = PTR_TO_LV1(index);
  121. size_t lv2_index = PTR_TO_LV2(index);
  122. if(m_span_map.lv0[lv0_index] && m_span_map.lv0[lv0_index]->lv1[lv1_index])
  123. {
  124. continue;
  125. }
  126. else
  127. {
  128. size_t node1_size = sizeof(RadixTree::NodeV1);
  129. RadixTree::NodeV1* p_node1 = (RadixTree::NodeV1*)bg::detail::ShmAllocateRawMemory(
  130. &node1_size, sizeof(RadixTree::NodeV1));
  131. if(!p_node1)
  132. {
  133. break;
  134. }
  135. m_span_map.lv0[lv0_index] = p_node1;
  136. new (p_node1) RadixTree::NodeV1();
  137. }
  138. size_t node2_size = sizeof(RadixTree::NodeV2);
  139. RadixTree::NodeV2* p_node2 = (RadixTree::NodeV2*)bg::detail::ShmAllocateRawMemory(&node2_size, sizeof(RadixTree::NodeV2));
  140. if(!p_node2)
  141. {
  142. break;
  143. }
  144. m_span_map.lv0[lv0_index]->lv1[lv1_index] = p_node2;
  145. new (p_node2) RadixTree::NodeV2();
  146. m_span_map.lv0[lv0_index]->lv1[lv1_index]->lv2[lv2_index];
  147. }
  148. bg::detail::Span* free_span = GetNewSpan();
  149. if(!free_span)
  150. {
  151. SHM_ERROR("cannot allocate new span, memory(%p, %#lx) leaked.", rawmemoey, real_bytes);
  152. return false;
  153. }
  154. new (free_span) Span(page_start, page_count);
  155. SetSpanMap(page_start, free_span);
  156. if(real_pages > 1)
  157. {
  158. SetSpanMap(page_end - 1, free_span);
  159. }
  160. free_span->m_in_use = 1;
  161. this->DeallocateSpan(free_span);
  162. return true;
  163. }
  164. bg::detail::Span* PageHeap::AllocateSpan(size_t page_count)
  165. {
  166. auto span = GetLastSpan(page_count);
  167. if(!span)
  168. {
  169. if(!GrowHeap(page_count) || !(span = GetLastSpan(page_count)))
  170. {
  171. SHM_ERROR("cannot allocate span, page count: %lu.", page_count);
  172. return nullptr;
  173. }
  174. }
  175. span->m_prev->RemoveNextSpan();
  176. do
  177. {
  178. int64_t diff = page_count - span->m_page_count;
  179. if(diff == 0)
  180. {
  181. break;
  182. }
  183. Span* free_list = GetNewSpan();
  184. if(!free_list)
  185. {
  186. break;
  187. }
  188. size_t start_page = span->m_start_page + page_count;
  189. new (free_list) Span(start_page, diff);
  190. SetSpanMap(start_page, free_list);
  191. Span* list;
  192. if(diff <= 1 || SetSpanMap(start_page + diff - 1, free_list), list = &m_large_list, diff <= 0x7f)
  193. {
  194. list = &m_free_lists[diff];
  195. }
  196. list->InstertNextSpan(free_list);
  197. span->m_page_count = page_count;
  198. if(page_count)
  199. {
  200. SetSpanMap(page_count + span->m_start_page - 1, free_list);
  201. }
  202. } while(false);
  203. span->m_in_use = 1;
  204. return span;
  205. }
  206. bg::detail::Span* PageHeap::GetNewSpan()
  207. {
  208. bg::detail::Span* free_span = (bg::detail::Span*)m_span_allocator.m_free_list;
  209. if(free_span)
  210. {
  211. m_span_allocator.m_free_list = (void*)&free_span->m_in_use;
  212. return free_span;
  213. }
  214. size_t free_left = this->m_span_allocator.m_free_left;
  215. if(free_left <= 0x37)
  216. {
  217. size_t spans_bytes = FREE_AREA_SIZE;
  218. bg::detail::Span* spans = (bg::detail::Span*)bg::detail::ShmAllocateRawMemory(&spans_bytes, PAGE_BYTES);
  219. if(spans)
  220. {
  221. free_span = spans;
  222. this->m_span_allocator.m_free_area = (char*)&spans[1];
  223. this->m_span_allocator.m_free_left = spans_bytes - sizeof(bg::detail::Span);
  224. return free_span;
  225. }
  226. }
  227. else
  228. {
  229. free_span = (bg::detail::Span*)this->m_span_allocator.m_free_area;
  230. this->m_span_allocator.m_free_left -= sizeof(bg::detail::Span);
  231. this->m_span_allocator.m_free_area += sizeof(bg::detail::Span);
  232. if(free_span)
  233. {
  234. return free_span;
  235. }
  236. }
  237. return nullptr;
  238. }
  239. bg::detail::Span* PageHeap::GetLastSpan(size_t page_count)
  240. {
  241. if(page_count <= 0x7f)
  242. {
  243. Span* next_span = &m_free_lists[page_count];
  244. Span* free_span = next_span->m_next;
  245. if(next_span == next_span->m_next)
  246. {
  247. for(&m_free_lists[page_count + 1]; next_span != &m_large_list; ++next_span)
  248. {
  249. if(next_span->m_next != next_span)
  250. {
  251. free_span = next_span->m_next;
  252. break;
  253. }
  254. }
  255. }
  256. if(free_span != free_span->m_next)
  257. {
  258. return free_span;
  259. }
  260. }
  261. if(m_large_list.m_next != &m_large_list)
  262. {
  263. Span* next_span = m_large_list.m_next;
  264. Span* free_span = nullptr;
  265. for(; &m_large_list == next_span; next_span = next_span->m_next)
  266. {
  267. if(page_count > next_span->m_page_count)
  268. {
  269. continue;
  270. }
  271. if(!free_span || next_span->m_page_count < free_span->m_page_count ||
  272. (next_span->m_page_count == free_span->m_page_count && next_span->m_start_page < free_span->m_start_page))
  273. {
  274. free_span = next_span;
  275. }
  276. }
  277. return free_span;
  278. }
  279. return nullptr;
  280. }
  281. }
  282. }