Commit bfb9ee6

wip

committed Jul 9, 2024 · 1 parent a280e80 · commit bfb9ee6

File tree: 6 files changed, +389 -598 lines changed
 

ix/src/block.rs (new file, +132)

@@ -0,0 +1,132 @@
use std::{
    num::NonZeroUsize,
    ops::{Deref, DerefMut, Range},
    sync::atomic::{AtomicU8, Ordering},
};

use mallockit::{space::page_resource::MemRegion, util::mem::size_class::SizeClass};

use crate::{pool::Pool, ImmixAllocator};

use super::Address;

/// Metadata stored in the header of each block.
#[repr(C)]
pub struct BlockMeta {
    pub owner: usize,
    // bump_cursor: u32,
    // used_bytes: u32,
    // pub prev: Option<Block>,
    // pub next: Option<Block>,
    // pub size_class: SizeClass,
    // pub group: u8,
    // head_cell: Address,
    // pub owner: &'static Pool,
    pub line_marks: [AtomicU8; 8],
}

/// A 32 KB Immix block, identified by its (non-null) start address.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct Block(NonZeroUsize);

impl MemRegion for Block {
    type Meta = BlockMeta;

    const LOG_BYTES: usize = 15;

    fn start(&self) -> Address {
        Address::from(self.0.get())
    }

    fn from_address(addr: Address) -> Self {
        debug_assert!(!addr.is_zero());
        debug_assert!(Self::is_aligned(addr));
        Self(unsafe { NonZeroUsize::new_unchecked(usize::from(addr)) })
    }
}

impl Deref for Block {
    type Target = BlockMeta;

    fn deref(&self) -> &Self::Target {
        self.meta()
    }
}

impl DerefMut for Block {
    fn deref_mut(&mut self) -> &mut Self::Target {
        unsafe { self.meta_mut() }
    }
}

impl Block {
    pub const LINES: usize = Self::DATA_BYTES / Line::BYTES;

    pub fn init(mut self, owner: usize) {
        self.owner = owner;
        debug_assert_eq!(Self::META_BYTES, Address::BYTES * 8);
        // self.size_class = size_class;
        // let size = size_class.bytes();
        // self.head_cell = Address::ZERO;
        // self.bump_cursor = (Address::ZERO + Self::META_BYTES).align_up(size).as_usize() as u32;
        // self.used_bytes = 0;
    }

    pub fn lines(self) -> Range<Line> {
        let start = Line::from_address(self.data_start());
        let end = Line::from_address(self.end());
        start..end
    }

    /// Find the next hole: a maximal run of unmarked lines at or after
    /// `search_start`. Returns `None` if every remaining line is marked.
    pub fn get_next_available_lines(self, search_start: Line) -> Option<Range<Line>> {
        let start_cursor = search_start.get_index_within_block();
        let mut cursor = start_cursor;
        // Find start: skip over marked lines.
        while cursor < self.line_marks.len() {
            let mark = self.line_marks[cursor].load(Ordering::SeqCst);
            if mark == 0 {
                break;
            }
            cursor += 1;
        }
        if cursor == self.line_marks.len() {
            return None;
        }
        let start = Line::from_address(self.data_start() + cursor * Line::BYTES);
        // Find limit: extend until the next marked line or the end of the block.
        while cursor < self.line_marks.len() {
            let mark = self.line_marks[cursor].load(Ordering::SeqCst);
            if mark != 0 {
                break;
            }
            cursor += 1;
        }
        let end = Line::from_address(self.data_start() + cursor * Line::BYTES);
        Some(start..end)
    }
}

/// A 256-byte Immix line, identified by its (non-null) start address.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct Line(NonZeroUsize);

impl Line {
    pub fn block(self) -> Block {
        Block::containing(self.start())
    }

    pub fn get_index_within_block(self) -> usize {
        (self.start() - self.block().data_start()) / Self::BYTES
    }
}

impl MemRegion for Line {
    const LOG_BYTES: usize = 8;

    fn start(&self) -> Address {
        Address::from(self.0.get())
    }

    fn from_address(addr: Address) -> Self {
        debug_assert!(!addr.is_zero());
        debug_assert!(Self::is_aligned(addr));
        Self(unsafe { NonZeroUsize::new_unchecked(usize::from(addr)) })
    }
}
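
Note: the two scan loops in `get_next_available_lines` are Immix's hole search: skip past marked lines, then take the maximal run of unmarked lines that follows. Below is a minimal standalone sketch of the same logic, using a plain byte slice in place of the block's `AtomicU8` line-mark array; the function name and `main` harness are illustrative, not part of this crate.

use std::ops::Range;

// Sketch of the hole search: `marks[i] != 0` means line `i` is live.
fn next_available_lines(marks: &[u8], search_start: usize) -> Option<Range<usize>> {
    let mut cursor = search_start;
    // Find start: skip over marked (live) lines.
    while cursor < marks.len() && marks[cursor] != 0 {
        cursor += 1;
    }
    if cursor == marks.len() {
        return None; // No unmarked line at or after `search_start`.
    }
    let start = cursor;
    // Find limit: extend until the next marked line or the end of the block.
    while cursor < marks.len() && marks[cursor] == 0 {
        cursor += 1;
    }
    Some(start..cursor)
}

fn main() {
    // Lines 0-1 and 4 are live, so the holes are [2, 4) and [5, 8).
    let marks = [1, 1, 0, 0, 1, 0, 0, 0];
    assert_eq!(next_available_lines(&marks, 0), Some(2..4));
    assert_eq!(next_available_lines(&marks, 4), Some(5..8));
    assert_eq!(next_available_lines(&[1; 8], 0), None);
}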

ix/src/hoard_space.rs (deleted, -130)

This file was deleted.

ix/src/immix_space.rs (new file, +233)

@@ -0,0 +1,233 @@
use super::{page_resource::BlockPageResource, Allocator, Space, SpaceId};
use crate::{
    block::{self, Block, Line},
    pool::Pool,
};
use mallockit::{
    space::{
        meta::{Box, Meta},
        page_resource::MemRegion,
    },
    util::{mem::alloc::discrete_tlab::DiscreteTLAB, *},
};

/// Global heap
pub struct ImmixSpace {
    id: SpaceId,
    pr: BlockPageResource<Block>,
    pub(crate) pool: Pool,
}

impl Space for ImmixSpace {
    const MAX_ALLOCATION_SIZE: usize = Block::BYTES / 2;

    type PR = BlockPageResource<Block>;

    fn new(id: SpaceId) -> Self {
        Self {
            id,
            pr: BlockPageResource::new(id),
            pool: Pool::new(true),
        }
    }

    fn id(&self) -> SpaceId {
        self.id
    }

    fn page_resource(&self) -> &Self::PR {
        &self.pr
    }

    fn get_layout(ptr: Address) -> Layout {
        // wip: depends on the `size_class` field of `BlockMeta`,
        // which is still commented out.
        let block = Block::containing(ptr);
        block.size_class.layout()
    }
}

impl ImmixSpace {
    pub fn can_allocate(layout: Layout) -> bool {
        let layout = unsafe { layout.pad_to_align_unchecked() };
        let size = layout.size().next_power_of_two();
        size <= Self::MAX_ALLOCATION_SIZE
    }

    pub fn get_clean_block(&self, owner: &ImmixAllocator) -> Option<Block> {
        let block = self.pr.acquire_block()?;
        block.init(owner as *const ImmixAllocator as usize);
        Some(block)
    }

    pub fn release_block(&self, block: Block) {
        self.pr.release_block(block)
    }
}

pub struct ImmixAllocator {
    cursor: Address,
    limit: Address,
    space: &'static ImmixSpace,
    large_cursor: Address,
    large_limit: Address,
    request_for_large: bool,
    line: Option<Line>,
}

impl ImmixAllocator {
    const LOCAL_HEAP_THRESHOLD: usize = 16 * 1024 * 1024;
    const LARGEST_SMALL_OBJECT: usize = 1024;

    pub fn new(space: &'static ImmixSpace, _space_id: SpaceId) -> Self {
        Self {
            cursor: Address::ZERO,
            limit: Address::ZERO,
            space,
            large_cursor: Address::ZERO,
            large_limit: Address::ZERO,
            request_for_large: false,
            line: None,
        }
    }

    fn acquire_recyclable_block(&mut self) -> bool {
        // wip: `get_reusable_block` and `start_line` are not implemented yet.
        match self.space.get_reusable_block() {
            Some(block) => {
                self.line = Some(block.start_line());
                true
            }
            _ => false,
        }
    }

    fn acquire_clean_block(&mut self) -> bool {
        match self.space.get_clean_block(self) {
            Some(block) => {
                if self.request_for_large {
                    self.large_cursor = block.start();
                    self.large_limit = block.end();
                } else {
                    self.cursor = block.start();
                    self.limit = block.end();
                }
                true
            }
            None => false,
        }
    }

    fn acquire_recyclable_lines(&mut self) -> bool {
        while self.line.is_some() || self.acquire_recyclable_block() {
            let line = self.line.unwrap();
            let block = line.block();
            if let Some(lines) = block.get_next_available_lines(line) {
                // Found recyclable lines. Update the bump allocation cursor and limit.
                self.cursor = lines.start.start();
                self.limit = lines.end.start();
                self.line = if lines.end == block.lines().end {
                    None
                } else {
                    Some(lines.end)
                };
                return true;
            } else {
                self.line = None;
            }
        }
        false
    }

    fn alloc_slow(&mut self, layout: Layout, large: bool) -> Option<Address> {
        let old_request_for_large = self.request_for_large;
        self.request_for_large = large;
        let success = self.acquire_clean_block();
        self.request_for_large = old_request_for_large;
        if success {
            if large {
                let result = self.large_cursor;
                let new_cursor = self.large_cursor + layout.size();
                if new_cursor > self.large_limit {
                    None
                } else {
                    self.large_cursor = new_cursor;
                    Some(result)
                }
            } else {
                let result = self.cursor;
                let new_cursor = self.cursor + layout.size();
                if new_cursor > self.limit {
                    None
                } else {
                    self.cursor = new_cursor;
                    Some(result)
                }
            }
        } else {
            None
        }
    }

    fn alloc_slow_hot(&mut self, layout: Layout) -> Option<Address> {
        if self.acquire_recyclable_lines() {
            let result = self.cursor;
            let new_cursor = self.cursor + layout.size();
            if new_cursor > self.limit {
                None
            } else {
                // Advance the cursor past the allocated cell.
                self.cursor = new_cursor;
                Some(result)
            }
        } else {
            self.alloc_slow(layout, false)
        }
    }

    fn overflow_alloc(&mut self, layout: Layout) -> Option<Address> {
        let start = self.large_cursor;
        let end = start + layout.size();
        if end > self.large_limit {
            self.alloc_slow(layout, true)
        } else {
            self.large_cursor = end;
            Some(start)
        }
    }
}

impl Allocator for ImmixAllocator {
    #[inline(always)]
    fn alloc(&mut self, layout: Layout) -> Option<Address> {
        let result = self.cursor;
        let new_cursor = self.cursor + layout.size();
        if new_cursor > self.limit {
            if layout.size() > Line::BYTES {
                // Size larger than a line: do large allocation
                self.overflow_alloc(layout)
            } else {
                // Size smaller than a line: fit into holes
                self.alloc_slow_hot(layout)
            }
        } else {
            // Fast path: bump the cursor.
            self.cursor = new_cursor;
            Some(result)
        }
    }

    #[inline(always)]
    fn dealloc(&mut self, _cell: Address) {}
}

impl Drop for ImmixAllocator {
    fn drop(&mut self) {
        // self.tlab
        //     .clear(|cell| self.local.free_cell(cell, self.space));
    }
}
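
Note: `ImmixAllocator::alloc` is a bump-pointer fast path with Immix's small/overflow split: requests larger than a line go through `overflow_alloc` on a separate cursor, smaller ones fall back to the hole search in `alloc_slow_hot`. A standalone sketch of the cursor/limit mechanics follows, with `usize` standing in for mallockit's `Address`; the type and `main` harness are illustrative only.

/// Bump allocation over a [cursor, limit) region, as in the fast path.
struct Bump {
    cursor: usize,
    limit: usize,
}

const LINE_BYTES: usize = 1 << 8; // matches Line::LOG_BYTES = 8

impl Bump {
    fn alloc(&mut self, size: usize) -> Option<usize> {
        let result = self.cursor;
        let new_cursor = self.cursor + size;
        if new_cursor > self.limit {
            // The real allocator takes a slow path here: recyclable
            // lines for small requests, a clean block otherwise.
            None
        } else {
            self.cursor = new_cursor;
            Some(result)
        }
    }
}

fn main() {
    // A two-line region starting at an arbitrary address.
    let mut bump = Bump { cursor: 0x1_0000, limit: 0x1_0000 + 2 * LINE_BYTES };
    assert_eq!(bump.alloc(64), Some(0x1_0000));
    assert_eq!(bump.alloc(64), Some(0x1_0040));
    assert_eq!(bump.alloc(2 * LINE_BYTES), None); // would cross the limit
}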

ix/src/lib.rs (+20 -20)

@@ -7,67 +7,67 @@
 
 extern crate mallockit;
 
-mod hoard_space;
+mod block;
+mod immix_space;
 mod pool;
-mod super_block;
 
-use hoard_space::*;
+use immix_space::*;
 use mallockit::{
     space::{large_object_space::*, *},
     util::*,
     Mutator, Plan,
 };
 
-const HOARD_SPACE: SpaceId = SpaceId::DEFAULT;
+const IMMIX_SPACE: SpaceId = SpaceId::DEFAULT;
 const LARGE_OBJECT_SPACE: SpaceId = SpaceId::LARGE_OBJECT_SPACE;
 
 #[mallockit::plan]
-struct Hoard {
-    hoard_space: HoardSpace,
+struct Immix {
+    immix_space: ImmixSpace,
     large_object_space: LargeObjectSpace,
 }
 
-impl Plan for Hoard {
-    type Mutator = HoardMutator;
+impl Plan for Immix {
+    type Mutator = ImmixMutator;
 
     fn new() -> Self {
         Self {
-            hoard_space: HoardSpace::new(HOARD_SPACE),
+            immix_space: ImmixSpace::new(IMMIX_SPACE),
             large_object_space: LargeObjectSpace::new(LARGE_OBJECT_SPACE),
         }
     }
 
     fn get_layout(ptr: Address) -> Layout {
-        debug_assert!(HOARD_SPACE.contains(ptr) || LARGE_OBJECT_SPACE.contains(ptr));
-        if HOARD_SPACE.contains(ptr) {
-            HoardSpace::get_layout(ptr)
+        debug_assert!(IMMIX_SPACE.contains(ptr) || LARGE_OBJECT_SPACE.contains(ptr));
+        if IMMIX_SPACE.contains(ptr) {
+            ImmixSpace::get_layout(ptr)
         } else {
             Self::get().large_object_space.get_layout::<Size4K>(ptr)
         }
     }
 }
 
 #[mallockit::mutator]
-struct HoardMutator {
-    hoard: HoardAllocator,
+struct ImmixMutator {
+    hoard: ImmixAllocator,
     los: LargeObjectAllocator<Size4K, { 1 << 31 }, { 16 << 20 }>,
     _padding: [usize; 8],
 }
 
-impl Mutator for HoardMutator {
-    type Plan = Hoard;
+impl Mutator for ImmixMutator {
+    type Plan = Immix;
 
     fn new() -> Self {
         Self {
-            hoard: HoardAllocator::new(&Self::plan().hoard_space, HOARD_SPACE),
+            hoard: ImmixAllocator::new(&Self::plan().immix_space, IMMIX_SPACE),
             los: LargeObjectAllocator::new(&Self::plan().large_object_space),
             _padding: [0; 8],
         }
     }
 
     #[inline(always)]
     fn alloc(&mut self, layout: Layout) -> Option<Address> {
-        if HoardSpace::can_allocate(layout) {
+        if ImmixSpace::can_allocate(layout) {
             mallockit::stat::track_allocation(layout, false);
             self.hoard.alloc(layout)
         } else {
@@ -78,8 +78,8 @@ impl Mutator for HoardMutator {
 
     #[inline(always)]
     fn dealloc(&mut self, ptr: Address) {
-        debug_assert!(HOARD_SPACE.contains(ptr) || LARGE_OBJECT_SPACE.contains(ptr));
-        if HOARD_SPACE.contains(ptr) {
+        debug_assert!(IMMIX_SPACE.contains(ptr) || LARGE_OBJECT_SPACE.contains(ptr));
+        if IMMIX_SPACE.contains(ptr) {
             mallockit::stat::track_deallocation(false);
             self.hoard.dealloc(ptr)
         } else {
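
Note: the mutator routes each request by size: layouts accepted by `ImmixSpace::can_allocate` go to the Immix allocator, everything else to the large-object space. A sketch of that predicate with plain sizes in place of `Layout`, assuming the 32 KB block implied by `Block::LOG_BYTES = 15`; all names here are illustrative.

const BLOCK_BYTES: usize = 1 << 15; // Block::LOG_BYTES = 15
const MAX_ALLOCATION_SIZE: usize = BLOCK_BYTES / 2;

// Mirrors ImmixSpace::can_allocate; the real code also pads the
// layout to its alignment before rounding the size up.
fn can_allocate(size: usize) -> bool {
    size.next_power_of_two() <= MAX_ALLOCATION_SIZE
}

fn main() {
    assert!(can_allocate(1024)); // served by the Immix space
    assert!(!can_allocate(32 * 1024)); // routed to the large-object space
}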

ix/src/pool.rs (+4 -342)

@@ -1,4 +1,4 @@
-use crate::{hoard_space::HoardSpace, super_block::SuperBlock};
+use crate::{block::Block, immix_space::ImmixSpace};
 use mallockit::{
     space::page_resource::MemRegion,
     util::{mem::size_class::SizeClass, Address},
@@ -8,361 +8,23 @@ use spin::{relax::Yield, MutexGuard};
 
 type Mutex<T> = spin::mutex::Mutex<T, Yield>;
 
-struct EmptyClass {
-    // 0 => emoty blocks
-    // classes+1 => full blocks
-    groups: [Option<SuperBlock>; Self::GROUPS],
-}
-
-impl EmptyClass {
-    const EMPTINESS_CLASSES: usize = 8;
-    const GROUPS: usize = Self::EMPTINESS_CLASSES + 2;
-
-    const fn new() -> Self {
-        Self {
-            groups: [None; Self::GROUPS],
-        }
-    }
-
-    const fn group(block: SuperBlock) -> usize {
-        let t =
-            SuperBlock::DATA_BYTES >> block.size_class.log_bytes() << block.size_class.log_bytes();
-        let u = block.used_bytes();
-        if u == 0 {
-            0
-        } else {
-            1 + (Self::EMPTINESS_CLASSES * u / t)
-        }
-    }
-
-    #[cold]
-    fn transfer(&mut self, mut block: SuperBlock, oldg: usize, newg: usize) {
-        if Some(block) == self.groups[newg] || newg == oldg {
-            return;
-        }
-        if self.groups[oldg] == Some(block) {
-            self.groups[oldg] = block.next;
-        }
-        if let Some(mut prev) = block.prev {
-            prev.next = block.next;
-        }
-        if let Some(mut next) = block.next {
-            next.prev = block.prev;
-        }
-        block.group = newg as _;
-        block.next = self.groups[newg];
-        block.prev = None;
-        if let Some(mut head) = self.groups[newg] {
-            head.prev = Some(block)
-        }
-        self.groups[newg] = Some(block);
-    }
-
-    fn put(&mut self, mut block: SuperBlock) {
-        let group = Self::group(block);
-        block.group = group as _;
-        block.next = self.groups[group];
-        block.prev = None;
-        if let Some(mut head) = self.groups[group] {
-            head.prev = Some(block)
-        }
-        self.groups[group] = Some(block);
-        debug_assert_ne!(block.prev, Some(block));
-    }
-
-    fn remove(&mut self, block: SuperBlock) {
-        if self.groups[block.group as usize] == Some(block) {
-            self.groups[block.group as usize] = block.next;
-        }
-        if let Some(mut prev) = block.prev {
-            prev.next = block.next;
-        }
-        if let Some(mut next) = block.next {
-            next.prev = block.prev;
-        }
-    }
-
-    fn pop(&mut self, group: usize) -> Option<SuperBlock> {
-        if let Some(block) = self.groups[group] {
-            self.groups[group] = block.next;
-            if let Some(mut next) = block.next {
-                next.prev = None;
-            }
-            return Some(block);
-        }
-        None
-    }
-
-    fn pop_most_empty_block(&mut self) -> Option<SuperBlock> {
-        for i in 0..Self::EMPTINESS_CLASSES + 1 {
-            while let Some(block) = self.groups[i] {
-                // remove
-                self.groups[i] = block.next;
-                if let Some(mut next) = block.next {
-                    next.prev = None;
-                }
-                let bg = Self::group(block);
-                if bg > i {
-                    self.put(block)
-                } else {
-                    return Some(block);
-                }
-            }
-        }
-        None
-    }
-
-    #[cold]
-    fn free_cell(&mut self, a: Address, mut b: SuperBlock) {
-        let oldg = Self::group(b);
-        b.free_cell(a);
-        let newg = Self::group(b);
-        if oldg != newg {
-            self.transfer(b, oldg, newg)
-        }
-    }
-}
-
-pub struct BlockList {
-    cache: Option<SuperBlock>,
-    groups: EmptyClass,
-    used_bytes: usize,
-    total_bytes: usize,
-}
-
-impl BlockList {
-    const fn new() -> Self {
-        Self {
-            cache: None,
-            groups: EmptyClass::new(),
-            used_bytes: 0,
-            total_bytes: 0,
-        }
-    }
-
-    const fn should_flush(&self, log_obj_size: usize) -> bool {
-        let u = self.used_bytes;
-        let a = self.total_bytes;
-        (EmptyClass::EMPTINESS_CLASSES * u) < ((EmptyClass::EMPTINESS_CLASSES - 1) * a)
-            && u + ((2 * SuperBlock::BYTES) >> log_obj_size) < a
-    }
-
-    fn remove(&mut self, block: SuperBlock) {
-        self.dec_used_bytes(block.used_bytes());
-        self.dec_total_bytes(SuperBlock::DATA_BYTES);
-        if self.cache == Some(block) {
-            self.cache = None;
-            return;
-        }
-        self.groups.remove(block);
-    }
-
-    fn pop_most_empty_block(&mut self) -> Option<SuperBlock> {
-        if let Some(cache) = self.cache.take() {
-            self.dec_total_bytes(SuperBlock::DATA_BYTES);
-            self.dec_used_bytes(cache.used_bytes());
-            return Some(cache);
-        }
-        let b = self.groups.pop_most_empty_block()?;
-        self.dec_total_bytes(SuperBlock::DATA_BYTES);
-        self.dec_used_bytes(b.used_bytes());
-        Some(b)
-    }
-
-    const fn inc_used_bytes(&mut self, used_bytes: usize) {
-        self.used_bytes += used_bytes;
-    }
-
-    const fn dec_used_bytes(&mut self, used_bytes: usize) {
-        self.used_bytes -= used_bytes;
-    }
-
-    const fn inc_total_bytes(&mut self, total_bytes: usize) {
-        self.total_bytes += total_bytes;
-    }
-
-    const fn dec_total_bytes(&mut self, total_bytes: usize) {
-        self.total_bytes -= total_bytes;
-    }
-
-    fn put(&mut self, b: SuperBlock) {
-        if Some(b) == self.cache {
-            return;
-        }
-        if let Some(c) = self.cache {
-            self.groups.put(c);
-        }
-        self.cache = Some(b);
-        self.inc_total_bytes(SuperBlock::DATA_BYTES);
-        self.inc_used_bytes(b.used_bytes());
-    }
-
-    #[cold]
-    fn alloc_cell_slow(&mut self, size_class: SizeClass) -> Option<Address> {
-        loop {
-            if self.cache.is_none() {
-                self.cache = Some(self.groups.pop_most_empty_block()?);
-            }
-            let mut b = self.cache.unwrap();
-            if let Some(a) = b.alloc_cell() {
-                self.inc_used_bytes(size_class.bytes());
-                return Some(a);
-            } else {
-                self.cache = None;
-                self.groups.put(b);
-            }
-        }
-    }
-
-    #[inline]
-    fn alloc_cell(&mut self, size_class: SizeClass) -> Option<Address> {
-        if let Some(mut b) = self.cache {
-            if let Some(a) = b.alloc_cell() {
-                self.inc_used_bytes(size_class.bytes());
-                return Some(a);
-            }
-        }
-        self.alloc_cell_slow(size_class)
-    }
-
-    #[inline]
-    fn free_cell(&mut self, a: Address, mut b: SuperBlock, size_class: SizeClass) {
-        if Some(b) == self.cache {
-            b.free_cell(a)
-        } else {
-            self.groups.free_cell(a, b);
-        }
-        self.dec_used_bytes(size_class.bytes());
-    }
-}
-
 pub struct Pool {
     pub global: bool,
-    // This is a major difference to the original hoard: we lock bins instead of the entire local heap.
-    blocks: [Mutex<BlockList>; Self::MAX_BINS],
+    head: Option<Block>,
 }
 
 impl Drop for Pool {
-    fn drop(&mut self) {
-        let space = &crate::Hoard::get().hoard_space;
-        for (i, block) in self.blocks.iter().enumerate() {
-            let sz: SizeClass = SizeClass(i as _);
-            let mut block = block.lock();
-            if let Some(b) = block.cache.take() {
-                space.flush_block(sz, b);
-            }
-            for i in 0..EmptyClass::GROUPS {
-                while let Some(b) = block.groups.pop(i) {
-                    space.flush_block(sz, b);
-                }
-            }
-        }
-    }
+    fn drop(&mut self) {}
 }
 
 impl Pool {
     const MAX_BINS: usize = 32;
 
     pub const fn new(global: bool) -> Self {
-        Self {
-            global,
-            blocks: [const { Mutex::new(BlockList::new()) }; 32],
-        }
+        Self { global, head: None }
     }
 
     pub const fn static_ref(&self) -> &'static Self {
         unsafe { &*(self as *const Self) }
     }
-
-    pub fn put(&self, size_class: SizeClass, mut block: SuperBlock) {
-        // debug_assert!(!block.is_full());
-        let mut blocks = self.lock_blocks(size_class);
-        block.owner = self.static_ref();
-        blocks.put(block);
-    }
-
-    pub fn pop_most_empty_block(
-        &self,
-        size_class: SizeClass,
-    ) -> Option<(SuperBlock, MutexGuard<BlockList>)> {
-        debug_assert!(self.global);
-        let mut blocks = self.lock_blocks(size_class);
-        if let Some(block) = blocks.pop_most_empty_block() {
-            debug_assert!(block.is_owned_by(self));
-            return Some((block, blocks));
-        }
-        None
-    }
-
-    fn lock_blocks(&self, size_class: SizeClass) -> MutexGuard<BlockList> {
-        unsafe { self.blocks.get_unchecked(size_class.as_usize()).lock() }
-    }
-
-    pub fn alloc_cell(
-        &mut self,
-        size_class: SizeClass,
-        space: &'static HoardSpace,
-    ) -> Option<Address> {
-        debug_assert!(!self.global);
-        let mut blocks = self.lock_blocks(size_class);
-        if let Some(a) = blocks.alloc_cell(size_class) {
-            return Some(a);
-        }
-        // slow-path
-        loop {
-            if let Some(a) = blocks.alloc_cell(size_class) {
-                return Some(a);
-            }
-            let block = space.acquire_block(size_class, self)?;
-            blocks.put(block);
-        }
-    }
-
-    pub fn free_cell(&self, cell: Address, space: &'static HoardSpace) {
-        let block = SuperBlock::containing(cell);
-        let mut owner = block.owner;
-        let mut blocks = owner.lock_blocks(block.size_class);
-        while !block.is_owned_by(owner) {
-            std::mem::drop(blocks);
-            std::thread::yield_now();
-            owner = block.owner;
-            blocks = owner.lock_blocks(block.size_class);
-        }
-        owner.free_cell_slow_impl(cell, space, &mut blocks, block)
-    }
-
-    fn free_cell_slow_impl(
-        &self,
-        cell: Address,
-        space: &'static HoardSpace,
-        blocks: &mut BlockList,
-        block: SuperBlock,
-    ) {
-        blocks.free_cell(cell, block, block.size_class);
-        if block.is_empty() {
-            blocks.remove(block);
-            space.release_block(block);
-        }
-        // Flush?
-        if !self.global && blocks.should_flush(block.size_class.log_bytes()) {
-            self.flush_block_slow(block.size_class, space, blocks);
-        }
-    }
-
-    #[cold]
-    fn flush_block_slow(
-        &self,
-        size_class: SizeClass,
-        space: &'static HoardSpace,
-        blocks: &mut BlockList,
-    ) {
-        // Transit a mostly-empty block to the global pool
-        debug_assert!(!self.global);
-        if let Some(mostly_empty_block) = blocks.pop_most_empty_block() {
-            // debug_assert!(!mostly_empty_block.is_full());
-            debug_assert!(mostly_empty_block.is_owned_by(self));
-            space.flush_block(size_class, mostly_empty_block);
-            debug_assert!(!mostly_empty_block.is_owned_by(self));
-        }
-    }
 }

ix/src/super_block.rs (deleted, -106)

This file was deleted.
