Commit 085fad8

Committed Feb 1, 2025
Auto merge of rust-lang#136401 - Mark-Simulacrum:lockfree-as-str, r=<try>
Lazy-page Symbol interner

This is a simple and only semi-correct implementation that works well only on 64-bit platforms, where we can lazily allocate zeroed pages (i.e., not actually use up memory for them): we grab a 4 GB chunk of memory for all interned strings and then just offset-allocate into it. This already fixes the unsoundness in `Symbol::as_str` by leaking that 4 GB chunk, but could also be faster (TBD).

A rewrite supporting ~any platform should be pretty straightforward: shard the allocation and lazily allocate it in chunks by hand.

r? `@ghost` -- this impl can't land as-is because it doesn't work on Windows; opening for an initial perf run.
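The core trick reads well as a standalone sketch. Below is a minimal illustration of the scheme described above, not the compiler's code (all names and sizes are illustrative, and it uses only stable APIs): lease one large zero-filled buffer up front, bump-allocate length-prefixed strings into it with a single atomic add, and hand out u32 offsets that resolve back to &'static str without taking a lock.

use std::sync::atomic::{AtomicUsize, Ordering};

struct StringArena {
    base: *mut u8,     // start of the leaked, zero-filled region
    len: usize,        // total capacity in bytes
    head: AtomicUsize, // next free byte offset
}

// Shared freely across threads: `head` is the only mutable state and it
// is only ever advanced atomically.
unsafe impl Sync for StringArena {}
unsafe impl Send for StringArena {}

impl StringArena {
    fn new(len: usize) -> Self {
        // Leaked on purpose: references derived from it really are 'static.
        let base = Box::leak(vec![0u8; len].into_boxed_slice()).as_mut_ptr();
        StringArena { base, len, head: AtomicUsize::new(0) }
    }

    fn alloc(&self, s: &str) -> u32 {
        // Claim 4 + s.len() bytes with one atomic add; no lock required.
        let off = self.head.fetch_add(4 + s.len(), Ordering::Relaxed);
        assert!(off + 4 + s.len() <= self.len, "arena exhausted");
        unsafe {
            let dst = self.base.add(off);
            // Length prefix first, then the bytes.
            dst.cast::<u32>().write_unaligned(s.len() as u32);
            dst.add(4).copy_from_nonoverlapping(s.as_ptr(), s.len());
        }
        off as u32
    }

    fn get(&self, off: u32) -> &'static str {
        // Sound only for offsets previously returned by `alloc`; the real
        // patch carries the same caveat (see the FIXME in the diff below).
        unsafe {
            let src = self.base.add(off as usize);
            let len = src.cast::<u32>().read_unaligned() as usize;
            let bytes = std::slice::from_raw_parts(src.add(4), len);
            std::str::from_utf8_unchecked(bytes)
        }
    }
}

The committed version below follows the same shape, but allocates with alloc_zeroed, advances the head with the nightly AtomicPtr::fetch_byte_add, rebuilds the &str with the nightly str::from_raw_parts, and reserves offsets below 2^16 for the pre-filled symbol table.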
2 parents e08cd3c + e6f870b

File tree

6 files changed (+120 −41)


Cargo.lock

+4

@@ -1533,6 +1533,8 @@ version = "0.15.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289"
 dependencies = [
+ "allocator-api2",
+ "equivalent",
  "foldhash",
  "serde",
 ]
@@ -3614,6 +3616,7 @@ dependencies = [
  "either",
  "elsa",
  "ena",
+ "hashbrown 0.15.2",
  "indexmap",
  "jobserver",
  "libc",
@@ -4510,6 +4513,7 @@ version = "0.0.0"
 dependencies = [
  "blake3",
  "derive-where",
+ "hashbrown 0.15.2",
  "indexmap",
  "itoa",
  "md-5",

compiler/rustc_data_structures/Cargo.toml

+1

@@ -10,6 +10,7 @@ bitflags = "2.4.1"
 either = "1.0"
 elsa = "=1.7.1"
 ena = "0.14.3"
+hashbrown = "0.15.2"
 indexmap = "2.4.0"
 jobserver_crate = { version = "0.1.28", package = "jobserver" }
 measureme = "11"

compiler/rustc_data_structures/src/marker.rs

+2

@@ -68,6 +68,7 @@ impl_dyn_send!(
     [std::sync::LazyLock<T, F> where T: DynSend, F: DynSend]
     [std::collections::HashSet<K, S> where K: DynSend, S: DynSend]
     [std::collections::HashMap<K, V, S> where K: DynSend, V: DynSend, S: DynSend]
+    [hashbrown::HashTable<T> where T: DynSend]
     [std::collections::BTreeMap<K, V, A> where K: DynSend, V: DynSend, A: std::alloc::Allocator + Clone + DynSend]
     [Vec<T, A> where T: DynSend, A: std::alloc::Allocator + DynSend]
     [Box<T, A> where T: ?Sized + DynSend, A: std::alloc::Allocator + DynSend]
@@ -142,6 +143,7 @@ impl_dyn_sync!(
     [std::sync::LazyLock<T, F> where T: DynSend + DynSync, F: DynSend]
     [std::collections::HashSet<K, S> where K: DynSync, S: DynSync]
     [std::collections::HashMap<K, V, S> where K: DynSync, V: DynSync, S: DynSync]
+    [hashbrown::HashTable<T> where T: DynSync]
     [std::collections::BTreeMap<K, V, A> where K: DynSync, V: DynSync, A: std::alloc::Allocator + Clone + DynSync]
     [Vec<T, A> where T: DynSync, A: std::alloc::Allocator + DynSync]
     [Box<T, A> where T: ?Sized + DynSync, A: std::alloc::Allocator + DynSync]

compiler/rustc_span/Cargo.toml

+1

@@ -7,6 +7,7 @@ edition = "2021"
 # tidy-alphabetical-start
 blake3 = "1.5.2"
 derive-where = "1.2.7"
+hashbrown = "0.15.2"
 indexmap = { version = "2.0.0" }
 itoa = "1.0"
 md5 = { package = "md-5", version = "0.10.0" }

compiler/rustc_span/src/lib.rs

+2

@@ -31,6 +31,8 @@
 #![feature(round_char_boundary)]
 #![feature(rustc_attrs)]
 #![feature(rustdoc_internals)]
+#![feature(str_from_raw_parts)]
+#![feature(strict_provenance_atomic_ptr)]
 #![warn(unreachable_pub)]
 // tidy-alphabetical-end
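Both new feature gates exist for the interner rewrite in symbol.rs: str_from_raw_parts gates std::str::from_raw_parts (build a &str directly from a pointer and a length), and strict_provenance_atomic_ptr gates AtomicPtr::fetch_byte_add (atomically advance a pointer by a byte count). A minimal nightly-only illustration of the two APIs, separate from the patch itself (the function and its safety contract are invented for the example):

#![feature(str_from_raw_parts)]
#![feature(strict_provenance_atomic_ptr)]

use std::sync::atomic::{AtomicPtr, Ordering};

/// Claim `n` bytes from a shared bump pointer and view them as a string.
///
/// Safety: the caller must guarantee the claimed bytes stay in bounds and
/// hold initialized, valid UTF-8.
unsafe fn claim_and_view(head: &AtomicPtr<u8>, n: usize) -> &'static str {
    // Atomically advance the pointer by `n` bytes, returning its old value.
    let start = head.fetch_byte_add(n, Ordering::Relaxed);
    // Reassemble a &str from raw parts; no copy, no lock.
    unsafe { std::str::from_raw_parts(start, n) }
}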

compiler/rustc_span/src/symbol.rs

+110 −41

@@ -2,11 +2,12 @@
 //! allows bidirectional lookup; i.e., given a value, one can easily find the
 //! type, and vice versa.

-use std::hash::{Hash, Hasher};
+use std::alloc::Layout;
+use std::hash::{BuildHasher, BuildHasherDefault, Hash, Hasher};
+use std::sync::atomic::{AtomicPtr, Ordering};
 use std::{fmt, str};

-use rustc_arena::DroplessArena;
-use rustc_data_structures::fx::FxIndexSet;
+use rustc_data_structures::fx::FxHasher;
 use rustc_data_structures::stable_hasher::{
     HashStable, StableCompare, StableHasher, ToStableHashKey,
 };
@@ -2461,18 +2462,9 @@ impl Symbol {
         with_session_globals(|session_globals| session_globals.symbol_interner.intern(string))
     }

-    /// Access the underlying string. This is a slowish operation because it
-    /// requires locking the symbol interner.
-    ///
-    /// Note that the lifetime of the return value is a lie. It's not the same
-    /// as `&self`, but actually tied to the lifetime of the underlying
-    /// interner. Interners are long-lived, and there are very few of them, and
-    /// this function is typically used for short-lived things, so in practice
-    /// it works out ok.
+    /// Access the underlying string.
     pub fn as_str(&self) -> &str {
-        with_session_globals(|session_globals| unsafe {
-            std::mem::transmute::<&str, &str>(session_globals.symbol_interner.get(*self))
-        })
+        with_session_globals(|session_globals| session_globals.symbol_interner.get(*self))
     }

     pub fn as_u32(self) -> u32 {
@@ -2527,53 +2519,130 @@ impl StableCompare for Symbol {
     }
 }

-pub(crate) struct Interner(Lock<InternerInner>);
+// This is never de-initialized and stores interned &str in static storage.
+// Each str is stored length-prefixed (u32), and we allow for random-access indexing with a u32
+// index by direct lookup in the arena. Indices <2^16 are stored in a separate structure (they are
+// pre-allocated at dense addresses so we can't use the same lockless O(1) hack for them).
+static GLOBAL_ARENA: std::sync::LazyLock<StringArena> =
+    std::sync::LazyLock::new(|| StringArena::new());
+
+struct StringArena {
+    base: *mut u8,
+    end: *mut u8,
+    head: AtomicPtr<u8>,
+}
+
+unsafe impl Sync for StringArena {}
+unsafe impl Send for StringArena {}
+
+impl StringArena {
+    fn new() -> Self {
+        unsafe {
+            let layout =
+                Layout::from_size_align(u32::MAX as usize, std::mem::align_of::<u32>()).unwrap();
+            let allocation = std::alloc::alloc_zeroed(layout);
+            if allocation.is_null() {
+                std::alloc::handle_alloc_error(layout)
+            }
+            StringArena {
+                base: allocation,
+                end: allocation.add(layout.size()),
+                // Reserve 2^16 u32 indices -- these will be used for pre-filled interning where we
+                // have a dense SymbolIndex space. We could make this exact but it doesn't really
+                // matter for this initial test anyway.
+                head: AtomicPtr::new(allocation.add(u16::MAX as usize)),
+            }
+        }
+    }
+
+    fn alloc(&self, s: &str) -> u32 {
+        unsafe {
+            // Allocate a chunk of the region, and fill it with the &str's length and bytes.
+            let dst = self.head.fetch_byte_add(size_of::<u32>() + s.len(), Ordering::Relaxed);
+            // Assert we're in-bounds.
+            assert!(
+                dst.addr().checked_add(4).unwrap().checked_add(s.len()).unwrap() < self.end.addr()
+            );
+            dst.cast::<u32>().write_unaligned(s.len().try_into().unwrap());
+            dst.add(4).copy_from_nonoverlapping(s.as_ptr(), s.len());
+            dst.byte_offset_from(self.base).try_into().unwrap()
+        }
+    }
+
+    fn get(&self, idx: u32) -> &str {
+        unsafe {
+            let src = self.base.add(idx as usize);
+            let len = src.cast::<u32>().read_unaligned();
+            // Assert we're in-bounds.
+            // FIXME: We need to check `len` is in-bounds too prior to reading it, and if this is to
+            // truly be safe it needs to check that the memory is utf-8 or otherwise check for
+            // validity of the passed index.
+            assert!(
+                src.addr().checked_add(4).unwrap().checked_add(len as usize).unwrap()
+                    < self.end.addr()
+            );
+            std::str::from_raw_parts(src.add(4), len as usize)
+        }
+    }
+}
+
+pub(crate) struct Interner(&'static [&'static str], Lock<InternerInner>);

 // The `&'static str`s in this type actually point into the arena.
 //
 // This type is private to prevent accidentally constructing more than one
 // `Interner` on the same thread, which makes it easy to mix up `Symbol`s
 // between `Interner`s.
 struct InternerInner {
-    arena: DroplessArena,
-    strings: FxIndexSet<&'static str>,
+    strings: hashbrown::HashTable<Symbol>,
 }

 impl Interner {
-    fn prefill(init: &[&'static str]) -> Self {
-        Interner(Lock::new(InternerInner {
-            arena: Default::default(),
-            strings: init.iter().copied().collect(),
-        }))
+    fn prefill(init: &'static [&'static str]) -> Self {
+        assert!(init.len() < u16::MAX as usize);
+        let mut strings = hashbrown::HashTable::new();
+
+        for (idx, s) in init.iter().copied().enumerate() {
+            let mut hasher = FxHasher::default();
+            s.hash(&mut hasher);
+            let hash = hasher.finish();
+            strings.insert_unique(hash, Symbol::new(idx as u32), |val| {
+                // has to be from `init` because we haven't yet inserted anything except those.
+                BuildHasherDefault::<FxHasher>::default().hash_one(init[val.0.index()])
+            });
+        }
+
+        Interner(init, Lock::new(InternerInner { strings }))
     }

     #[inline]
     fn intern(&self, string: &str) -> Symbol {
-        let mut inner = self.0.lock();
-        if let Some(idx) = inner.strings.get_index_of(string) {
-            return Symbol::new(idx as u32);
+        let hash = BuildHasherDefault::<FxHasher>::default().hash_one(string);
+        let mut inner = self.1.lock();
+        match inner.strings.find_entry(hash, |v| self.get(*v) == string) {
+            Ok(e) => return *e.get(),
+            Err(e) => {
+                let idx = GLOBAL_ARENA.alloc(string);
+                let res = Symbol::new(idx as u32);
+
+                e.into_table().insert_unique(hash, res, |val| {
+                    BuildHasherDefault::<FxHasher>::default().hash_one(self.get(*val))
+                });
+
+                res
+            }
         }
-
-        let string: &str = inner.arena.alloc_str(string);
-
-        // SAFETY: we can extend the arena allocation to `'static` because we
-        // only access these while the arena is still alive.
-        let string: &'static str = unsafe { &*(string as *const str) };
-
-        // This second hash table lookup can be avoided by using `RawEntryMut`,
-        // but this code path isn't hot enough for it to be worth it. See
-        // #91445 for details.
-        let (idx, is_new) = inner.strings.insert_full(string);
-        debug_assert!(is_new); // due to the get_index_of check above
-
-        Symbol::new(idx as u32)
     }

     /// Get the symbol as a string.
     ///
     /// [`Symbol::as_str()`] should be used in preference to this function.
-    fn get(&self, symbol: Symbol) -> &str {
-        self.0.lock().strings.get_index(symbol.0.as_usize()).unwrap()
+    fn get(&self, symbol: Symbol) -> &'static str {
+        if symbol.0.index() < u16::MAX as usize {
+            self.0[symbol.0.index()]
+        } else {
+            GLOBAL_ARENA.get(symbol.0.as_u32())
+        }
     }
 }
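For platforms where a single 4 GB reservation is not workable (the Windows problem called out in the commit message), the sharded rewrite the message alludes to could look roughly as follows. This is a hypothetical sketch, not part of the patch: the chunk table, sizes, and names are all invented, and a real version would also have to keep each string within a single chunk (for example by bumping the offset past a chunk boundary rather than letting a string straddle it).

use std::sync::atomic::{AtomicPtr, AtomicUsize, Ordering};

const CHUNK: usize = 1 << 20; // 1 MiB per shard (illustrative)
const CHUNKS: usize = 4096;   // 4096 x 1 MiB = 4 GiB of u32-addressable space

struct ShardedArena {
    chunks: Box<[AtomicPtr<u8>]>, // CHUNKS entries, null until first touch
    head: AtomicUsize,            // global bump offset, advanced as before
}

impl ShardedArena {
    fn new() -> Self {
        let chunks: Box<[AtomicPtr<u8>]> =
            (0..CHUNKS).map(|_| AtomicPtr::new(std::ptr::null_mut())).collect();
        ShardedArena { chunks, head: AtomicUsize::new(0) }
    }

    /// Return the chunk holding `offset`, allocating it on first use.
    fn chunk_for(&self, offset: usize) -> *mut u8 {
        let idx = offset / CHUNK;
        let p = self.chunks[idx].load(Ordering::Acquire);
        if !p.is_null() {
            return p;
        }
        // Racing threads may both allocate; the loser's chunk stays leaked,
        // which is harmless for a process-lifetime interner.
        let fresh = Box::leak(vec![0u8; CHUNK].into_boxed_slice()).as_mut_ptr();
        match self.chunks[idx].compare_exchange(
            std::ptr::null_mut(),
            fresh,
            Ordering::AcqRel,
            Ordering::Acquire,
        ) {
            Ok(_) => fresh,
            Err(existing) => existing,
        }
    }

    /// A u32 offset still decomposes as (offset / CHUNK, offset % CHUNK),
    /// so Symbol indices keep working; lookups pay one extra table load.
    fn resolve(&self, offset: u32) -> *const u8 {
        let off = offset as usize;
        unsafe { self.chunk_for(off).add(off % CHUNK) }
    }
}

Compared to the flat arena in the diff above, this trades a guaranteed-contiguous region for on-demand chunk allocation, so it never needs a 4 GB virtual reservation; the cost is one extra atomic load (and a rare CAS) on the lookup path.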
