
Commit c860a51

Remove lots of `as` casts with modern alternatives:

* `a as *mut T` => `a.cast()`
* `a.offset(b as isize)` => `a.add(b)`
* `a.offset(-(b as isize))` => `a.sub(b)`
* `u32 as usize` => `usize::try_from(_).unwrap()`

In general `as` has the potential of losing bits, so try to avoid it where we can.
1 parent 9a0e6a1 commit c860a51
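For reference, a minimal sketch of the replacement patterns named in the commit message, shown side by side with the `as` forms they replace. The function and its arguments (`widen_and_step`, `base`, `count`) are hypothetical names for illustration, not code from dlmalloc.rs:

```rust
use core::convert::TryFrom;

// Hypothetical helper illustrating each cast replacement from the commit message.
unsafe fn widen_and_step(base: *mut u8, count: u32) -> *mut u64 {
    // `base as *mut u64`              =>  `base.cast::<u64>()`
    let p: *mut u64 = base.cast();
    // `count as usize`                =>  `usize::try_from(count).unwrap()`
    // (fails loudly instead of silently truncating)
    let n = usize::try_from(count).unwrap();
    // `p.offset(n as isize)`          =>  `p.add(n)`
    let forward = p.add(n);
    // `forward.offset(-(1 as isize))` =>  `forward.sub(1)`
    forward.sub(1)
}
```

The pointer methods also keep the element count in `usize`, avoiding the intermediate `isize` conversion that `offset` requires.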

File tree

1 file changed: +59 -56 lines changed


src/dlmalloc.rs

+59 -56
@@ -34,6 +34,9 @@ const NTREEBINS: usize = 32;
 const SMALLBIN_SHIFT: usize = 3;
 const TREEBIN_SHIFT: usize = 8;

+const NSMALLBINS_U32: u32 = NSMALLBINS as u32;
+const NTREEBINS_U32: u32 = NTREEBINS as u32;
+
 // TODO: runtime configurable? documentation?
 const DEFAULT_GRANULARITY: usize = 64 * 1024;
 const DEFAULT_TRIM_THRESHOLD: usize = 2 * 1024 * 1024;
@@ -78,7 +81,7 @@ fn least_bit(x: u32) -> u32 {
 }

 fn leftshift_for_tree_index(x: u32) -> u32 {
-    let x = x as usize;
+    let x = usize::try_from(x).unwrap();
     if x == NTREEBINS - 1 {
         0
     } else {
@@ -181,7 +184,7 @@ impl<A: Allocator> Dlmalloc<A> {
     }

     fn small_index2size(&self, idx: u32) -> usize {
-        (idx as usize) << SMALLBIN_SHIFT
+        usize::try_from(idx).unwrap() << SMALLBIN_SHIFT
     }

     fn is_small(&self, s: usize) -> bool {
@@ -193,11 +196,11 @@ impl<A: Allocator> Dlmalloc<A> {
     }

     fn align_offset(&self, addr: *mut u8) -> usize {
-        self.align_offset_usize(addr as usize)
+        addr.align_offset(self.malloc_alignment())
     }

     fn align_offset_usize(&self, addr: usize) -> usize {
-        align_up(addr, self.malloc_alignment()) - (addr as usize)
+        align_up(addr, self.malloc_alignment()) - addr
     }

     fn top_foot_size(&self) -> usize {
@@ -212,8 +215,8 @@ impl<A: Allocator> Dlmalloc<A> {

     fn align_as_chunk(&self, ptr: *mut u8) -> *mut Chunk {
         unsafe {
-            let chunk = Chunk::to_mem(ptr as *mut Chunk);
-            ptr.offset(self.align_offset(chunk) as isize) as *mut Chunk
+            let chunk = Chunk::to_mem(ptr.cast());
+            ptr.add(self.align_offset(chunk)).cast()
         }
     }

@@ -380,28 +383,28 @@ impl<A: Allocator> Dlmalloc<A> {
             self.release_checks = MAX_RELEASE_CHECK_RATE;
             self.init_bins();
             let tsize = tsize - self.top_foot_size();
-            self.init_top(tbase as *mut Chunk, tsize);
+            self.init_top(tbase.cast(), tsize);
             // let mn = Chunk::next(Chunk::from_mem(self as *mut _ as *mut u8));
             // let top_foot_size = self.top_foot_size();
             // self.init_top(mn, tbase as usize + tsize - mn as usize - top_foot_size);
         } else {
-            let mut sp = &mut self.seg as *mut Segment;
+            let mut sp: *mut Segment = &mut self.seg;
             while !sp.is_null() && tbase != Segment::top(sp) {
                 sp = (*sp).next;
             }
             if !sp.is_null()
                 && !Segment::is_extern(sp)
                 && Segment::sys_flags(sp) == flags
-                && Segment::holds(sp, self.top as *mut u8)
+                && Segment::holds(sp, self.top.cast())
             {
                 (*sp).size += tsize;
                 let ptr = self.top;
                 let size = self.topsize + tsize;
                 self.init_top(ptr, size);
             } else {
                 self.least_addr = cmp::min(tbase, self.least_addr);
-                let mut sp = &mut self.seg as *mut Segment;
-                while !sp.is_null() && (*sp).base != tbase.offset(tsize as isize) {
+                let mut sp: *mut Segment = &mut self.seg;
+                while !sp.is_null() && (*sp).base != tbase.add(tsize) {
                     sp = (*sp).next;
                 }
                 if !sp.is_null() && !Segment::is_extern(sp) && Segment::sys_flags(sp) == flags {
@@ -544,15 +547,15 @@ impl<A: Allocator> Dlmalloc<A> {
             let newmmsize =
                 self.mmap_align(nb + 6 * mem::size_of::<usize>() + self.malloc_alignment() - 1);
             let ptr = self.system_allocator.remap(
-                (oldp as *mut u8).offset(-(offset as isize)),
+                oldp.cast::<u8>().sub(offset),
                 oldmmsize,
                 newmmsize,
                 can_move,
             );
             if ptr.is_null() {
                 return ptr::null_mut();
             }
-            let newp = ptr.offset(offset as isize) as *mut Chunk;
+            let newp = ptr.add(offset).cast::<Chunk>();
             let psize = newmmsize - offset - self.mmap_foot_pad();
             (*newp).head = psize;
             (*Chunk::plus_offset(newp, psize)).head = Chunk::fencepost_head();
@@ -593,11 +596,11 @@ impl<A: Allocator> Dlmalloc<A> {
             let br =
                 Chunk::from_mem(((mem as usize + alignment - 1) & (!alignment + 1)) as *mut u8);
             let pos = if (br as usize - p as usize) > self.min_chunk_size() {
-                br as *mut u8
+                br.cast::<u8>()
             } else {
-                (br as *mut u8).offset(alignment as isize)
+                br.cast::<u8>().add(alignment)
             };
-            let newp = pos as *mut Chunk;
+            let newp = pos.cast::<Chunk>();
             let leadsize = pos as usize - p as usize;
             let newsize = Chunk::size(p) - leadsize;

@@ -643,7 +646,7 @@ impl<A: Allocator> Dlmalloc<A> {
                 psize += prevsize + self.mmap_foot_pad();
                 if self
                     .system_allocator
-                    .free((p as *mut u8).offset(-(prevsize as isize)), psize)
+                    .free(p.cast::<u8>().sub(prevsize), psize)
                 {
                     self.footprint -= psize;
                 }
@@ -708,7 +711,7 @@ impl<A: Allocator> Dlmalloc<A> {
     }

     unsafe fn init_bins(&mut self) {
-        for i in 0..NSMALLBINS as u32 {
+        for i in 0..NSMALLBINS_U32 {
             let bin = self.smallbin_at(i);
             (*bin).next = bin;
             (*bin).prev = bin;
@@ -762,28 +765,28 @@ impl<A: Allocator> Dlmalloc<A> {
         // TODO: what in the world is this function doing

         // Determine locations and sizes of segment, fenceposts, and the old top
-        let old_top = self.top as *mut u8;
+        let old_top = self.top.cast::<u8>();
         let oldsp = self.segment_holding(old_top);
         let old_end = Segment::top(oldsp);
         let ssize = self.pad_request(mem::size_of::<Segment>());
         let offset = ssize + mem::size_of::<usize>() * 4 + self.malloc_alignment() - 1;
-        let rawsp = old_end.offset(-(offset as isize));
-        let offset = self.align_offset(Chunk::to_mem(rawsp as *mut Chunk));
-        let asp = rawsp.offset(offset as isize);
-        let csp = if asp < old_top.offset(self.min_chunk_size() as isize) {
+        let rawsp = old_end.sub(offset);
+        let offset = self.align_offset(Chunk::to_mem(rawsp.cast()));
+        let asp = rawsp.add(offset);
+        let csp = if asp < old_top.add(self.min_chunk_size()) {
             old_top
         } else {
             asp
         };
-        let sp = csp as *mut Chunk;
-        let ss = Chunk::to_mem(sp) as *mut Segment;
+        let sp = csp.cast::<Chunk>();
+        let ss = Chunk::to_mem(sp).cast::<Segment>();
         let tnext = Chunk::plus_offset(sp, ssize);
         let mut p = tnext;
         let mut nfences = 0;

         // reset the top to our new space
         let size = tsize - self.top_foot_size();
-        self.init_top(tbase as *mut Chunk, size);
+        self.init_top(tbase.cast(), size);

         // set up our segment record
         debug_assert!(self.is_aligned(ss as usize));
@@ -809,7 +812,7 @@ impl<A: Allocator> Dlmalloc<A> {

         // insert the rest of the old top into a bin as an ordinary free chunk
         if csp != old_top {
-            let q = old_top as *mut Chunk;
+            let q = old_top.cast::<Chunk>();
             let psize = csp as usize - old_top as usize;
             let tn = Chunk::plus_offset(q, psize);
             Chunk::set_free_with_pinuse(q, psize, tn);
@@ -851,7 +854,7 @@ impl<A: Allocator> Dlmalloc<A> {
         }

         let vc = TreeChunk::chunk(v);
-        let r = Chunk::plus_offset(vc, size) as *mut TreeChunk;
+        let r = Chunk::plus_offset(vc, size).cast::<TreeChunk>();
         debug_assert_eq!(Chunk::size(vc), rsize + size);
         self.unlink_large_chunk(v);
         if rsize < self.min_chunk_size() {
@@ -956,7 +959,7 @@ impl<A: Allocator> Dlmalloc<A> {
         if x == 0 {
             0
         } else if x > 0xffff {
-            NTREEBINS as u32 - 1
+            NTREEBINS_U32 - 1
         } else {
             let k = mem::size_of_val(&x) * 8 - 1 - (x.leading_zeros() as usize);
             ((k << 1) + (size >> (k + TREEBIN_SHIFT - 1) & 1)) as u32
@@ -991,7 +994,7 @@ impl<A: Allocator> Dlmalloc<A> {
         if self.is_small(size) {
             self.insert_small_chunk(chunk, size);
         } else {
-            self.insert_large_chunk(chunk as *mut TreeChunk, size);
+            self.insert_large_chunk(chunk.cast(), size);
         }
     }

@@ -1022,7 +1025,7 @@ impl<A: Allocator> Dlmalloc<A> {
         if !self.treemap_is_marked(idx) {
             self.mark_treemap(idx);
             *h = chunk;
-            (*chunk).parent = h as *mut TreeChunk; // TODO: dubious?
+            (*chunk).parent = h.cast(); // TODO: dubious?
             (*chunkc).next = chunkc;
             (*chunkc).prev = chunkc;
         } else {
@@ -1083,7 +1086,7 @@ impl<A: Allocator> Dlmalloc<A> {
         if self.is_small(size) {
             self.unlink_small_chunk(chunk, size)
         } else {
-            self.unlink_large_chunk(chunk as *mut TreeChunk);
+            self.unlink_large_chunk(chunk.cast());
         }
     }

@@ -1178,7 +1181,7 @@ impl<A: Allocator> Dlmalloc<A> {
                 psize += prevsize + self.mmap_foot_pad();
                 if self
                     .system_allocator
-                    .free((p as *mut u8).offset(-(prevsize as isize)), psize)
+                    .free(p.cast::<u8>().sub(prevsize), psize)
                 {
                     self.footprint -= psize;
                 }
@@ -1236,7 +1239,7 @@ impl<A: Allocator> Dlmalloc<A> {
             self.insert_small_chunk(p, psize);
             self.check_free_chunk(p);
         } else {
-            self.insert_large_chunk(p as *mut TreeChunk, psize);
+            self.insert_large_chunk(p.cast(), psize);
             self.check_free_chunk(p);
             self.release_checks -= 1;
             if self.release_checks == 0 {
@@ -1256,7 +1259,7 @@ impl<A: Allocator> Dlmalloc<A> {
         if self.topsize > pad {
             let unit = DEFAULT_GRANULARITY;
             let extra = ((self.topsize - pad + unit - 1) / unit - 1) * unit;
-            let sp = self.segment_holding(self.top as *mut u8);
+            let sp = self.segment_holding(self.top.cast());
             debug_assert!(!sp.is_null());

             if !Segment::is_extern(sp) {
@@ -1296,7 +1299,7 @@ impl<A: Allocator> Dlmalloc<A> {
     unsafe fn has_segment_link(&self, ptr: *mut Segment) -> bool {
         let mut sp = &self.seg as *const Segment as *mut Segment;
         while !sp.is_null() {
-            if Segment::holds(ptr, sp as *mut u8) {
+            if Segment::holds(ptr, sp.cast()) {
                 return true;
             }
             sp = (*sp).next;
@@ -1308,7 +1311,7 @@ impl<A: Allocator> Dlmalloc<A> {
     unsafe fn release_unused_segments(&mut self) -> usize {
         let mut released = 0;
         let mut nsegs = 0;
-        let mut pred = &mut self.seg as *mut Segment;
+        let mut pred: *mut Segment = &mut self.seg;
         let mut sp = (*pred).next;
         while !sp.is_null() {
             let base = (*sp).base;
@@ -1321,11 +1324,11 @@ impl<A: Allocator> Dlmalloc<A> {
             let psize = Chunk::size(p);
             // We can unmap if the first chunk holds the entire segment and
             // isn't pinned.
-            let chunk_top = (p as *mut u8).offset(psize as isize);
-            let top = base.offset((size - self.top_foot_size()) as isize);
+            let chunk_top = p.cast::<u8>().add(psize);
+            let top = base.add(size - self.top_foot_size());
             if !Chunk::inuse(p) && chunk_top >= top {
-                let tp = p as *mut TreeChunk;
-                debug_assert!(Segment::holds(sp, sp as *mut u8));
+                let tp = p.cast::<TreeChunk>();
+                debug_assert!(Segment::holds(sp, sp.cast()));
                 if p == self.dv {
                     self.dv = ptr::null_mut();
                     self.dvsize = 0;
@@ -1371,7 +1374,7 @@ impl<A: Allocator> Dlmalloc<A> {
         if !cfg!(debug_assertions) {
            return;
         }
-        let sp = self.segment_holding(p as *mut u8);
+        let sp = self.segment_holding(p.cast());
         let sz = (*p).head & !INUSE;
         debug_assert!(!sp.is_null());
         debug_assert!(
@@ -1463,11 +1466,11 @@ impl<A: Allocator> Dlmalloc<A> {
         if !cfg!(debug_assertions) {
             return;
         }
-        for i in 0..NSMALLBINS {
-            self.check_smallbin(i as u32);
+        for i in 0..NSMALLBINS_U32 {
+            self.check_smallbin(i);
         }
-        for i in 0..NTREEBINS {
-            self.check_treebin(i as u32);
+        for i in 0..NTREEBINS_U32 {
+            self.check_treebin(i);
         }
         if self.dvsize != 0 {
             self.check_any_chunk(self.dv);
@@ -1538,7 +1541,7 @@ impl<A: Allocator> Dlmalloc<A> {
         debug_assert_eq!(tindex, idx);
         debug_assert!(tsize >= self.min_large_size());
         debug_assert!(tsize >= self.min_size_for_tree_index(idx));
-        debug_assert!(idx == NTREEBINS as u32 - 1 || tsize < self.min_size_for_tree_index(idx + 1));
+        debug_assert!(idx == NTREEBINS_U32 - 1 || tsize < self.min_size_for_tree_index(idx + 1));

         let mut u = t;
         let mut head = ptr::null_mut::<TreeChunk>();
@@ -1591,7 +1594,7 @@ impl<A: Allocator> Dlmalloc<A> {
     }

     fn min_size_for_tree_index(&self, idx: u32) -> usize {
-        let idx = idx as usize;
+        let idx = usize::try_from(idx).unwrap();
         (1 << ((idx >> 1) + TREEBIN_SHIFT)) | ((idx & 1) << ((idx >> 1) + TREEBIN_SHIFT - 1))
     }

@@ -1628,7 +1631,7 @@ impl<A: Allocator> Dlmalloc<A> {
             return false;
         }
         let mut u = t;
-        let chunk = chunk as *mut TreeChunk;
+        let chunk = chunk.cast();
         loop {
             if u == chunk {
                 return true;
@@ -1651,7 +1654,7 @@ impl<A: Allocator> Dlmalloc<A> {

     pub unsafe fn destroy(mut self) -> usize {
         let mut freed = 0;
-        let mut sp = &mut self.seg as *mut Segment;
+        let mut sp: *mut Segment = &mut self.seg;
         while !sp.is_null() {
             let base = (*sp).base;
             let size = (*sp).size;
@@ -1682,11 +1685,11 @@ impl Chunk {
     }

     unsafe fn next(me: *mut Chunk) -> *mut Chunk {
-        (me as *mut u8).offset(((*me).head & !FLAG_BITS) as isize) as *mut Chunk
+        me.cast::<u8>().add((*me).head & !FLAG_BITS).cast()
     }

     unsafe fn prev(me: *mut Chunk) -> *mut Chunk {
-        (me as *mut u8).offset(-((*me).prev_foot as isize)) as *mut Chunk
+        me.cast::<u8>().sub((*me).prev_foot).cast()
     }

     unsafe fn cinuse(me: *mut Chunk) -> bool {
@@ -1745,7 +1748,7 @@ impl Chunk {
     }

     unsafe fn minus_offset(me: *mut Chunk, offset: usize) -> *mut Chunk {
-        me.cast::<u8>().offset(-(offset as isize)).cast()
+        me.cast::<u8>().sub(offset).cast()
     }

     unsafe fn to_mem(me: *mut Chunk) -> *mut u8 {
@@ -1757,7 +1760,7 @@ impl Chunk {
     }

     unsafe fn from_mem(mem: *mut u8) -> *mut Chunk {
-        mem.offset(-2 * (mem::size_of::<usize>() as isize)) as *mut Chunk
+        mem.sub(2 * mem::size_of::<usize>()).cast()
     }
 }

@@ -1776,11 +1779,11 @@ impl TreeChunk {
     }

     unsafe fn next(me: *mut TreeChunk) -> *mut TreeChunk {
-        (*TreeChunk::chunk(me)).next as *mut TreeChunk
+        (*TreeChunk::chunk(me)).next.cast()
     }

     unsafe fn prev(me: *mut TreeChunk) -> *mut TreeChunk {
-        (*TreeChunk::chunk(me)).prev as *mut TreeChunk
+        (*TreeChunk::chunk(me)).prev.cast()
     }

 }