@@ -34,6 +34,9 @@ const NTREEBINS: usize = 32;
 const SMALLBIN_SHIFT: usize = 3;
 const TREEBIN_SHIFT: usize = 8;
 
+const NSMALLBINS_U32: u32 = NSMALLBINS as u32;
+const NTREEBINS_U32: u32 = NTREEBINS as u32;
+
 // TODO: runtime configurable? documentation?
 const DEFAULT_GRANULARITY: usize = 64 * 1024;
 const DEFAULT_TRIM_THRESHOLD: usize = 2 * 1024 * 1024;
@@ -78,7 +81,7 @@ fn least_bit(x: u32) -> u32 {
 }
 
 fn leftshift_for_tree_index(x: u32) -> u32 {
-    let x = x as usize;
+    let x = usize::try_from(x).unwrap();
     if x == NTREEBINS - 1 {
         0
     } else {
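Aside (not part of the diff): a minimal standalone sketch of the conversion idiom adopted above — `usize::try_from(x).unwrap()` makes the `u32` → `usize` conversion explicit and panics instead of silently wrapping on a hypothetical target where `usize` is narrower than `u32`. The `index_to_offset` name and the shift amount below are illustrative only.

```rust
fn index_to_offset(idx: u32) -> usize {
    // `as` would silently truncate on a target where usize has fewer than 32
    // bits; `try_from` turns that case into a loud panic instead.
    let idx = usize::try_from(idx).unwrap();
    idx << 3 // same shape as `small_index2size` with SMALLBIN_SHIFT = 3
}

fn main() {
    assert_eq!(index_to_offset(4), 32);
}
```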
@@ -181,7 +184,7 @@ impl<A: Allocator> Dlmalloc<A> {
     }
 
     fn small_index2size(&self, idx: u32) -> usize {
-        (idx as usize) << SMALLBIN_SHIFT
+        usize::try_from(idx).unwrap() << SMALLBIN_SHIFT
     }
 
     fn is_small(&self, s: usize) -> bool {
@@ -193,11 +196,11 @@ impl<A: Allocator> Dlmalloc<A> {
     }
 
     fn align_offset(&self, addr: *mut u8) -> usize {
-        self.align_offset_usize(addr as usize)
+        addr.align_offset(self.malloc_alignment())
     }
 
     fn align_offset_usize(&self, addr: usize) -> usize {
-        align_up(addr, self.malloc_alignment()) - (addr as usize)
+        align_up(addr, self.malloc_alignment()) - addr
     }
 
     fn top_foot_size(&self) -> usize {
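Aside: a self-contained sketch (hypothetical `ALIGN` value, not the allocator's `malloc_alignment()`) of how `<*const u8>::align_offset` relates to the manual round-up arithmetic it replaces; the standard library only promises either a usable offset or `usize::MAX`, so the comparison is hedged accordingly.

```rust
fn main() {
    const ALIGN: usize = 16; // hypothetical alignment, stand-in for malloc_alignment()
    let buf = [0u8; 64];
    let ptr = buf.as_ptr();

    // Manual form: round the address up to ALIGN, then subtract the address.
    let addr = ptr as usize;
    let manual = ((addr + ALIGN - 1) & !(ALIGN - 1)) - addr;

    // Std form: ask the pointer how far forward it must move to be ALIGN-aligned.
    let via_std = ptr.align_offset(ALIGN);

    // The docs allow `align_offset` to give up and return usize::MAX, so a
    // strict equality assert would be too strong in general.
    assert!(via_std == manual || via_std == usize::MAX);
    println!("manual = {manual}, align_offset = {via_std}");
}
```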
@@ -212,8 +215,8 @@ impl<A: Allocator> Dlmalloc<A> {
 
     fn align_as_chunk(&self, ptr: *mut u8) -> *mut Chunk {
         unsafe {
-            let chunk = Chunk::to_mem(ptr as *mut Chunk);
-            ptr.offset(self.align_offset(chunk) as isize) as *mut Chunk
+            let chunk = Chunk::to_mem(ptr.cast());
+            ptr.add(self.align_offset(chunk)).cast()
         }
     }
 
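Aside: an illustrative sketch (the `Header` type and the offset are hypothetical, not from this crate) of the pointer idioms the hunk above switches to — `ptr.cast::<T>()` instead of `ptr as *mut T`, and `ptr.add(n)` instead of `ptr.offset(n as isize)` when the count is already a `usize`.

```rust
#[repr(C)]
struct Header {
    size: usize,
}

fn main() {
    // 64 bytes of 8-byte-aligned storage so the Header access below is aligned.
    let mut backing = [0u64; 8];
    let base: *mut u8 = backing.as_mut_ptr().cast();

    unsafe {
        // Old style: signed `offset` plus an `as` pointer cast.
        let a = base.offset(16) as *mut Header;
        // New style: unsigned `add` plus `cast`, producing the same pointer.
        let b = base.add(16).cast::<Header>();
        assert_eq!(a, b);

        (*b).size = 42;
        assert_eq!((*a).size, 42);
    }
}
```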
@@ -380,28 +383,28 @@ impl<A: Allocator> Dlmalloc<A> {
             self.release_checks = MAX_RELEASE_CHECK_RATE;
             self.init_bins();
             let tsize = tsize - self.top_foot_size();
-            self.init_top(tbase as *mut Chunk, tsize);
+            self.init_top(tbase.cast(), tsize);
             // let mn = Chunk::next(Chunk::from_mem(self as *mut _ as *mut u8));
             // let top_foot_size = self.top_foot_size();
             // self.init_top(mn, tbase as usize + tsize - mn as usize - top_foot_size);
         } else {
-            let mut sp = &mut self.seg as *mut Segment;
+            let mut sp: *mut Segment = &mut self.seg;
             while !sp.is_null() && tbase != Segment::top(sp) {
                 sp = (*sp).next;
             }
             if !sp.is_null()
                 && !Segment::is_extern(sp)
                 && Segment::sys_flags(sp) == flags
-                && Segment::holds(sp, self.top as *mut u8)
+                && Segment::holds(sp, self.top.cast())
             {
                 (*sp).size += tsize;
                 let ptr = self.top;
                 let size = self.topsize + tsize;
                 self.init_top(ptr, size);
             } else {
                 self.least_addr = cmp::min(tbase, self.least_addr);
-                let mut sp = &mut self.seg as *mut Segment;
-                while !sp.is_null() && (*sp).base != tbase.offset(tsize as isize) {
+                let mut sp: *mut Segment = &mut self.seg;
+                while !sp.is_null() && (*sp).base != tbase.add(tsize) {
                     sp = (*sp).next;
                 }
                 if !sp.is_null() && !Segment::is_extern(sp) && Segment::sys_flags(sp) == flags {
@@ -544,15 +547,15 @@ impl<A: Allocator> Dlmalloc<A> {
             let newmmsize =
                 self.mmap_align(nb + 6 * mem::size_of::<usize>() + self.malloc_alignment() - 1);
             let ptr = self.system_allocator.remap(
-                (oldp as *mut u8).offset(-(offset as isize)),
+                oldp.cast::<u8>().sub(offset),
                 oldmmsize,
                 newmmsize,
                 can_move,
             );
             if ptr.is_null() {
                 return ptr::null_mut();
             }
-            let newp = ptr.offset(offset as isize) as *mut Chunk;
+            let newp = ptr.add(offset).cast::<Chunk>();
             let psize = newmmsize - offset - self.mmap_foot_pad();
             (*newp).head = psize;
             (*Chunk::plus_offset(newp, psize)).head = Chunk::fencepost_head();
@@ -593,11 +596,11 @@ impl<A: Allocator> Dlmalloc<A> {
             let br =
                 Chunk::from_mem(((mem as usize + alignment - 1) & (!alignment + 1)) as *mut u8);
             let pos = if (br as usize - p as usize) > self.min_chunk_size() {
-                br as *mut u8
+                br.cast::<u8>()
             } else {
-                (br as *mut u8).offset(alignment as isize)
+                br.cast::<u8>().add(alignment)
             };
-            let newp = pos as *mut Chunk;
+            let newp = pos.cast::<Chunk>();
             let leadsize = pos as usize - p as usize;
             let newsize = Chunk::size(p) - leadsize;
 
@@ -643,7 +646,7 @@ impl<A: Allocator> Dlmalloc<A> {
                 psize += prevsize + self.mmap_foot_pad();
                 if self
                     .system_allocator
-                    .free((p as *mut u8).offset(-(prevsize as isize)), psize)
+                    .free(p.cast::<u8>().sub(prevsize), psize)
                 {
                     self.footprint -= psize;
                 }
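Aside: the `.sub(n)` form used above is equivalent to `.offset(-(n as isize))` for in-bounds offsets, just without the negation of a cast value; a small standalone sketch (hypothetical data, nothing from the allocator):

```rust
fn main() {
    let data = [1u32, 2, 3, 4];
    let end: *const u32 = data.as_ptr_range().end;

    unsafe {
        // Old style: negate a usize that has been cast to isize.
        let last_old = end.offset(-(1 as isize));
        // New style: walk backwards by an unsigned element count.
        let last_new = end.sub(1);
        assert_eq!(last_old, last_new);
        assert_eq!(*last_new, 4);
    }
}
```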
@@ -708,7 +711,7 @@ impl<A: Allocator> Dlmalloc<A> {
     }
 
     unsafe fn init_bins(&mut self) {
-        for i in 0..NSMALLBINS as u32 {
+        for i in 0..NSMALLBINS_U32 {
             let bin = self.smallbin_at(i);
             (*bin).next = bin;
             (*bin).prev = bin;
@@ -762,28 +765,28 @@ impl<A: Allocator> Dlmalloc<A> {
         // TODO: what in the world is this function doing
 
         // Determine locations and sizes of segment, fenceposts, and the old top
-        let old_top = self.top as *mut u8;
+        let old_top = self.top.cast::<u8>();
         let oldsp = self.segment_holding(old_top);
         let old_end = Segment::top(oldsp);
         let ssize = self.pad_request(mem::size_of::<Segment>());
         let offset = ssize + mem::size_of::<usize>() * 4 + self.malloc_alignment() - 1;
-        let rawsp = old_end.offset(-(offset as isize));
-        let offset = self.align_offset(Chunk::to_mem(rawsp as *mut Chunk));
-        let asp = rawsp.offset(offset as isize);
-        let csp = if asp < old_top.offset(self.min_chunk_size() as isize) {
+        let rawsp = old_end.sub(offset);
+        let offset = self.align_offset(Chunk::to_mem(rawsp.cast()));
+        let asp = rawsp.add(offset);
+        let csp = if asp < old_top.add(self.min_chunk_size()) {
             old_top
         } else {
             asp
         };
-        let sp = csp as *mut Chunk;
-        let ss = Chunk::to_mem(sp) as *mut Segment;
+        let sp = csp.cast::<Chunk>();
+        let ss = Chunk::to_mem(sp).cast::<Segment>();
         let tnext = Chunk::plus_offset(sp, ssize);
         let mut p = tnext;
         let mut nfences = 0;
 
         // reset the top to our new space
         let size = tsize - self.top_foot_size();
-        self.init_top(tbase as *mut Chunk, size);
+        self.init_top(tbase.cast(), size);
 
         // set up our segment record
         debug_assert!(self.is_aligned(ss as usize));
@@ -809,7 +812,7 @@ impl<A: Allocator> Dlmalloc<A> {
 
         // insert the rest of the old top into a bin as an ordinary free chunk
         if csp != old_top {
-            let q = old_top as *mut Chunk;
+            let q = old_top.cast::<Chunk>();
             let psize = csp as usize - old_top as usize;
             let tn = Chunk::plus_offset(q, psize);
             Chunk::set_free_with_pinuse(q, psize, tn);
@@ -851,7 +854,7 @@ impl<A: Allocator> Dlmalloc<A> {
         }
 
         let vc = TreeChunk::chunk(v);
-        let r = Chunk::plus_offset(vc, size) as *mut TreeChunk;
+        let r = Chunk::plus_offset(vc, size).cast::<TreeChunk>();
         debug_assert_eq!(Chunk::size(vc), rsize + size);
         self.unlink_large_chunk(v);
         if rsize < self.min_chunk_size() {
@@ -956,7 +959,7 @@ impl<A: Allocator> Dlmalloc<A> {
         if x == 0 {
             0
         } else if x > 0xffff {
-            NTREEBINS as u32 - 1
+            NTREEBINS_U32 - 1
         } else {
             let k = mem::size_of_val(&x) * 8 - 1 - (x.leading_zeros() as usize);
             ((k << 1) + (size >> (k + TREEBIN_SHIFT - 1) & 1)) as u32
@@ -991,7 +994,7 @@ impl<A: Allocator> Dlmalloc<A> {
         if self.is_small(size) {
             self.insert_small_chunk(chunk, size);
         } else {
-            self.insert_large_chunk(chunk as *mut TreeChunk, size);
+            self.insert_large_chunk(chunk.cast(), size);
         }
     }
 
@@ -1022,7 +1025,7 @@ impl<A: Allocator> Dlmalloc<A> {
         if !self.treemap_is_marked(idx) {
             self.mark_treemap(idx);
             *h = chunk;
-            (*chunk).parent = h as *mut TreeChunk; // TODO: dubious?
+            (*chunk).parent = h.cast(); // TODO: dubious?
             (*chunkc).next = chunkc;
             (*chunkc).prev = chunkc;
         } else {
@@ -1083,7 +1086,7 @@ impl<A: Allocator> Dlmalloc<A> {
         if self.is_small(size) {
             self.unlink_small_chunk(chunk, size)
         } else {
-            self.unlink_large_chunk(chunk as *mut TreeChunk);
+            self.unlink_large_chunk(chunk.cast());
         }
     }
 
@@ -1178,7 +1181,7 @@ impl<A: Allocator> Dlmalloc<A> {
                 psize += prevsize + self.mmap_foot_pad();
                 if self
                     .system_allocator
-                    .free((p as *mut u8).offset(-(prevsize as isize)), psize)
+                    .free(p.cast::<u8>().sub(prevsize), psize)
                 {
                     self.footprint -= psize;
                 }
@@ -1236,7 +1239,7 @@ impl<A: Allocator> Dlmalloc<A> {
             self.insert_small_chunk(p, psize);
             self.check_free_chunk(p);
         } else {
-            self.insert_large_chunk(p as *mut TreeChunk, psize);
+            self.insert_large_chunk(p.cast(), psize);
             self.check_free_chunk(p);
             self.release_checks -= 1;
             if self.release_checks == 0 {
@@ -1256,7 +1259,7 @@ impl<A: Allocator> Dlmalloc<A> {
             if self.topsize > pad {
                 let unit = DEFAULT_GRANULARITY;
                 let extra = ((self.topsize - pad + unit - 1) / unit - 1) * unit;
-                let sp = self.segment_holding(self.top as *mut u8);
+                let sp = self.segment_holding(self.top.cast());
                 debug_assert!(!sp.is_null());
 
                 if !Segment::is_extern(sp) {
@@ -1296,7 +1299,7 @@ impl<A: Allocator> Dlmalloc<A> {
     unsafe fn has_segment_link(&self, ptr: *mut Segment) -> bool {
         let mut sp = &self.seg as *const Segment as *mut Segment;
         while !sp.is_null() {
-            if Segment::holds(ptr, sp as *mut u8) {
+            if Segment::holds(ptr, sp.cast()) {
                 return true;
             }
             sp = (*sp).next;
@@ -1308,7 +1311,7 @@ impl<A: Allocator> Dlmalloc<A> {
     unsafe fn release_unused_segments(&mut self) -> usize {
         let mut released = 0;
         let mut nsegs = 0;
-        let mut pred = &mut self.seg as *mut Segment;
+        let mut pred: *mut Segment = &mut self.seg;
         let mut sp = (*pred).next;
         while !sp.is_null() {
             let base = (*sp).base;
@@ -1321,11 +1324,11 @@ impl<A: Allocator> Dlmalloc<A> {
                 let psize = Chunk::size(p);
                 // We can unmap if the first chunk holds the entire segment and
                 // isn't pinned.
-                let chunk_top = (p as *mut u8).offset(psize as isize);
-                let top = base.offset((size - self.top_foot_size()) as isize);
+                let chunk_top = p.cast::<u8>().add(psize);
+                let top = base.add(size - self.top_foot_size());
                 if !Chunk::inuse(p) && chunk_top >= top {
-                    let tp = p as *mut TreeChunk;
-                    debug_assert!(Segment::holds(sp, sp as *mut u8));
+                    let tp = p.cast::<TreeChunk>();
+                    debug_assert!(Segment::holds(sp, sp.cast()));
                     if p == self.dv {
                         self.dv = ptr::null_mut();
                         self.dvsize = 0;
@@ -1371,7 +1374,7 @@ impl<A: Allocator> Dlmalloc<A> {
         if !cfg!(debug_assertions) {
             return;
         }
-        let sp = self.segment_holding(p as *mut u8);
+        let sp = self.segment_holding(p.cast());
         let sz = (*p).head & !INUSE;
         debug_assert!(!sp.is_null());
         debug_assert!(
@@ -1463,11 +1466,11 @@ impl<A: Allocator> Dlmalloc<A> {
         if !cfg!(debug_assertions) {
             return;
         }
-        for i in 0..NSMALLBINS {
-            self.check_smallbin(i as u32);
+        for i in 0..NSMALLBINS_U32 {
+            self.check_smallbin(i);
         }
-        for i in 0..NTREEBINS {
-            self.check_treebin(i as u32);
+        for i in 0..NTREEBINS_U32 {
+            self.check_treebin(i);
         }
         if self.dvsize != 0 {
             self.check_any_chunk(self.dv);
@@ -1538,7 +1541,7 @@ impl<A: Allocator> Dlmalloc<A> {
         debug_assert_eq!(tindex, idx);
         debug_assert!(tsize >= self.min_large_size());
         debug_assert!(tsize >= self.min_size_for_tree_index(idx));
-        debug_assert!(idx == NTREEBINS as u32 - 1 || tsize < self.min_size_for_tree_index(idx + 1));
+        debug_assert!(idx == NTREEBINS_U32 - 1 || tsize < self.min_size_for_tree_index(idx + 1));
 
         let mut u = t;
         let mut head = ptr::null_mut::<TreeChunk>();
@@ -1591,7 +1594,7 @@ impl<A: Allocator> Dlmalloc<A> {
     }
 
     fn min_size_for_tree_index(&self, idx: u32) -> usize {
-        let idx = idx as usize;
+        let idx = usize::try_from(idx).unwrap();
        (1 << ((idx >> 1) + TREEBIN_SHIFT)) | ((idx & 1) << ((idx >> 1) + TREEBIN_SHIFT - 1))
     }
 
@@ -1628,7 +1631,7 @@ impl<A: Allocator> Dlmalloc<A> {
                 return false;
             }
             let mut u = t;
-            let chunk = chunk as *mut TreeChunk;
+            let chunk = chunk.cast();
             loop {
                 if u == chunk {
                     return true;
@@ -1651,7 +1654,7 @@ impl<A: Allocator> Dlmalloc<A> {
 
     pub unsafe fn destroy(mut self) -> usize {
         let mut freed = 0;
-        let mut sp = &mut self.seg as *mut Segment;
+        let mut sp: *mut Segment = &mut self.seg;
         while !sp.is_null() {
             let base = (*sp).base;
             let size = (*sp).size;
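Aside: the `let mut sp: *mut Segment = &mut self.seg;` pattern above relies on the built-in `&mut T` → `*mut T` coercion being driven by the annotated binding type; a minimal sketch with a stand-in `Node` type (not the crate's `Segment`):

```rust
struct Node {
    #[allow(dead_code)]
    next: *mut Node,
}

fn main() {
    let mut head = Node { next: std::ptr::null_mut() };

    // Old style: an explicit `as` cast from `&mut Node` to `*mut Node`.
    let a = &mut head as *mut Node;
    // New style: annotate the binding and let the reference coerce to a raw pointer.
    let b: *mut Node = &mut head;

    assert_eq!(a, b);
}
```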
@@ -1682,11 +1685,11 @@ impl Chunk {
     }
 
     unsafe fn next(me: *mut Chunk) -> *mut Chunk {
-        (me as *mut u8).offset(((*me).head & !FLAG_BITS) as isize) as *mut Chunk
+        me.cast::<u8>().add((*me).head & !FLAG_BITS).cast()
     }
 
     unsafe fn prev(me: *mut Chunk) -> *mut Chunk {
-        (me as *mut u8).offset(-((*me).prev_foot as isize)) as *mut Chunk
+        me.cast::<u8>().sub((*me).prev_foot).cast()
     }
 
     unsafe fn cinuse(me: *mut Chunk) -> bool {
@@ -1745,7 +1748,7 @@ impl Chunk {
     }
 
     unsafe fn minus_offset(me: *mut Chunk, offset: usize) -> *mut Chunk {
-        me.cast::<u8>().offset(-(offset as isize)).cast()
+        me.cast::<u8>().sub(offset).cast()
     }
 
     unsafe fn to_mem(me: *mut Chunk) -> *mut u8 {
@@ -1757,7 +1760,7 @@ impl Chunk {
     }
 
     unsafe fn from_mem(mem: *mut u8) -> *mut Chunk {
-        mem.offset(-2 * (mem::size_of::<usize>() as isize)) as *mut Chunk
+        mem.sub(2 * mem::size_of::<usize>()).cast()
     }
 }
 
@@ -1776,11 +1779,11 @@ impl TreeChunk {
     }
 
     unsafe fn next(me: *mut TreeChunk) -> *mut TreeChunk {
-        (*TreeChunk::chunk(me)).next as *mut TreeChunk
+        (*TreeChunk::chunk(me)).next.cast()
     }
 
     unsafe fn prev(me: *mut TreeChunk) -> *mut TreeChunk {
-        (*TreeChunk::chunk(me)).prev as *mut TreeChunk
+        (*TreeChunk::chunk(me)).prev.cast()
     }
 }
 