@@ -17,6 +17,8 @@ use unstable::raw;
 
 type DropGlue<'a> = 'a |**TyDesc, *c_void|;
 
+static RC_IMMORTAL: uint = 0x77777777;
+
 /*
  * Box annihilation
  *
@@ -25,24 +27,21 @@ type DropGlue<'a> = 'a |**TyDesc, *c_void|;
 
 struct AnnihilateStats {
     n_total_boxes: uint,
-    n_unique_boxes: uint,
     n_bytes_freed: uint
 }
 
 unsafe fn each_live_alloc(read_next_before: bool,
-                          f: |alloc: *mut raw::Box<()>, uniq: bool| -> bool)
+                          f: |alloc: *mut raw::Box<()>| -> bool)
                           -> bool {
     //! Walks the internal list of allocations
 
-    use managed;
     use rt::local_heap;
 
     let mut alloc = local_heap::live_allocs();
     while alloc != ptr::mut_null() {
         let next_before = (*alloc).next;
-        let uniq = (*alloc).ref_count == managed::RC_MANAGED_UNIQUE;
 
-        if !f(alloc, uniq) {
+        if !f(alloc) {
             return false;
         }
 
@@ -70,25 +69,19 @@ fn debug_mem() -> bool {
 pub unsafe fn annihilate() {
     use rt::local_heap::local_free;
     use mem;
-    use managed;
 
     let mut stats = AnnihilateStats {
         n_total_boxes: 0,
-        n_unique_boxes: 0,
         n_bytes_freed: 0
     };
 
     // Pass 1: Make all boxes immortal.
     //
     // In this pass, nothing gets freed, so it does not matter whether
     // we read the next field before or after the callback.
-    each_live_alloc(true, |alloc, uniq| {
+    each_live_alloc(true, |alloc| {
         stats.n_total_boxes += 1;
-        if uniq {
-            stats.n_unique_boxes += 1;
-        } else {
-            (*alloc).ref_count = managed::RC_IMMORTAL;
-        }
+        (*alloc).ref_count = RC_IMMORTAL;
         true
     });
 
@@ -97,12 +90,10 @@ pub unsafe fn annihilate() {
     // In this pass, unique-managed boxes may get freed, but not
     // managed boxes, so we must read the `next` field *after* the
     // callback, as the original value may have been freed.
-    each_live_alloc(false, |alloc, uniq| {
-        if !uniq {
-            let tydesc = (*alloc).type_desc;
-            let data = &(*alloc).data as *();
-            ((*tydesc).drop_glue)(data as *i8);
-        }
+    each_live_alloc(false, |alloc| {
+        let tydesc = (*alloc).type_desc;
+        let data = &(*alloc).data as *();
+        ((*tydesc).drop_glue)(data as *i8);
         true
     });
 
@@ -112,22 +103,19 @@ pub unsafe fn annihilate() {
     // unique-managed boxes, though I think that none of those are
     // left), so we must read the `next` field before, since it will
     // not be valid after.
-    each_live_alloc(true, |alloc, uniq| {
-        if !uniq {
-            stats.n_bytes_freed +=
-                (*((*alloc).type_desc)).size
-                + mem::size_of::<raw::Box<()>>();
-            local_free(alloc as *i8);
-        }
+    each_live_alloc(true, |alloc| {
+        stats.n_bytes_freed +=
+            (*((*alloc).type_desc)).size
+            + mem::size_of::<raw::Box<()>>();
+        local_free(alloc as *i8);
         true
     });
 
     if debug_mem() {
         // We do logging here w/o allocation.
         debug!("annihilator stats:\n \
                 total boxes: {}\n \
-                unique boxes: {}\n \
                 bytes freed: {}",
-               stats.n_total_boxes, stats.n_unique_boxes, stats.n_bytes_freed);
+               stats.n_total_boxes, stats.n_bytes_freed);
     }
 }
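
The net effect of the patch is that annihilation no longer special-cases unique boxes: every box still on the local heap's list is pinned, has its drop glue run, and is then freed. Below is a minimal, self-contained sketch of that three-pass scheme in modern Rust; `Heap`, `Allocation`, the `live` flag, and the pass bodies are illustrative stand-ins (assumptions of this sketch), not the runtime's actual `local_heap`/`TyDesc` machinery:

```rust
// A toy model of the three-pass annihilation above. `Allocation` plays
// the role of the raw::Box header, `Heap` the task-local heap, and
// `live` the presence of a box on the heap's allocation list.

const RC_IMMORTAL: usize = 0x7777_7777;

struct Allocation {
    ref_count: usize,
    size: usize,
    live: bool,
}

struct Heap {
    allocs: Vec<Allocation>,
}

impl Heap {
    /// Walk every allocation still on the list (cf. `each_live_alloc`).
    fn each_live_alloc(&mut self, f: impl FnMut(&mut Allocation)) {
        self.allocs.iter_mut().filter(|a| a.live).for_each(f);
    }
}

/// Returns (total boxes seen, bytes freed), mirroring `AnnihilateStats`.
fn annihilate(heap: &mut Heap) -> (usize, usize) {
    let mut n_total_boxes = 0;
    let mut n_bytes_freed = 0;

    // Pass 1: pin every box with the immortal sentinel so that drop
    // glue running in pass 2 cannot free a box we still need to visit.
    heap.each_live_alloc(|alloc| {
        n_total_boxes += 1;
        alloc.ref_count = RC_IMMORTAL;
    });

    // Pass 2: run destructors; the real code calls the type
    // descriptor's drop glue on the box payload here. This sketch just
    // checks that pass 1 pinned the box.
    heap.each_live_alloc(|alloc| {
        assert_eq!(alloc.ref_count, RC_IMMORTAL);
    });

    // Pass 3: release the memory and account for the bytes freed.
    heap.each_live_alloc(|alloc| {
        n_bytes_freed += alloc.size;
        alloc.live = false; // stands in for local_free(alloc as *i8)
    });

    (n_total_boxes, n_bytes_freed)
}

fn main() {
    let mut heap = Heap {
        allocs: vec![
            Allocation { ref_count: 1, size: 24, live: true },
            Allocation { ref_count: 3, size: 40, live: true },
        ],
    };
    let (boxes, bytes) = annihilate(&mut heap);
    // Matches the shape of the debug! output after the patch.
    println!("annihilator stats: total boxes: {}, bytes freed: {}", boxes, bytes);
}
```

Pinning in pass 1 is what makes pass 2 safe: drop glue may touch reference counts of boxes still on the list, and the `RC_IMMORTAL` sentinel keeps them from being freed out from under the walk, so actual deallocation can be deferred to a single sweep in pass 3.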