@@ -6,25 +6,14 @@ use hashbrown::raw::RawTable;
 use std::hash::Hasher;
 use std::ops::Deref;
 
+#[derive(Default)]
 pub(crate) struct Delegate {
     changes: Vec<Change>,
-    resource_cache: gix::diff::blob::Platform,
     /// All changes that happen within a file, along the line-number it happens in.
     per_file_changes: Vec<(usize, Change)>,
     err: Option<Error>,
 }
 
-impl Delegate {
-    pub(crate) fn new(resource_cache: gix::diff::blob::Platform) -> Self {
-        Delegate {
-            resource_cache,
-            changes: Default::default(),
-            per_file_changes: Default::default(),
-            err: None,
-        }
-    }
-}
-
 impl Delegate {
     pub fn handle(
         &mut self,
@@ -76,82 +65,73 @@ impl Delegate {
                     });
                 }
             }
-            Modification { entry_mode, .. } => {
+            Modification {
+                entry_mode,
+                previous_id,
+                id,
+                ..
+            } => {
                 if entry_mode.is_blob() {
-                    self.resource_cache.clear_resource_cache();
-                    let platform = change.diff(&mut self.resource_cache)?;
-                    if let Some((old, new)) = platform.resource_cache.resources() {
-                        let mut old_lines = AHashSet::with_capacity(1024);
-                        let location = change.location;
-                        for (number, line) in old
-                            .data
-                            .as_slice()
-                            .expect("present in modification")
-                            .lines()
-                            .enumerate()
-                        {
-                            old_lines.insert(Line(number, line));
-                        }
+                    let old = previous_id.object()?.into_blob();
+                    let new = id.object()?.into_blob();
+                    let mut old_lines = AHashSet::with_capacity(1024);
+                    let location = change.location;
+                    for (number, line) in old.data.lines().enumerate() {
+                        old_lines.insert(Line(number, line));
+                    }
 
-                        // A RawTable is used to represent a Checksum -> CrateVersion map
-                        // because the checksum is already stored in the CrateVersion
-                        // and we want to avoid storing the checksum twice for performance reasons
-                        let mut new_versions = RawTable::with_capacity(old_lines.len().min(1024));
-                        let hasher = RandomState::new();
+                    // A RawTable is used to represent a Checksum -> CrateVersion map
+                    // because the checksum is already stored in the CrateVersion
+                    // and we want to avoid storing the checksum twice for performance reasons
+                    let mut new_versions = RawTable::with_capacity(old_lines.len().min(1024));
+                    let hasher = RandomState::new();
 
-                        for (number, line) in new
-                            .data
-                            .as_slice()
-                            .expect("present in modification")
-                            .lines()
-                            .enumerate()
-                        {
-                            // first quickly check if the exact same line is already present in this file in that case we don't need to do anything else
-                            if old_lines.remove(&Line(number, line)) {
-                                continue;
-                            }
-                            // no need to check if the checksum already exists in the hashmap
-                            // as each checksum appears only once
-                            let new_version = version_from_json_line(line, location)?;
-                            new_versions.insert(
-                                hasher.hash_one(new_version.checksum),
-                                (number, new_version),
-                                |rehashed| hasher.hash_one(rehashed.1.checksum),
-                            );
+                    for (number, line) in new.data.lines().enumerate() {
+                        // first quickly check if the exact same line is already present in this file in that case we don't need to do anything else
+                        if old_lines.remove(&Line(number, line)) {
+                            continue;
                         }
+                        // no need to check if the checksum already exists in the hashmap
+                        // as each checksum appears only once
+                        let new_version = version_from_json_line(line, location)?;
+                        new_versions.insert(
+                            hasher.hash_one(new_version.checksum),
+                            (number, new_version),
+                            |rehashed| hasher.hash_one(rehashed.1.checksum),
+                        );
+                    }
 
-                        for line in old_lines.drain() {
-                            let old_version = version_from_json_line(&line, location)?;
-                            let new_version = new_versions
-                                .remove_entry(hasher.hash_one(old_version.checksum), |version| {
-                                    version.1.checksum == old_version.checksum
-                                });
-                            match new_version {
-                                Some((_, new_version)) => {
-                                    let change = match (old_version.yanked, new_version.yanked) {
-                                        (true, false) => Change::Unyanked(new_version),
-                                        (false, true) => Change::Yanked(new_version),
-                                        _ => continue,
-                                    };
-                                    self.per_file_changes.push((line.0, change))
-                                }
-                                None => self
-                                    .per_file_changes
-                                    .push((line.0, Change::VersionDeleted(old_version))),
+                    for line in old_lines.drain() {
+                        let old_version = version_from_json_line(&line, location)?;
+                        let new_version = new_versions
+                            .remove_entry(hasher.hash_one(old_version.checksum), |version| {
+                                version.1.checksum == old_version.checksum
+                            });
+                        match new_version {
+                            Some((_, new_version)) => {
+                                let change = match (old_version.yanked, new_version.yanked) {
+                                    (true, false) => Change::Unyanked(new_version),
+                                    (false, true) => Change::Yanked(new_version),
+                                    _ => continue,
+                                };
+                                self.per_file_changes.push((line.0, change))
                             }
+                            None => self
+                                .per_file_changes
+                                .push((line.0, Change::VersionDeleted(old_version))),
                         }
-                        for (number, version) in new_versions.drain() {
-                            let change = if version.yanked {
-                                Change::AddedAndYanked(version)
-                            } else {
-                                Change::Added(version)
-                            };
-                            self.per_file_changes.push((number, change));
-                        }
-                        self.per_file_changes.sort_by_key(|t| t.0);
-                        self.changes
-                            .extend(self.per_file_changes.drain(..).map(|t| t.1));
                     }
+                    for (number, version) in new_versions.drain() {
+                        let change = if version.yanked {
+                            Change::AddedAndYanked(version)
+                        } else {
+                            Change::Added(version)
+                        };
+                        self.per_file_changes.push((number, change));
+                    }
+                    self.per_file_changes.sort_by_key(|t| t.0);
+                    self.changes
+                        .extend(self.per_file_changes.drain(..).map(|t| t.1));
                 }
             }
         }
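
The checksum-keyed RawTable in the added code works as a Checksum -> (line number, CrateVersion) map without storing the checksum a second time as a key. Below is a minimal, self-contained sketch of that pattern, not the crate's own code: it assumes a hashbrown version that still exposes the raw-table API (e.g. 0.14 with the `raw` feature) and uses std's RandomState for hashing (the diff does not show which RandomState the crate imports); `Record` and its fields are illustrative stand-ins.

// Sketch only: a RawTable keyed by the hash of a field that already lives in
// the stored value, so the key is never duplicated.
use hashbrown::raw::RawTable;
use std::collections::hash_map::RandomState;
use std::hash::BuildHasher;

#[derive(Debug)]
struct Record {
    checksum: [u8; 4], // stand-in for the crate version's checksum
    yanked: bool,
}

fn main() {
    let hasher = RandomState::new();
    let mut table: RawTable<(usize, Record)> = RawTable::with_capacity(16);

    let record = Record { checksum: [1, 2, 3, 4], yanked: false };
    // Insert under the hash of the checksum; the closure tells the table
    // how to re-hash stored entries when it has to grow.
    table.insert(
        hasher.hash_one(record.checksum),
        (0, record),
        |entry| hasher.hash_one(entry.1.checksum),
    );

    // Remove by re-hashing the probe key and comparing checksums for equality,
    // mirroring the `remove_entry` call in the diff above.
    let needle = [1, 2, 3, 4];
    let removed = table.remove_entry(hasher.hash_one(needle), |entry| entry.1.checksum == needle);
    println!("{removed:?}");
}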