@@ -18,11 +18,16 @@ import git from 'isomorphic-git';
 import * as fs from 'graceful-fs';
 import { Performance } from '@oclif/core/performance';
 import { isDefined } from '../guards';
+import { uniqueArrayConcat } from '../functions';
 import { isDeleted, isAdded, ensureWindows, toFilenames } from './functions';
-import { AddAndDeleteMaps, FilenameBasenameHash, StatusRow, StringMap } from './types';
+import { AddAndDeleteMaps, DetectionFileInfo, DetectionFileInfoWithType, StatusRow, StringMap } from './types';

 const JOIN_CHAR = '#__#'; // the __ makes it unlikely to be used in metadata names
-type AddAndDeleteFileInfos = { addedInfo: FilenameBasenameHash[]; deletedInfo: FilenameBasenameHash[] };
+type AddAndDeleteFileInfos = Readonly<{ addedInfo: DetectionFileInfo[]; deletedInfo: DetectionFileInfo[] }>;
+type AddAndDeleteFileInfosWithTypes = {
+  addedInfo: DetectionFileInfoWithType[];
+  deletedInfo: DetectionFileInfoWithType[];
+};
 type AddedAndDeletedFilenames = { added: Set<string>; deleted: Set<string> };
 type StringMapsForMatches = {
   /** these matches filename=>basename, metadata type/name, and git object hash */
@@ -37,10 +42,15 @@ export const filenameMatchesToMap =
   (registry: RegistryAccess) =>
   (projectPath: string) =>
   (gitDir: string) =>
-  async ({ added, deleted }: AddedAndDeletedFilenames): Promise<StringMapsForMatches> =>
-    excludeNonMatchingTypes(isWindows)(registry)(
-      compareHashes(
-        await buildMaps(registry)(
+  async ({ added, deleted }: AddedAndDeletedFilenames): Promise<StringMapsForMatches> => {
+    const resolver = getResolverForFilenames(registry)(
+      // TODO: use set.union when node 22 is everywhere
+      isWindows ? uniqueArrayConcat(added, deleted).map(ensureWindows) : uniqueArrayConcat(added, deleted)
+    );
+
+    return compareHashes(
+      await buildMaps(
+        addTypes(resolver)(
           await toFileInfo({
             projectPath,
             gitDir,
@@ -50,6 +60,7 @@ export const filenameMatchesToMap =
         )
       )
     );
+  };

 /** compare delete and adds from git.status, matching basenames of the files. returns early when there's nothing to match */
 export const getMatches = (status: StatusRow[]): AddedAndDeletedFilenames => {
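The uniqueArrayConcat helper imported above lives in '../functions' and is not shown in this diff, so its exact implementation is an assumption. Based on how it is called (two Set<string> in, an array that supports .map out), it presumably amounts to the Set.union the TODO mentions, flattened back to an array. A minimal sketch:

// Hedged sketch only; not the actual helper from '../functions'.
const uniqueArrayConcatSketch = <T>(a: Iterable<T>, b: Iterable<T>): T[] => [...new Set([...a, ...b])];

That keeps the combined added/deleted filenames deduplicated before they are handed to the metadata resolver in filenameMatchesToMap.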
@@ -89,29 +100,28 @@ export const getLogMessage = (matches: StringMapsForMatches): string =>
   ].join(EOL);

 /** build maps of the add/deletes with filenames, returning the matches Logs if we can't make a match because buildMap puts them in the ignored bucket */
-const buildMaps =
-  (registry: RegistryAccess) =>
-  async ({ addedInfo, deletedInfo }: AddAndDeleteFileInfos): Promise<AddAndDeleteMaps> => {
-    const [addedMap, addedIgnoredMap] = buildMap(registry)(addedInfo);
-    const [deletedMap, deletedIgnoredMap] = buildMap(registry)(deletedInfo);
+const buildMaps = async ({ addedInfo, deletedInfo }: AddAndDeleteFileInfosWithTypes): Promise<AddAndDeleteMaps> => {
+  const [addedMap, addedIgnoredMap] = buildMap(addedInfo);
+  const [deletedMap, deletedIgnoredMap] = buildMap(deletedInfo);

-    // If we detected any files that have the same basename and hash, emit a warning and send telemetry
-    // These files will still show up as expected in the `sf project deploy preview` output
-    // We could add more logic to determine and display filepaths that we ignored...
-    // but this is likely rare enough to not warrant the added complexity
-    // Telemetry will help us determine how often this occurs
-    if (addedIgnoredMap.size || deletedIgnoredMap.size) {
-      const message = 'Files were found that have the same basename and hash. Skipping the commit of these files';
-      const logger = Logger.childFromRoot('ShadowRepo.compareHashes');
-      logger.warn(message);
-      const lifecycle = Lifecycle.getInstance();
-      await Promise.all([
-        lifecycle.emitWarning(message),
-        lifecycle.emitTelemetry({ eventName: 'moveFileHashBasenameCollisionsDetected' }),
-      ]);
-    }
-    return { addedMap, deletedMap };
-  };
+  // If we detected any files that have the same basename and hash, emit a warning and send telemetry
+  // These files will still show up as expected in the `sf project deploy preview` output
+  // We could add more logic to determine and display filepaths that we ignored...
+  // but this is likely rare enough to not warrant the added complexity
+  // Telemetry will help us determine how often this occurs
+  if (addedIgnoredMap.size || deletedIgnoredMap.size) {
+    const message =
+      'Files were found that have the same basename, hash, metadata type, and parent. Skipping the commit of these files';
+    const logger = Logger.childFromRoot('ShadowRepo.compareHashes');
+    logger.warn(message);
+    const lifecycle = Lifecycle.getInstance();
+    await Promise.all([
+      lifecycle.emitWarning(message),
+      lifecycle.emitTelemetry({ eventName: 'moveFileHashBasenameCollisionsDetected' }),
+    ]);
+  }
+  return { addedMap, deletedMap };
+};

 /**
  * builds a map of the values from both maps
@@ -123,7 +133,7 @@ const compareHashes = ({ addedMap, deletedMap }: AddAndDeleteMaps): StringMapsForMatches
       .map(([addedKey, addedValue]) => {
         const deletedValue = deletedMap.get(addedKey);
         if (deletedValue) {
-          // these are an exact basename and hash match
+          // these are an exact basename + hash match + parent + type
           deletedMap.delete(addedKey);
           addedMap.delete(addedKey);
           return [addedValue, deletedValue] as const;
@@ -134,56 +144,19 @@ const compareHashes = ({ addedMap, deletedMap }: AddAndDeleteMaps): StringMapsForMatches

   if (addedMap.size && deletedMap.size) {
     // the remaining deletes didn't match the basename+hash of an add, and vice versa.
-    // They *might* match the basename of an add, in which case we *could* have the "move, then edit" case.
-    // the entry might be sha,basename OR sha,basename,type,parent
-    const addedBasenameMap = new Map(
-      [...addedMap.entries()].filter(hashEntryHasNoTypeInformation).map(hashEntryToBasenameEntry)
-    );
-    const deletedBasenameMap = new Map(
-      [...deletedMap.entries()].filter(hashEntryHasNoTypeInformation).map(hashEntryToBasenameEntry)
-    );
+    // They *might* match the basename,type,parent of an add, in which case we *could* have the "move, then edit" case.
+    const addedMapNoHash = new Map([...addedMap.entries()].map(removeHashFromEntry));
+    const deletedMapNoHash = new Map([...deletedMap.entries()].map(removeHashFromEntry));
     const deleteOnly = new Map<string, string>(
-      Array.from(deletedBasenameMap.entries())
-        .filter(([k]) => addedBasenameMap.has(k))
-        .map(([k, v]) => [addedBasenameMap.get(k) as string, v])
+      Array.from(deletedMapNoHash.entries())
+        .filter(([k]) => addedMapNoHash.has(k))
+        .map(([k, v]) => [addedMapNoHash.get(k) as string, v])
     );
     return { fullMatches: matches, deleteOnly };
   }
   return { fullMatches: matches, deleteOnly: new Map<string, string>() };
 };

-/** given a StringMap, resolve the metadata types and return things that having matching type/parent */
-const excludeNonMatchingTypes =
-  (isWindows: boolean) =>
-  (registry: RegistryAccess) =>
-  ({ fullMatches: matches, deleteOnly }: StringMapsForMatches): StringMapsForMatches => {
-    if (!matches.size && !deleteOnly.size) return { fullMatches: matches, deleteOnly };
-    const [resolvedAdded, resolvedDeleted] = [
-      [...matches.keys(), ...deleteOnly.keys()], // the keys/values are only used for the resolver, so we use 1 for both add and delete
-      [...matches.values(), ...deleteOnly.values()],
-    ]
-      .map((filenames) => (isWindows ? filenames.map(ensureWindows) : filenames))
-      .map(getResolverForFilenames(registry))
-      .map(resolveType);
-
-    return {
-      fullMatches: new Map([...matches.entries()].filter(typeFilter(isWindows)(resolvedAdded, resolvedDeleted))),
-      deleteOnly: new Map([...deleteOnly.entries()].filter(typeFilter(isWindows)(resolvedAdded, resolvedDeleted))),
-    };
-  };
-
-const typeFilter =
-  (isWindows: boolean) =>
-  (resolveAdd: ReturnType<typeof resolveType>, resolveDelete: ReturnType<typeof resolveType>) =>
-  ([added, deleted]: [string, string]): boolean => {
-    const [resolvedAdded] = resolveAdd(isWindows ? [ensureWindows(added)] : [added]);
-    const [resolvedDeleted] = resolveDelete(isWindows ? [ensureWindows(deleted)] : [deleted]);
-    return (
-      resolvedAdded?.type.name === resolvedDeleted?.type.name &&
-      resolvedAdded?.parent?.name === resolvedDeleted?.parent?.name &&
-      resolvedAdded?.parent?.type.name === resolvedDeleted?.parent?.type.name
-    );
-  };
 /** enrich the filenames with basename and oid (hash) */
 const toFileInfo = async ({
   projectPath,
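To make the new "move, then edit" fallback concrete: a full match requires the entire key, hash included, to be equal, while removeHashFromEntry strips the leading hash segment so basename, type, and parent can still line up. A small self-contained illustration, with invented hash values and the key layout of the toKey added further down in this diff (dropHash here merely mirrors removeHashFromKey):

// Hypothetical keys for an ApexClass that was moved and then edited (hashes invented):
const addedKey = ['bbb222', 'Foo.cls', 'ApexClass', 'ApexClass', '', ''].join('#__#');
const deletedKey = ['aaa111', 'Foo.cls', 'ApexClass', 'ApexClass', '', ''].join('#__#');
console.log(addedKey === deletedKey); // false: not a full match because the hashes differ
// dropping the hash segment, as removeHashFromKey does, makes the rest comparable:
const dropHash = (key: string): string => key.split('#__#').splice(1).join('#__#');
console.log(dropHash(addedKey) === dropHash(deletedKey)); // true: candidate for the deleteOnly map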
@@ -214,51 +187,30 @@ const toFileInfo = async ({
 };

 /** returns a map of <hash+basename, filepath>. If two items result in the same hash+basename, return that in the ignore bucket */
-const buildMap =
-  (registry: RegistryAccess) =>
-  (info: FilenameBasenameHash[]): StringMap[] => {
-    const map: StringMap = new Map();
-    const ignore: StringMap = new Map();
-    const ignored: FilenameBasenameHash[] = []; // a raw array so that we don't lose uniqueness when the key matches like a map would
-
-    info.map((i) => {
-      const key = toKey(i);
-      // If we find a duplicate key, we need to remove it and ignore it in the future.
-      // Finding duplicate hash#basename means that we cannot accurately determine where it was moved to or from
-      if (map.has(key) || ignore.has(key)) {
-        map.delete(key);
-        ignore.set(key, i.filename);
-        ignored.push(i);
-      } else {
-        map.set(key, i.filename);
-      }
-    });
+export const buildMap = (info: DetectionFileInfoWithType[]): StringMap[] => {
+  const map: StringMap = new Map();
+  const ignore: StringMap = new Map();
+  const ignored: DetectionFileInfo[] = []; // a raw array so that we don't lose uniqueness when the key matches like a map would

-    if (!ignore.size) return [map, ignore];
-
-    // we may be able to differentiate ignored child types by their parent instead of ignoring them. We'll add the type and parent name to the key
-    // ex: All.ListView-meta.xml that have the same name and hash
-    const resolver = getResolverForFilenames(registry)(ignored.map((i) => i.filename));
-    ignored
-      .flatMap((i) =>
-        resolveType(resolver)([i.filename]).map((cmp) => ({
-          filename: i.filename,
-          simpleKey: toKey(i),
-          cmp,
-        }))
-      )
-      .filter(({ cmp }) => cmp.type.name && cmp.parent?.fullName)
-      .map(({ cmp, filename, simpleKey: key }) => {
-        map.set(`${key}${JOIN_CHAR}${cmp.type.name}${JOIN_CHAR}${cmp.parent?.fullName}`, filename);
-        ignore.delete(key);
-      });
+  info.map((i) => {
+    const key = toKey(i);
+    // If we find a duplicate key, we need to remove it and ignore it in the future.
+    // Finding duplicate hash#basename means that we cannot accurately determine where it was moved to or from
+    if (map.has(key) || ignore.has(key)) {
+      map.delete(key);
+      ignore.set(key, i.filename);
+      ignored.push(i);
+    } else {
+      map.set(key, i.filename);
+    }
+  });

-    return [map, ignore];
-  };
+  return [map, ignore];
+};

 const getHashForAddedFile =
   (projectPath: string) =>
-  async (filepath: string): Promise<FilenameBasenameHash> => ({
+  async (filepath: string): Promise<DetectionFileInfo> => ({
     filename: filepath,
     basename: path.basename(filepath),
     hash: (await git.hashBlob({ object: await fs.promises.readFile(path.join(projectPath, filepath)) })).oid,
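As a hedged illustration of why the richer key matters (filenames and hashes below are invented, and the import path is hypothetical): two child components that previously collided on hash + basename now stay distinct because type and parent are part of the key, so they no longer fall into the ignore bucket.

import { buildMap } from './moveDetection'; // hypothetical path to the module in this diff

const [map, ignore] = buildMap([
  { filename: 'objects/Account/listViews/All.listView-meta.xml', basename: 'All.listView-meta.xml', hash: 'h1', type: 'ListView', parentType: 'CustomObject', parentFullName: 'Account' },
  { filename: 'objects/Case/listViews/All.listView-meta.xml', basename: 'All.listView-meta.xml', hash: 'h1', type: 'ListView', parentType: 'CustomObject', parentFullName: 'Case' },
]);
// map gets two entries (the parentFullName segment differs, so the keys differ); ignore stays empty.
// Only files whose entire key collides end up in the ignore bucket and trigger the buildMaps warning.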
@@ -284,19 +236,44 @@ const getHashFromActualFileContents =
   (gitdir: string) =>
   (projectPath: string) =>
   (oid: string) =>
-  async (filepath: string): Promise<FilenameBasenameHash> => ({
+  async (filepath: string): Promise<DetectionFileInfo> => ({
     filename: filepath,
     basename: path.basename(filepath),
     hash: (await git.readBlob({ fs, dir: projectPath, gitdir, filepath, oid })).oid,
   });

-const toKey = (input: FilenameBasenameHash): string => `${input.hash}${JOIN_CHAR}${input.basename}`;
+export const toKey = (input: DetectionFileInfoWithType): string =>
+  [input.hash, input.basename, input.type, input.type, input.parentType ?? '', input.parentFullName ?? ''].join(
+    JOIN_CHAR
+  );

-const hashEntryToBasenameEntry = ([k, v]: [string, string]): [string, string] => [hashToBasename(k), v];
-const hashToBasename = (hash: string): string => hash.split(JOIN_CHAR)[1];
-const hashEntryHasNoTypeInformation = ([k]: [string, string]): boolean => k.split(JOIN_CHAR).length === 2;
+const removeHashFromEntry = ([k, v]: [string, string]): [string, string] => [removeHashFromKey(k), v];
+const removeHashFromKey = (hash: string): string => hash.split(JOIN_CHAR).splice(1).join(JOIN_CHAR);

 const getResolverForFilenames =
   (registry: RegistryAccess) =>
   (filenames: string[]): MetadataResolver =>
     new MetadataResolver(registry, VirtualTreeContainer.fromFilePaths(filenames));
+
+/** resolve the metadata types (and possibly parent components) */
+const addTypes =
+  (resolver: MetadataResolver) =>
+  (info: AddAndDeleteFileInfos): AddAndDeleteFileInfosWithTypes => {
+    // quick passthrough if we don't have adds and deletes
+    if (!info.addedInfo.length || !info.deletedInfo.length) return { addedInfo: [], deletedInfo: [] };
+    const applied = getTypesForFileInfo(resolveType(resolver));
+    return {
+      addedInfo: info.addedInfo.flatMap(applied),
+      deletedInfo: info.deletedInfo.flatMap(applied),
+    };
+  };
+
+const getTypesForFileInfo =
+  (appliedResolver: (filenames: string[]) => SourceComponent[]) =>
+  (fileInfo: DetectionFileInfo): DetectionFileInfoWithType[] =>
+    appliedResolver([fileInfo.filename]).map((c) => ({
+      ...fileInfo,
+      type: c.type.name,
+      parentType: c.parent?.type.name ?? '',
+      parentFullName: c.parent?.fullName ?? '',
+    }));
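For reference, this is the kind of key the new toKey produces, given a sample DetectionFileInfoWithType (the values and import path below are invented for illustration; note that input.type is written into the key twice, exactly as the join above lists it):

import { toKey } from './moveDetection'; // hypothetical path

const key = toKey({
  filename: 'force-app/main/default/objects/Account/listViews/All.listView-meta.xml',
  basename: 'All.listView-meta.xml',
  hash: 'abc123',
  type: 'ListView',
  parentType: 'CustomObject',
  parentFullName: 'Account',
});
// => 'abc123#__#All.listView-meta.xml#__#ListView#__#ListView#__#CustomObject#__#Account'
// compareHashes treats equal keys as a full match; removeHashFromKey drops the leading
// 'abc123#__#' segment when looking for "moved, then edited" candidates.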