 * For full license text, see LICENSE.txt file in the repo root or https://opensource.org/licenses/BSD-3-Clause
 */
import path from 'node:path';
+ import { EOL } from 'node:os';
import { Logger, Lifecycle } from '@salesforce/core';
import {
  MetadataResolver,
@@ -15,22 +16,29 @@ import {
// @ts-expect-error isogit has both ESM and CJS exports but node16 module/resolution identifies it as ESM
import git from 'isomorphic-git';
import * as fs from 'graceful-fs';
- import { Performance } from '@oclif/core';
+ import { Performance } from '@oclif/core/performance';
import { sourceComponentGuard } from '../guards';
import { isDeleted, isAdded, ensureWindows, toFilenames } from './functions';
import { AddAndDeleteMaps, FilenameBasenameHash, StatusRow, StringMap } from './types';

+ const JOIN_CHAR = '#__#'; // the __ makes it unlikely to be used in metadata names
type AddAndDeleteFileInfos = { addedInfo: FilenameBasenameHash[]; deletedInfo: FilenameBasenameHash[] };
type AddedAndDeletedFilenames = { added: Set<string>; deleted: Set<string> };
+ type StringMapsForMatches = {
+   /** these match filename => basename, metadata type/name, and git object hash */
+   fullMatches: StringMap;
+   /** these did not match the hash. They *probably* are matches where the "add" is also modified */
+   deleteOnly: StringMap;
+ };

/** composed functions to simplified use by the shadowRepo class */
export const filenameMatchesToMap =
  (isWindows: boolean) =>
  (registry: RegistryAccess) =>
  (projectPath: string) =>
  (gitDir: string) =>
-   async ({ added, deleted }: AddedAndDeletedFilenames): Promise<StringMap> =>
-     removeNonMatches(isWindows)(registry)(
+   async ({ added, deleted }: AddedAndDeletedFilenames): Promise<StringMapsForMatches> =>
+     excludeNonMatchingTypes(isWindows)(registry)(
      compareHashes(
        await buildMaps(
          await toFileInfo({
@@ -73,7 +81,14 @@ export const getMatches = (status: StatusRow[]): AddedAndDeletedFilenames => {
  return { added: addedFilenamesWithMatches, deleted: deletedFilenamesWithMatches };
};

- /** build maps of the add/deletes with filenames, returning the matches Logs if non-matches */
+ export const getLogMessage = (matches: StringMapsForMatches): string =>
+   [
+     'Files have moved. Committing moved files:',
+     ...[...matches.fullMatches.entries()].map(([add, del]) => `- File ${del} was moved to ${add}`),
+     ...[...matches.deleteOnly.entries()].map(([add, del]) => `- File ${del} was moved to ${add} and modified`),
+   ].join(EOL);
+
+ /** build maps of the add/deletes with filenames, returning the matches. Logs if we can't make a match because buildMap puts them in the ignored bucket */
const buildMaps = async ({ addedInfo, deletedInfo }: AddAndDeleteFileInfos): Promise<AddAndDeleteMaps> => {
  const [addedMap, addedIgnoredMap] = buildMap(addedInfo);
  const [deletedMap, deletedIgnoredMap] = buildMap(deletedInfo);
@@ -96,51 +111,70 @@ const buildMaps = async ({ addedInfo, deletedInfo }: AddAndDeleteFileInfos): Pro
  return { addedMap, deletedMap };
};

- /** builds a map of the values from both maps */
- const compareHashes = ({ addedMap, deletedMap }: AddAndDeleteMaps): StringMap => {
+ /**
+  * builds a map of the values from both maps
+  * side effect: mutates the passed-in maps!
+  */
+ const compareHashes = ({ addedMap, deletedMap }: AddAndDeleteMaps): StringMapsForMatches => {
  const matches: StringMap = new Map();

-   for (const [addedKey, addedValue] of addedMap) {
+   [...addedMap.entries()].map(([addedKey, addedValue]) => {
    const deletedValue = deletedMap.get(addedKey);
    if (deletedValue) {
+       // these are an exact basename and hash match
      matches.set(addedValue, deletedValue);
+       deletedMap.delete(addedKey);
+       addedMap.delete(addedKey);
    }
-   }
+   });

-   return matches;
+   if (addedMap.size && deletedMap.size) {
+     // the remaining deletes didn't match the basename+hash of an add, and vice versa.
+     // They *might* match the basename of an add, in which case we *could* have the "move, then edit" case.
+     const addedBasenameMap = new Map([...addedMap.entries()].map(hashEntryToBasenameEntry));
+     const deletedBasenameMap = new Map([...deletedMap.entries()].map(hashEntryToBasenameEntry));
+     const deleteOnly = new Map<string, string>(
+       Array.from(deletedBasenameMap.entries())
+         .filter(([k]) => addedBasenameMap.has(k))
+         .map(([k, v]) => [addedBasenameMap.get(k) as string, v])
+     );
+     return { fullMatches: matches, deleteOnly };
+   }
+   return { fullMatches: matches, deleteOnly: new Map<string, string>() };
};

/** given a StringMap, resolve the metadata types and return things that having matching type/parent */
- const removeNonMatches =
+ const excludeNonMatchingTypes =
  (isWindows: boolean) =>
  (registry: RegistryAccess) =>
-   (matches: StringMap): StringMap => {
-     if (!matches.size) return matches;
-     const addedFiles = isWindows ? [...matches.keys()].map(ensureWindows) : [...matches.keys()];
-     const deletedFiles = isWindows ? [...matches.values()].map(ensureWindows) : [...matches.values()];
-     const resolverAdded = new MetadataResolver(registry, VirtualTreeContainer.fromFilePaths(addedFiles));
-     const resolverDeleted = new MetadataResolver(registry, VirtualTreeContainer.fromFilePaths(deletedFiles));
-
-     return new Map(
-       [...matches.entries()].filter(([addedFile, deletedFile]) => {
-         // we're only ever using the first element of the arrays
-         const [resolvedAdded] = resolveType(resolverAdded, isWindows ? [ensureWindows(addedFile)] : [addedFile]);
-         const [resolvedDeleted] = resolveType(
-           resolverDeleted,
-           isWindows ? [ensureWindows(deletedFile)] : [deletedFile]
-         );
-         return (
-           // they could match, or could both be undefined (because unresolved by SDR)
-           resolvedAdded?.type.name === resolvedDeleted?.type.name &&
-           // parent names match, if resolved and there are parents
-           resolvedAdded?.parent?.name === resolvedDeleted?.parent?.name &&
-           // parent types match, if resolved and there are parents
-           resolvedAdded?.parent?.type.name === resolvedDeleted?.parent?.type.name
-         );
-       })
-     );
+   ({ fullMatches: matches, deleteOnly }: StringMapsForMatches): StringMapsForMatches => {
+     if (!matches.size && !deleteOnly.size) return { fullMatches: matches, deleteOnly };
+     const [resolvedAdded, resolvedDeleted] = [
+       [...matches.keys(), ...deleteOnly.keys()], // the keys/values are only used for the resolver, so we use 1 for both add and delete
+       [...matches.values(), ...deleteOnly.values()],
+     ]
+       .map((filenames) => filenames.map(isWindows ? ensureWindows : stringNoOp))
+       .map((filenames) => new MetadataResolver(registry, VirtualTreeContainer.fromFilePaths(filenames)))
+       .map(resolveType);
+
+     return {
+       fullMatches: new Map([...matches.entries()].filter(typeFilter(isWindows)(resolvedAdded, resolvedDeleted))),
+       deleteOnly: new Map([...deleteOnly.entries()].filter(typeFilter(isWindows)(resolvedAdded, resolvedDeleted))),
+     };
};

+ const typeFilter =
+   (isWindows: boolean) =>
+   (resolveAdd: ReturnType<typeof resolveType>, resolveDelete: ReturnType<typeof resolveType>) =>
+   ([added, deleted]: [string, string]): boolean => {
+     const [resolvedAdded] = resolveAdd(isWindows ? [ensureWindows(added)] : [added]);
+     const [resolvedDeleted] = resolveDelete(isWindows ? [ensureWindows(deleted)] : [deleted]);
+     return (
+       resolvedAdded?.type.name === resolvedDeleted?.type.name &&
+       resolvedAdded?.parent?.name === resolvedDeleted?.parent?.name &&
+       resolvedAdded?.parent?.type.name === resolvedDeleted?.parent?.type.name
+     );
+   };
/** enrich the filenames with basename and oid (hash) */
const toFileInfo = async ({
  projectPath,
@@ -170,11 +204,12 @@ const toFileInfo = async ({
  return { addedInfo, deletedInfo };
};

+ /** returns a map of <hash+basename, filepath>. If two items result in the same hash+basename, return that in the ignore bucket */
const buildMap = (info: FilenameBasenameHash[]): StringMap[] => {
  const map: StringMap = new Map();
  const ignore: StringMap = new Map();
  info.map((i) => {
-     const key = `${i.hash}#${i.basename}`;
+     const key = `${i.hash}${JOIN_CHAR}${i.basename}`;
    // If we find a duplicate key, we need to remove it and ignore it in the future.
    // Finding duplicate hash#basename means that we cannot accurately determine where it was moved to or from
    if (map.has(key) || ignore.has(key)) {
@@ -195,18 +230,20 @@ const getHashForAddedFile =
    hash: (await git.hashBlob({ object: await fs.promises.readFile(path.join(projectPath, filepath)) })).oid,
  });

- const resolveType = (resolver: MetadataResolver, filenames: string[]): SourceComponent[] =>
-   filenames
-     .flatMap((filename) => {
-       try {
-         return resolver.getComponentsFromPath(filename);
-       } catch (e) {
-         const logger = Logger.childFromRoot('ShadowRepo.compareTypes');
-         logger.warn(`unable to resolve ${filename}`);
-         return undefined;
-       }
-     })
-     .filter(sourceComponentGuard);
+ const resolveType =
+   (resolver: MetadataResolver) =>
+   (filenames: string[]): SourceComponent[] =>
+     filenames
+       .flatMap((filename) => {
+         try {
+           return resolver.getComponentsFromPath(filename);
+         } catch (e) {
+           const logger = Logger.childFromRoot('ShadowRepo.compareTypes');
+           logger.warn(`unable to resolve ${filename}`);
+           return undefined;
+         }
+       })
+       .filter(sourceComponentGuard);

/** where we don't have git objects to use, read the file contents to generate the hash */
const getHashFromActualFileContents =
@@ -218,3 +255,8 @@ const getHashFromActualFileContents =
    basename: path.basename(filepath),
    hash: (await git.readBlob({ fs, dir: projectPath, gitdir, filepath, oid })).oid,
  });
+
+ const hashEntryToBasenameEntry = ([k, v]: [string, string]): [string, string] => [hashToBasename(k), v];
+ const hashToBasename = (hash: string): string => hash.split(JOIN_CHAR)[1];
+
+ const stringNoOp = (s: string): string => s;
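
For reviewers, here is a minimal sketch of how the new return shape threads through a caller. It is not part of this change: the module path, `registry`, `projectPath`, `gitDir`, and the `status` rows are hypothetical placeholders (the real wiring lives in the ShadowRepo class, outside this diff); only the call chain follows the signatures added above.

// Sketch only, assuming hypothetical inputs; illustrates the curried pipeline and the new StringMapsForMatches shape.
import os from 'node:os';
import { Logger } from '@salesforce/core';
import { RegistryAccess } from '@salesforce/source-deploy-retrieve';
import { filenameMatchesToMap, getMatches, getLogMessage } from './filenameMatchesToMap'; // hypothetical module path
import { StatusRow } from './types';

const logMoves = async (status: StatusRow[]): Promise<void> => {
  const isWindows = os.type() === 'Windows_NT';
  const registry = new RegistryAccess(); // assumption: the default registry is sufficient for the sketch
  const projectPath = '/path/to/project'; // hypothetical
  const gitDir = '/path/to/project/.git'; // hypothetical gitdir for the shadow repo

  // pair up added/deleted filenames from the status rows
  const { added, deleted } = getMatches(status);

  // the curried pipeline resolves to exact matches plus probable "move, then edit" matches
  const matches = await filenameMatchesToMap(isWindows)(registry)(projectPath)(gitDir)({ added, deleted });

  if (matches.fullMatches.size || matches.deleteOnly.size) {
    Logger.childFromRoot('example').debug(getLogMessage(matches));
  }
};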