Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
236 changes: 151 additions & 85 deletions src/shared/local/localShadowRepo.ts
Original file line number Diff line number Diff line change
Expand Up @@ -17,9 +17,11 @@
import path from 'node:path';
import * as os from 'node:os';
import * as fs from 'graceful-fs';
import { NamedPackageDir, Lifecycle, Logger, SfError } from '@salesforce/core';
import { NamedPackageDir, Lifecycle, Logger, SfError, lockInit } from '@salesforce/core';
import { env } from '@salesforce/kit';
import git from 'isomorphic-git';
import { GitIndexManager } from 'isomorphic-git/managers';
import { FileSystem as IsoGitFS } from 'isomorphic-git/models';
import { RegistryAccess } from '@salesforce/source-deploy-retrieve';
import { chunkArray, excludeLwcLocalOnlyTest, folderContainsPath } from '../functions';
import { filenameMatchesToMap, getLogMessage, getMatches } from './moveDetection';
Expand Down Expand Up @@ -79,12 +81,15 @@ export class ShadowRepo {
private status!: StatusRow[];
private logger!: Logger;
private readonly registry: RegistryAccess;
private lockHeld = false;
private readonly indexLockPath: string;

private constructor(options: ShadowRepoOptions) {
  // Destructure once instead of repeated options.* access.
  const { orgId, projectPath, packageDirs, registry } = options;
  this.gitDir = getGitDir(orgId, projectPath);
  this.projectPath = projectPath;
  // package dirs are stored as posix-style paths relative to the project root
  this.packageDirs = packageDirs.map(packageDirToRelativePosixPath(projectPath));
  this.registry = registry;
  // lock target lives inside the shadow gitdir, next to the index file it guards
  this.indexLockPath = path.join(this.gitDir, 'index');
}

// think of singleton behavior but unique to the projectPath
Expand Down Expand Up @@ -113,12 +118,14 @@ export class ShadowRepo {
*/
public async gitInit(): Promise<void> {
  this.logger.trace(`initializing git repo at ${this.gitDir}`);
  // NOTE(review): the diff showed both the pre-refactor (unlocked) and
  // post-refactor (locked) bodies flattened together, which would have run
  // mkdir+init twice. Only the locked version — the PR's additions — is kept.
  await this.withGitLock(async () => {
    // ensure the bare gitdir exists before isomorphic-git touches it
    await fs.promises.mkdir(this.gitDir, { recursive: true });
    try {
      await git.init({ fs, dir: this.projectPath, gitdir: this.gitDir, defaultBranch: 'main' });
    } catch (e) {
      // normalize any isomorphic-git failure into the CLI-facing repo error
      redirectToCliRepoError(e);
    }
  });
}

/**
Expand All @@ -127,8 +134,10 @@ export class ShadowRepo {
* @returns the deleted directory
*/
public async delete(): Promise<string> {
  // NOTE(review): the pre-refactor unlocked rm/return lines left in the
  // flattened diff returned early and made the locked version unreachable
  // dead code; they are dropped here so the lock is actually honored.
  // Hold the git lock so no concurrent git operation races the removal.
  return this.withGitLock(async () => {
    await fs.promises.rm(this.gitDir, { recursive: true, force: true });
    // return the deleted directory path so callers can report what was removed
    return this.gitDir;
  });
}
/**
* If the status already exists, return it. Otherwise, set the status before returning.
Expand All @@ -142,32 +151,35 @@ export class ShadowRepo {
this.logger.trace(`start: getStatus (noCache = ${noCache})`);

if (!this.status || noCache) {
try {
// status hasn't been initialized yet
this.status = await git.statusMatrix({
fs,
dir: this.projectPath,
gitdir: this.gitDir,
filepaths: this.packageDirs,
ignored: true,
filter: fileFilter(this.packageDirs),
});
// lock reads too: a concurrent write could leave a partially-written index that statusMatrix would misparse
await this.withGitLock(async () => {
try {
// status hasn't been initialized yet
this.status = await git.statusMatrix({
fs,
dir: this.projectPath,
gitdir: this.gitDir,
filepaths: this.packageDirs,
ignored: true,
filter: fileFilter(this.packageDirs),
});

// isomorphic-git stores things in unix-style tree. Convert to windows-style if necessary
if (IS_WINDOWS) {
this.status = this.status.map((row) => [path.normalize(row[FILE]), row[HEAD], row[WORKDIR], row[3]]);
}
// isomorphic-git stores things in unix-style tree. Convert to windows-style if necessary
if (IS_WINDOWS) {
this.status = this.status.map((row) => [path.normalize(row[FILE]), row[HEAD], row[WORKDIR], row[3]]);
}

if (env.getBoolean('SF_DISABLE_SOURCE_MOBILITY') === true) {
await Lifecycle.getInstance().emitTelemetry({ eventName: 'moveFileDetectionDisabled' });
} else {
// Check for moved files and update local git status accordingly
await Lifecycle.getInstance().emitTelemetry({ eventName: 'moveFileDetectionEnabled' });
await this.detectMovedFiles();
if (env.getBoolean('SF_DISABLE_SOURCE_MOBILITY') === true) {
await Lifecycle.getInstance().emitTelemetry({ eventName: 'moveFileDetectionDisabled' });
} else {
// Check for moved files and update local git status accordingly
await Lifecycle.getInstance().emitTelemetry({ eventName: 'moveFileDetectionEnabled' });
await this.detectMovedFiles();
}
} catch (e) {
redirectToCliRepoError(e);
}
} catch (e) {
redirectToCliRepoError(e);
}
});
}
this.logger.trace(`done: getStatus (noCache = ${noCache})`);
return this.status;
Expand Down Expand Up @@ -248,77 +260,131 @@ export class ShadowRepo {
return 'no files to commit';
}

if (deployedFiles.length) {
const chunks = chunkArray(
return this.withGitLock(async () => {
// Phase 1: Write blob objects and collect file metadata.
// Blob writes go to .git/objects/ and are independent of the index.
// Chunked to avoid EMFILE (too many open files).
const insertions: Array<{ filepath: string; stats: fs.Stats; oid: string }> = [];

if (deployedFiles.length) {
// these are stored in posix/style/path format. We have to convert inbound stuff from windows
[...new Set(IS_WINDOWS ? deployedFiles.map(normalize).map(ensurePosix) : deployedFiles)],
MAX_FILE_ADD
);
for (const chunk of chunks) {
try {
this.logger.debug(`adding ${chunk.length} files of ${deployedFiles.length} deployedFiles to git`);
// these need to be done sequentially (it's already batched) because isogit manages file locking
const uniqueFiles = [...new Set(IS_WINDOWS ? deployedFiles.map(normalize).map(ensurePosix) : deployedFiles)];
const chunks = chunkArray(uniqueFiles, MAX_FILE_ADD);

for (const chunk of chunks) {
this.logger.debug(`writing ${chunk.length} blobs of ${uniqueFiles.length} deployedFiles`);
// eslint-disable-next-line no-await-in-loop
await git.add({
fs,
dir: this.projectPath,
gitdir: this.gitDir,
filepath: chunk,
force: true,
});
} catch (e) {
if (e instanceof git.Errors.MultipleGitError) {
this.logger.error(`${e.errors.length} errors on git.add, showing the first 5:`, e.errors.slice(0, 5));
const settled = await Promise.allSettled(
chunk.map(async (filepath) => {
const fullPath = path.join(this.projectPath, filepath);
const stats = await fs.promises.lstat(fullPath);
const fileBuffer = await fs.promises.readFile(fullPath);
const oid = await git.writeBlob({
fs,
dir: this.projectPath,
gitdir: this.gitDir,
blob: fileBuffer,
});
return { filepath, stats, oid };
})
);
Comment thread
Screendead marked this conversation as resolved.

// Mirror isomorphic-git's addToIndex: allSettled -> aggregate errors
const rejected = settled
.filter((r): r is PromiseRejectedResult => r.status === 'rejected')
.map((r) => r.reason as Error);
if (rejected.length > 1) {
this.logger.error(`${rejected.length} errors writing blobs, showing the first 5:`, rejected.slice(0, 5));
throw SfError.create({
message: e.message,
name: e.name,
data: e.errors.map((err) => err.message),
cause: e,
message: `Multiple errors occurred writing blob objects (${rejected.length} failures)`,
name: 'MultipleGitError',
data: rejected.map((err) => err.message),
cause: rejected[0],
actions: [
`One potential reason you're getting this error is that the number of files that source tracking is batching exceeds your user-specific file limits. Increase your hard file limit in the same session by executing 'ulimit -Hn ${MAX_FILE_ADD}'. Or set the 'SFDX_SOURCE_TRACKING_BATCH_SIZE' environment variable to a value lower than the output of 'ulimit -Hn'.\nNote: Don't set this environment variable too close to the upper limit or your system will still hit it. If you continue to get the error, lower the value of the environment variable even more.`,
],
});
}
redirectToCliRepoError(e);
if (rejected.length === 1) {
redirectToCliRepoError(rejected[0]);
}

insertions.push(
...settled
.filter(
(r): r is PromiseFulfilledResult<{ filepath: string; stats: fs.Stats; oid: string }> =>
r.status === 'fulfilled'
)
.map((r) => r.value)
);
}
}
}

if (deletedFiles.length) {
// Using a cache here speeds up the performance by ~24.4%
let cache = {};
const deletions = deletedFiles.length
? [...new Set(IS_WINDOWS ? deletedFiles.map(normalize).map(ensurePosix) : deletedFiles)]
: [];

// Phase 2: Single index update — one GitIndexManager.acquire() call reads the index,
// applies all inserts and deletes in memory, and writes it back once when the callback resolves.
if (insertions.length || deletions.length) {
const isoGitFS = new IsoGitFS(fs);
await GitIndexManager.acquire({ fs: isoGitFS, gitdir: this.gitDir, cache: {} }, (index) => {
for (const { filepath, stats, oid } of insertions) {
index.insert({ filepath, stats, oid });
}
for (const filepath of deletions) {
index.delete({ filepath });
}
});
}

for (const filepath of [...new Set(IS_WINDOWS ? deletedFiles.map(normalize).map(ensurePosix) : deletedFiles)]) {
try {
// these need to be done sequentially because isogit manages file locking. Isogit remove does not support multiple files at once
// eslint-disable-next-line no-await-in-loop
await git.remove({ fs, dir: this.projectPath, gitdir: this.gitDir, filepath, cache });
} catch (e) {
redirectToCliRepoError(e);
try {
this.logger.trace('start: commitChanges git.commit');

const sha = await git.commit({
fs,
dir: this.projectPath,
gitdir: this.gitDir,
message,
author: { name: 'sfdx source tracking' },
});
// status changed as a result of the commit. This prevents users from having to run getStatus(true) to avoid cache
if (needsUpdatedStatus) {
await this.getStatus(true);
}
this.logger.trace('done: commitChanges git.commit');
return sha;
} catch (e) {
redirectToCliRepoError(e);
}
// clear cache
cache = {};
});
}

/**
* Cross-process file lock on the git index using proper-lockfile (via @salesforce/core lockInit).
* The lockHeld flag provides reentrancy within the same process, needed because
* getStatus() -> detectMovedFiles() -> commitChanges() -> getStatus(true).
* Cross-process coordination is handled by proper-lockfile's mkdir-based lock.
*/
private async withGitLock<T>(operation: () => Promise<T>): Promise<T> {
if (this.lockHeld) {
return operation();
}

this.logger.trace('acquiring git index lock');
const { unlock } = await lockInit(this.indexLockPath);
this.lockHeld = true;
try {
this.logger.trace('start: commitChanges git.commit');

const sha = await git.commit({
fs,
dir: this.projectPath,
gitdir: this.gitDir,
message,
author: { name: 'sfdx source tracking' },
});
// status changed as a result of the commit. This prevents users from having to run getStatus(true) to avoid cache
if (needsUpdatedStatus) {
await this.getStatus(true);
return await operation();
} finally {
this.lockHeld = false;
try {
await unlock();
this.logger.trace('released git index lock');
} catch (e) {
// unlock can fail if gitDir was deleted (e.g., by delete()), which is expected
this.logger.trace('could not release git index lock (lock dir may have been removed)', e);
}
this.logger.trace('done: commitChanges git.commit');
return sha;
} catch (e) {
redirectToCliRepoError(e);
}
Comment thread
Screendead marked this conversation as resolved.
}

Expand Down
Loading