When building sites, I typically keep the contents of the site in a separate GitHub repository. This approach is nice, but it comes with some challenges, mainly keeping the remotes in sync.

So I wrote a lil script to help.
// Sync remote repos
const fs = require('fs')
const path = require('path')
const childProcess = require('child_process')

/**
 * Asynchronously reads folders from a specified directory and checks if they are git repositories.
 * @param {string} dirName - The directory path to read folders from.
 * @returns {Promise<void>} A promise that resolves once every git repository found in the directory has been updated.
 */
async function syncRemoteGitContent(dirName) {
  const folders = await fs.promises.readdir(dirName)
  const repoFolders = folders
    /* Resolve each folder to an absolute path */
    .map((folder) => path.resolve(dirName, folder))
    /* Keep only folders that contain a .git directory */
    .filter((folder) => {
      try {
        fs.statSync(path.resolve(folder, '.git'))
        return true
      } catch (err) {
        return false
      }
    })
  console.log('Git Sync on folders', repoFolders)
  return syncRepos(repoFolders)
}
/**
 * Asynchronously updates git repositories located at specified paths.
 * @param {string[]} repoFolders - An array of paths to git repositories.
 * @returns {Promise<void>} A promise that resolves once every repository has been pulled.
 */
async function syncRepos(repoFolders) {
  /* Pull repos one at a time so each repo's log output stays grouped */
  await asyncForEach(repoFolders, async (repoPath) => {
    await updateRepo(repoPath)
  })
}
/**
 * Asynchronously updates a git repository.
 * @param {string} filePath - The path to the git repository.
 * @returns {Promise<string>} A promise that resolves with a message indicating a successful repository update.
 */
function updateRepo(filePath) {
  return new Promise((resolve, reject) => {
    const name = path.basename(path.dirname(filePath)) + '/' + path.basename(filePath)
    console.log(`Running git pull on ${filePath}`)
    console.log('───────────────────────')
    /* Print the current branch, then pull the latest changes */
    const child = childProcess.exec('git rev-parse --abbrev-ref HEAD && git pull', { cwd: filePath }, (error) => {
      if (error) {
        console.log(`Git Sync Error ${filePath}`)
        console.log(error)
        return reject(error)
      }
    })
    child.stdout.on('data', (data) => {
      console.log(`[${name}]:`, data.trim())
    })
    child.stderr.on('data', (data) => {
      /* git writes progress to stderr too; ignore noisy ssh multiplexing warnings */
      if (data.includes('disabling multiplexing')) {
        return
      }
      console.log(`[${name}]:`, data.trim())
    })
    /* Settle the promise once the process exits cleanly; failures reject via the exec callback above */
    child.on('close', (code) => {
      console.log('───────────────────────')
      if (code === 0) {
        resolve(`${filePath} repo updated`)
      }
    })
  })
}
/**
 * Asynchronously performs a function for each element in the array.
 * @param {Array} array - The array to iterate over.
 * @param {Function} callback - The function to call for each element.
 * @returns {Promise<void>} A promise that resolves when all iterations are complete.
 */
async function asyncForEach(array, callback) {
  /* Sequential, not parallel: each callback is awaited before the next starts */
  for (let index = 0; index < array.length; index++) {
    await callback(array[index], index, array)
  }
}

module.exports = {
  syncRemoteGitContent
}
Overview of the Code: The script leverages the `fs`, `path`, and `child_process` modules to interact with the filesystem and execute Git commands.

- `syncRemoteGitContent` reads folders from the specified directory (`SOURCES_DIR`) and identifies Git repositories within them, returning an array of paths to these repositories for syncing.
- `syncRepos` uses `asyncForEach` to iterate over the array of repository paths and calls the `updateRepo` function for each repository.
- `updateRepo` executes the `git pull` command within the repository directory. It uses `child_process.exec` to run the command asynchronously and handles both standard output and error streams.

To use the script, just pass in the directory where your remotes live.
// scripts/_sync.js
const path = require('path')
const { syncRemoteGitContent } = require('./utils/git-pull')

syncRemoteGitContent(path.resolve(__dirname, 'sources'))
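Note that the script only pulls; it skips any folder without a .git directory, so your content repos need to be cloned into the sources folder first. Something like this sets one up (the repo URL and name here are placeholders):

git clone git@github.com:your-user/your-content-repo.git scripts/sources/your-content-repo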
Then include this in your package.json so you can run it:
{
"scripts": {
"sync": "node scripts/_sync.js"
}
}
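If you want content to sync automatically before local development, you could hook it into an npm pre-script. A minimal sketch, assuming your dev server runs via a dev script (node server.js is a placeholder):

{
  "scripts": {
    "predev": "npm run sync",
    "dev": "node server.js",
    "sync": "node scripts/_sync.js"
  }
}

npm runs predev automatically every time you run npm run dev, so the remotes are pulled before the server starts.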
Then run it with:

npm run sync

Enjoy!