yarn2nix: format code

Author: Mel Bourgeois
Date: 2022-10-26 19:30:06 -05:00
Parent: 0bb8e94900
Commit: 965665f788

8 changed files with 179 additions and 159 deletions

diff --git a/bin/yarn2nix.js b/bin/yarn2nix.js

@@ -1,14 +1,14 @@
 #!/usr/bin/env node

-const fs = require('fs')
-const lockfile = require('@yarnpkg/lockfile')
-const { docopt } = require('docopt')
-const deepEqual = require('deep-equal')
-const R = require('ramda')
+const fs = require("fs");
+const lockfile = require("@yarnpkg/lockfile");
+const { docopt } = require("docopt");
+const deepEqual = require("deep-equal");
+const R = require("ramda");

-const fixPkgAddMissingSha1 = require('../lib/fixPkgAddMissingSha1')
-const mapObjIndexedReturnArray = require('../lib/mapObjIndexedReturnArray')
-const generateNix = require('../lib/generateNix')
+const fixPkgAddMissingSha1 = require("../lib/fixPkgAddMissingSha1");
+const mapObjIndexedReturnArray = require("../lib/mapObjIndexedReturnArray");
+const generateNix = require("../lib/generateNix");

 const USAGE = `
 Usage: yarn2nix [options]
@@ -19,11 +19,11 @@ Options:
   --no-patch       Don't patch the lockfile if hashes are missing
   --lockfile=FILE  Specify path to the lockfile [default: ./yarn.lock].
   --builtin-fetchgit Use builtin fetchGit for git dependencies to support on-the-fly generation of yarn.nix without an internet connection
-`
+`;

-const options = docopt(USAGE)
+const options = docopt(USAGE);

-const data = fs.readFileSync(options['--lockfile'], 'utf8')
+const data = fs.readFileSync(options["--lockfile"], "utf8");

 // json example:
@@ -45,10 +45,10 @@ const data = fs.readFileSync(options['--lockfile'], 'utf8')
 // }
 // }

-const json = lockfile.parse(data)
+const json = lockfile.parse(data);

-if (json.type !== 'success') {
-  throw new Error('yarn.lock parse error')
+if (json.type !== "success") {
+  throw new Error("yarn.lock parse error");
 }

 // Check for missing hashes in the yarn.lock and patch if necessary
@@ -56,35 +56,35 @@ if (json.type !== 'success') {
 let pkgs = R.pipe(
   mapObjIndexedReturnArray((value, key) => ({
     ...value,
-    nameWithVersion: key,
+    nameWithVersion: key
   })),
-  R.uniqBy(R.prop('resolved')),
-)(json.object)
+  R.uniqBy(R.prop("resolved"))
+)(json.object);

-;(async () => {
-  if (!options['--no-patch']) {
-    pkgs = await Promise.all(R.map(fixPkgAddMissingSha1, pkgs))
+(async () => {
+  if (!options["--no-patch"]) {
+    pkgs = await Promise.all(R.map(fixPkgAddMissingSha1, pkgs));
   }

-  const origJson = lockfile.parse(data)
+  const origJson = lockfile.parse(data);

   if (!deepEqual(origJson, json)) {
-    console.error('found changes in the lockfile', options['--lockfile'])
+    console.error("found changes in the lockfile", options["--lockfile"]);

-    if (options['--no-patch']) {
-      console.error('...aborting')
-      process.exit(1)
+    if (options["--no-patch"]) {
+      console.error("...aborting");
+      process.exit(1);
     }

-    fs.writeFileSync(options['--lockfile'], lockfile.stringify(json.object))
+    fs.writeFileSync(options["--lockfile"], lockfile.stringify(json.object));
   }

-  if (!options['--no-nix']) {
+  if (!options["--no-nix"]) {
     // print to stdout
-    console.log(generateNix(pkgs, options['--builtin-fetchgit']))
+    console.log(generateNix(pkgs, options["--builtin-fetchgit"]));
   }
 })().catch(error => {
-  console.error(error)
-  process.exit(1)
-})
+  console.error(error);
+  process.exit(1);
+});
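
For context, not part of the commit: a minimal sketch of what the R.pipe above does to a parsed lockfile. The lodash entries and the hash are hypothetical.

// Two semver ranges resolving to the same tarball collapse into one pkg.
const R = require("ramda");
const mapObjIndexedReturnArray = require("../lib/mapObjIndexedReturnArray");

const object = {
  "lodash@^4.17.0": {
    version: "4.17.21",
    resolved: "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#abc123"
  },
  "lodash@^4.17.15": {
    version: "4.17.21",
    resolved: "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#abc123"
  }
};

const pkgs = R.pipe(
  mapObjIndexedReturnArray((value, key) => ({ ...value, nameWithVersion: key })),
  R.uniqBy(R.prop("resolved"))
)(object);
// pkgs => [{ version: "4.17.21", resolved: "...#abc123", nameWithVersion: "lodash@^4.17.0" }]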

diff --git a/internal/fixup_bin.js b/internal/fixup_bin.js

@@ -4,50 +4,50 @@
  * node fixup_bin.js <bin_dir> <modules_dir> [<bin_pkg_1>, <bin_pkg_2> ... ]
  */
-const fs = require('fs')
-const path = require('path')
+const fs = require("fs");
+const path = require("path");

-const derivationBinPath = process.argv[2]
-const nodeModules = process.argv[3]
-const packagesToPublishBin = process.argv.slice(4)
+const derivationBinPath = process.argv[2];
+const nodeModules = process.argv[3];
+const packagesToPublishBin = process.argv.slice(4);

 function processPackage(name) {
-  console.log('fixup_bin: Processing ', name)
+  console.log("fixup_bin: Processing ", name);

-  const packagePath = `${nodeModules}/${name}`
-  const packageJsonPath = `${packagePath}/package.json`
-  const packageJson = JSON.parse(fs.readFileSync(packageJsonPath))
+  const packagePath = `${nodeModules}/${name}`;
+  const packageJsonPath = `${packagePath}/package.json`;
+  const packageJson = JSON.parse(fs.readFileSync(packageJsonPath));

   if (!packageJson.bin) {
-    console.log('fixup_bin: No binaries provided')
-    return
+    console.log("fixup_bin: No binaries provided");
+    return;
   }

   // There are two alternative syntaxes for `bin`
   // a) just a plain string, in which case the name of the package is the name of the binary.
   // b) an object, where key is the name of the eventual binary, and the value the path to that binary.
-  if (typeof packageJson.bin === 'string') {
-    const binName = packageJson.bin
-    packageJson.bin = {}
-    packageJson.bin[packageJson.name] = binName
+  if (typeof packageJson.bin === "string") {
+    const binName = packageJson.bin;
+    packageJson.bin = {};
+    packageJson.bin[packageJson.name] = binName;
   }

   // eslint-disable-next-line no-restricted-syntax, guard-for-in
   for (const binName in packageJson.bin) {
-    const binPath = packageJson.bin[binName]
-    const normalizedBinName = binName.replace('@', '').replace('/', '-')
+    const binPath = packageJson.bin[binName];
+    const normalizedBinName = binName.replace("@", "").replace("/", "-");

-    const targetPath = path.normalize(`${packagePath}/${binPath}`)
-    const createdPath = `${derivationBinPath}/${normalizedBinName}`
+    const targetPath = path.normalize(`${packagePath}/${binPath}`);
+    const createdPath = `${derivationBinPath}/${normalizedBinName}`;

     console.log(
-      `fixup_bin: creating link ${createdPath} that points to ${targetPath}`,
-    )
+      `fixup_bin: creating link ${createdPath} that points to ${targetPath}`
+    );

-    fs.symlinkSync(targetPath, createdPath)
+    fs.symlinkSync(targetPath, createdPath);
   }
 }

 packagesToPublishBin.forEach(pkg => {
-  processPackage(pkg)
-})
+  processPackage(pkg);
+});
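
For context, not part of the commit: the two `bin` syntaxes described in the comment above, and what the normalization branch produces. The package name and path are hypothetical.

// a) plain string: the package name becomes the binary name
const pkgA = { name: "mytool", bin: "cli.js" };
// b) object: keys are binary names, values are paths to the binaries
const pkgB = { name: "mytool", bin: { mytool: "cli.js" } };

// The typeof check above rewrites form a) into form b):
if (typeof pkgA.bin === "string") {
  const binName = pkgA.bin;
  pkgA.bin = {};
  pkgA.bin[pkgA.name] = binName;
}
// pkgA.bin is now { mytool: "cli.js" }, identical in shape to pkgB.bin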

diff --git a/internal/fixup_yarn_lock.js b/internal/fixup_yarn_lock.js

@@ -4,46 +4,46 @@
  * node fixup_yarn_lock.js yarn.lock
  */
-const fs = require('fs')
-const readline = require('readline')
+const fs = require("fs");
+const readline = require("readline");

-const urlToName = require('../lib/urlToName')
+const urlToName = require("../lib/urlToName");

-const yarnLockPath = process.argv[2]
+const yarnLockPath = process.argv[2];

 const readFile = readline.createInterface({
-  input: fs.createReadStream(yarnLockPath, { encoding: 'utf8' }),
+  input: fs.createReadStream(yarnLockPath, { encoding: "utf8" }),
   // Note: we use the crlfDelay option to recognize all instances of CR LF
   // ('\r\n') in input.txt as a single line break.
   crlfDelay: Infinity,
-  terminal: false, // input and output should be treated like a TTY
-})
+  terminal: false // input and output should be treated like a TTY
+});

-const result = []
+const result = [];

 readFile
-  .on('line', line => {
-    const arr = line.match(/^ {2}resolved "([^#]+)(#[^"]+)?"$/)
+  .on("line", line => {
+    const arr = line.match(/^ {2}resolved "([^#]+)(#[^"]+)?"$/);

     if (arr !== null) {
-      const [_, url, shaOrRev] = arr
+      const [_, url, shaOrRev] = arr;

-      const fileName = urlToName(url)
+      const fileName = urlToName(url);

-      result.push(`  resolved "${fileName}${shaOrRev ?? ''}"`)
+      result.push(`  resolved "${fileName}${shaOrRev ?? ""}"`);
     } else {
-      result.push(line)
+      result.push(line);
     }
   })
-  .on('close', () => {
-    fs.writeFile(yarnLockPath, result.join('\n'), 'utf8', err => {
+  .on("close", () => {
+    fs.writeFile(yarnLockPath, result.join("\n"), "utf8", err => {
       if (err) {
         console.error(
-          'fixup_yarn_lock: fatal error when trying to write to yarn.lock',
-          err,
-        )
+          "fixup_yarn_lock: fatal error when trying to write to yarn.lock",
+          err
+        );
       }
-    })
-  })
+    });
+  });
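
For context, not part of the commit: the rewrite this script performs on each `resolved` line, shown on the codeload example documented in lib/urlToName.js.

// before (as written by yarn):
//   resolved "https://codeload.github.com/Gargron/emoji-mart/tar.gz/934f314fd8322276765066e8a2a6be5bac61b1cf"
// after (urlToName keeps only the basename for codeload tarball URLs,
// and there is no "#..." fragment here, so nothing is appended):
//   resolved "934f314fd8322276765066e8a2a6be5bac61b1cf"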

diff --git a/lib/fixPkgAddMissingSha1.js b/lib/fixPkgAddMissingSha1.js

@@ -1,5 +1,5 @@
-const https = require('https')
-const crypto = require('crypto')
+const https = require("https");
+const crypto = require("crypto");

 // TODO:
 // make test case where getSha1 function is used, i.e. the case when resolved is without sha1?
@@ -8,29 +8,29 @@ const crypto = require('crypto')
 function getSha1(url) {
   return new Promise((resolve, reject) => {
     https.get(url, res => {
-      const { statusCode } = res
-      const hash = crypto.createHash('sha1')
+      const { statusCode } = res;
+      const hash = crypto.createHash("sha1");

       if (statusCode !== 200) {
-        const err = new Error(`Request Failed.\nStatus Code: ${statusCode}`)
+        const err = new Error(`Request Failed.\nStatus Code: ${statusCode}`);

         // consume response data to free up memory
-        res.resume()
+        res.resume();

-        reject(err)
+        reject(err);
       }

-      res.on('data', chunk => {
-        hash.update(chunk)
-      })
+      res.on("data", chunk => {
+        hash.update(chunk);
+      });

-      res.on('end', () => {
-        resolve(hash.digest('hex'))
-      })
+      res.on("end", () => {
+        resolve(hash.digest("hex"));
+      });

-      res.on('error', reject)
-    })
-  })
+      res.on("error", reject);
+    });
+  });
 }

 // Object -> Object
@@ -39,28 +39,26 @@ async function fixPkgAddMissingSha1(pkg) {
   if (!pkg.resolved) {
     console.error(
-      `yarn2nix: can't find "resolved" field for package ${
-        pkg.nameWithVersion
-      }, you probably required it using "file:...", this feature is not supported, ignoring`,
-    )
-    return pkg
+      `yarn2nix: can't find "resolved" field for package ${pkg.nameWithVersion}, you probably required it using "file:...", this feature is not supported, ignoring`
+    );
+    return pkg;
   }

-  const [url, sha1] = pkg.resolved.split('#', 2)
+  const [url, sha1] = pkg.resolved.split("#", 2);

-  if (sha1 || url.startsWith('https://codeload.github.com')) {
-    return pkg
+  if (sha1 || url.startsWith("https://codeload.github.com")) {
+    return pkg;
   }

   // if there is no sha1 in resolved url
   // (this could happen if yarn.lock was generated by older version of yarn)
   // - request it from registry by https and add it to pkg
-  const newSha1 = await getSha1(url)
+  const newSha1 = await getSha1(url);

   return {
     ...pkg,
-    resolved: `${url}#${newSha1}`,
-  }
+    resolved: `${url}#${newSha1}`
+  };
 }

-module.exports = fixPkgAddMissingSha1
+module.exports = fixPkgAddMissingSha1;
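
For context, not part of the commit: a sketch of how fixPkgAddMissingSha1 behaves on the two interesting inputs. Package names, URLs, and the hash are hypothetical.

const fixPkgAddMissingSha1 = require("./fixPkgAddMissingSha1");

(async () => {
  // resolved already carries a "#sha1" fragment: returned unchanged
  console.log(await fixPkgAddMissingSha1({
    nameWithVersion: "a@^1.0.0",
    resolved: "https://registry.yarnpkg.com/a/-/a-1.0.0.tgz#abc123"
  }));

  // no fragment (yarn.lock written by an older yarn): getSha1 downloads
  // the tarball over https and the sha1 is appended to resolved
  console.log(await fixPkgAddMissingSha1({
    nameWithVersion: "b@^2.0.0",
    resolved: "https://registry.yarnpkg.com/b/-/b-2.0.0.tgz"
  }));
  // => { ..., resolved: "https://registry.yarnpkg.com/b/-/b-2.0.0.tgz#<sha1>" }
})();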

diff --git a/lib/generateNix.js b/lib/generateNix.js

@@ -1,8 +1,8 @@
-const R = require('ramda')
+const R = require("ramda");
 const ssri = require("ssri");
-const urlToName = require('./urlToName')
-const { execFileSync } = require('child_process')
+const urlToName = require("./urlToName");
+const { execFileSync } = require("child_process");

 // fetchgit transforms
 //
@@ -34,18 +34,24 @@ const { execFileSync } = require('child_process')
 function prefetchgit(url, rev) {
   return JSON.parse(
-    execFileSync("nix-prefetch-git", ["--rev", rev, url, "--fetch-submodules"], {
-      stdio: [ "ignore", "pipe", "ignore" ],
-      timeout: 60000,
-    })
-  ).sha256
+    execFileSync(
+      "nix-prefetch-git",
+      ["--rev", rev, url, "--fetch-submodules"],
+      {
+        stdio: ["ignore", "pipe", "ignore"],
+        timeout: 60000
+      }
+    )
+  ).sha256;
 }

 function fetchgit(fileName, url, rev, branch, builtinFetchGit) {
   return ` {
     name = "${fileName}";
     path =
-      let${builtinFetchGit ? `
+      let${
+        builtinFetchGit
+          ? `
         repo = builtins.fetchGit ({
           url = "${url}";
           ref = "${branch}";
@@ -54,54 +60,68 @@ function fetchgit(fileName, url, rev, branch, builtinFetchGit) {
           # workaround for https://github.com/NixOS/nix/issues/5128
           allRefs = true;
         } else {}));
-` : `
+`
+          : `
         repo = fetchgit {
           url = "${url}";
           rev = "${rev}";
           sha256 = "${prefetchgit(url, rev)}";
         };
-`}in
+`
+      }in
     runCommand "${fileName}" { buildInputs = [gnutar]; } ''
       # Set u+w because tar-fs can't unpack archives with read-only dirs
       # https://github.com/mafintosh/tar-fs/issues/79
      tar cf $out --mode u+w -C \${repo} .
     '';
-  }`
+  }`;
 }

 function fetchLockedDep(builtinFetchGit) {
-  return function (pkg) {
-    const { integrity, nameWithVersion, resolved } = pkg
+  return function(pkg) {
+    const { integrity, nameWithVersion, resolved } = pkg;

     if (!resolved) {
       console.error(
-        `yarn2nix: can't find "resolved" field for package ${nameWithVersion}, you probably required it using "file:...", this feature is not supported, ignoring`,
-      )
-      return ''
+        `yarn2nix: can't find "resolved" field for package ${nameWithVersion}, you probably required it using "file:...", this feature is not supported, ignoring`
+      );
+      return "";
     }

-    const [url, sha1OrRev] = resolved.split('#')
+    const [url, sha1OrRev] = resolved.split("#");

-    const fileName = urlToName(url)
+    const fileName = urlToName(url);

-    if (resolved.startsWith('https://codeload.github.com/')) {
-      const s = resolved.split('/')
-      const githubUrl = `https://github.com/${s[3]}/${s[4]}.git`
-      const githubRev = s[6]
+    if (resolved.startsWith("https://codeload.github.com/")) {
+      const s = resolved.split("/");
+      const githubUrl = `https://github.com/${s[3]}/${s[4]}.git`;
+      const githubRev = s[6];

-      const [_, branch] = nameWithVersion.split('#')
+      const [_, branch] = nameWithVersion.split("#");

-      return fetchgit(fileName, githubUrl, githubRev, branch || 'master', builtinFetchGit)
+      return fetchgit(
+        fileName,
+        githubUrl,
+        githubRev,
+        branch || "master",
+        builtinFetchGit
+      );
     }

-    if (url.startsWith('git+') || url.startsWith("git:")) {
-      const rev = sha1OrRev
+    if (url.startsWith("git+") || url.startsWith("git:")) {
+      const rev = sha1OrRev;

-      const [_, branch] = nameWithVersion.split('#')
+      const [_, branch] = nameWithVersion.split("#");

-      const urlForGit = url.replace(/^git\+/, '')
+      const urlForGit = url.replace(/^git\+/, "");

-      return fetchgit(fileName, urlForGit, rev, branch || 'master', builtinFetchGit)
+      return fetchgit(
+        fileName,
+        urlForGit,
+        rev,
+        branch || "master",
+        builtinFetchGit
+      );
     }

     // Pull out integrity hash, providing a default and using the "best" algorithm if there are multiple.
@@ -120,26 +140,29 @@ function fetchLockedDep(builtinFetchGit) {
         url = "${url}";
         ${algo} = "${hash}";
       };
-    }`
-  }
+    }`;
+  };
 }

 const HEAD = `
 { fetchurl, fetchgit, linkFarm, runCommand, gnutar }: rec {
   offline_cache = linkFarm "offline" packages;
   packages = [
-`.trim()
+`.trim();

 // Object -> String
 function generateNix(pkgs, builtinFetchGit) {
-  const nameWithVersionAndPackageNix = R.map(fetchLockedDep(builtinFetchGit), pkgs)
+  const nameWithVersionAndPackageNix = R.map(
+    fetchLockedDep(builtinFetchGit),
+    pkgs
+  );

   const packagesDefinition = R.join(
-    '\n',
-    R.values(nameWithVersionAndPackageNix),
-  )
+    "\n",
+    R.values(nameWithVersionAndPackageNix)
+  );

-  return R.join('\n', [HEAD, packagesDefinition, ' ];', '}'])
+  return R.join("\n", [HEAD, packagesDefinition, " ];", "}"]);
 }

-module.exports = generateNix
+module.exports = generateNix;
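
For context, not part of the commit: the overall shape of the expression generateNix prints, assembled from HEAD, one entry per package, and the closing lines. The package name, URL, and hash are hypothetical, and the hash attribute name depends on the lockfile's integrity field.

// console.log(generateNix(pkgs, false)) prints an expression shaped like:
//
// { fetchurl, fetchgit, linkFarm, runCommand, gnutar }: rec {
//   offline_cache = linkFarm "offline" packages;
//   packages = [
//     {
//       name = "a-1.0.0.tgz";
//       path = fetchurl {
//         url = "https://registry.yarnpkg.com/a/-/a-1.0.0.tgz";
//         sha1 = "abc123";
//       };
//     }
//   ];
// }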

diff --git a/lib/mapObjIndexedReturnArray.js b/lib/mapObjIndexedReturnArray.js

@@ -1,6 +1,6 @@
-const _curry2 = require('ramda/src/internal/_curry2')
-const _map = require('ramda/src/internal/_map')
-const keys = require('ramda/src/keys')
+const _curry2 = require("ramda/src/internal/_curry2");
+const _map = require("ramda/src/internal/_map");
+const keys = require("ramda/src/keys");

 // mapObjIndexed: ((v, k, {k: v}) → v') → {k: v} → {k: v'}
 // mapObjIndexedReturnArray: ((v, k, {k: v}) → v') → {k: v} → [v']
@@ -15,7 +15,7 @@ const keys = require('ramda/src/keys')
  */
 const mapObjIndexedReturnArray = _curry2((fn, obj) =>
-  _map(key => fn(obj[key], key, obj), keys(obj)),
-)
+  _map(key => fn(obj[key], key, obj), keys(obj))
+);

-module.exports = mapObjIndexedReturnArray
+module.exports = mapObjIndexedReturnArray;
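
For context, not part of the commit: the signature in the comment above, concretely.

const mapObjIndexedReturnArray = require("./mapObjIndexedReturnArray");

mapObjIndexedReturnArray((v, k) => `${k}=${v}`, { a: 1, b: 2 });
// => ["a=1", "b=2"]
// (R.mapObjIndexed would return { a: "a=1", b: "b=2" } instead)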

diff --git a/lib/urlToName.js b/lib/urlToName.js

@@ -1,4 +1,4 @@
-const path = require('path')
+const path = require("path");

 // String -> String
@@ -10,20 +10,19 @@ const path = require('path')
 // - https://codeload.github.com/Gargron/emoji-mart/tar.gz/934f314fd8322276765066e8a2a6be5bac61b1cf
 function urlToName(url) {
   // Yarn generates `codeload.github.com` tarball URLs, where the final
   // path component (file name) is the git hash. See #111.
   // See also https://github.com/yarnpkg/yarn/blob/989a7406/src/resolvers/exotics/github-resolver.js#L24-L26
   let isCodeloadGitTarballUrl =
-    url.startsWith('https://codeload.github.com/') && url.includes('/tar.gz/')
+    url.startsWith("https://codeload.github.com/") && url.includes("/tar.gz/");

-  if (url.startsWith('git+') || isCodeloadGitTarballUrl) {
-    return path.basename(url)
+  if (url.startsWith("git+") || isCodeloadGitTarballUrl) {
+    return path.basename(url);
   }

   return url
-    .replace(/https:\/\/(.)*(.com)\//g, '') // prevents having long directory names
-    .replace(/[@/%:-]/g, '_') // replace @ and : and - and % characters with underscore
+    .replace(/https:\/\/(.)*(.com)\//g, "") // prevents having long directory names
+    .replace(/[@/%:-]/g, "_"); // replace @ and : and - and % characters with underscore
 }

-module.exports = urlToName
+module.exports = urlToName;
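
For context, not part of the commit: urlToName applied to the codeload URL documented above and to a hypothetical registry URL.

const urlToName = require("./urlToName");

urlToName("https://codeload.github.com/Gargron/emoji-mart/tar.gz/934f314fd8322276765066e8a2a6be5bac61b1cf");
// => "934f314fd8322276765066e8a2a6be5bac61b1cf" (basename of a git tarball URL, see #111)

urlToName("https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.0.0.tgz");
// => "_babel_code_frame___code_frame_7.0.0.tgz" (host stripped, special characters replaced)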

diff --git a/package.json b/package.json

@@ -14,8 +14,8 @@
   "bin": {
     "yarn2nix": "bin/yarn2nix.js"
   },
-  "engines" : {
-    "node" : ">=8.0.0"
+  "engines": {
+    "node": ">=8.0.0"
   },
   "dependencies": {
     "@yarnpkg/lockfile": "^1.1.0",