#!/usr/bin/env node
'use strict'

const fs = require('fs')
const crypto = require('crypto')
const process = require('process')
const https = require('https')
const child_process = require('child_process')
const path = require('path')
const lockfile = require('./yarnpkg-lockfile.js')
const { promisify } = require('util')
const url = require('url')
const { urlToName } = require('./common.js')

const execFile = promisify(child_process.execFile)

// Run a subprocess; surface its stderr as the thrown message when it fails.
const exec = async (...args) => {
  const res = await execFile(...args)
  if (res.error) throw new Error(res.stderr)
  return res
}
// Download `url` over HTTPS into `fileName`, following up to 10 redirects,
// and verify the body against `expectedHash` (hex digest using `hashType`).
// When `expectedHash` is undefined, only a warning is printed.
// Resolves after the file has been fully flushed to disk; rejects on hash
// mismatch, excessive redirects, or stream errors.
const downloadFileHttps = (fileName, url, expectedHash, hashType = 'sha1') => {
  return new Promise((resolve, reject) => {
    const get = (url, redirects = 0) => https.get(url, (res) => {
      if (redirects > 10) {
        // Reject with a proper Error (not a bare string) so callers get a stack trace.
        reject(new Error('Too many redirects!'))
        return
      }
      if (res.statusCode === 301 || res.statusCode === 302) {
        return get(res.headers.location, redirects + 1)
      }
      const file = fs.createWriteStream(fileName)
      const hash = crypto.createHash(hashType)
      res.pipe(file)
      res.pipe(hash).setEncoding('hex')
      res.on('end', () => {
        const h = hash.read()
        if (expectedHash === undefined) {
          console.log(`Warning: lockfile url ${url} doesn't end in "#<hash>" to validate against. Downloaded file had hash ${h}.`)
        } else if (h !== expectedHash) {
          file.close()
          return reject(new Error(`hash mismatch, expected ${expectedHash}, got ${h}`))
        }
        // Close the write stream and wait for the flush to complete before
        // resolving, so callers never observe a partially written file.
        file.close((err) => err ? reject(err) : resolve())
      })
      res.on('error', e => reject(e))
    })
    get(url)
  })
}
// Fetch a git revision with nix-prefetch-git and pack the checkout into a
// reproducible GNU tar archive at `fileName`.
const downloadGit = async (fileName, url, rev) => {
  const checkoutDir = `${fileName}.tmp`
  await exec('nix-prefetch-git', [
    '--out', checkoutDir,
    '--url', url,
    '--rev', rev,
    '--builder',
  ])
  // Pin ownership, ordering, format and timestamps so the archive is
  // bit-identical across runs and systems.
  const tarArgs = [
    '--owner=0', '--group=0', '--numeric-owner', '--format=gnu', '--sort=name', '--mtime=@1',
    // Set u+w because tar-fs can't unpack archives with read-only dirs: https://github.com/mafintosh/tar-fs/issues/79
    '--mode', 'u+w',
    '-C', checkoutDir,
    '-cf', fileName, '.',
  ]
  await exec('tar', tarArgs)
  await exec('rm', ['-rf', checkoutDir])
}
// Decide whether a yarn "resolved" pattern points at a git repository,
// mirroring yarn's own heuristics.
const isGitUrl = pattern => {
  // https://github.com/yarnpkg/yarn/blob/3119382885ea373d3c13d6a846de743eca8c914b/src/resolvers/exotics/git-resolver.js#L15-L47
  const GIT_HOSTS = ['github.com', 'gitlab.com', 'bitbucket.com', 'bitbucket.org']
  const GIT_PATTERN_MATCHERS = [/^git:/, /^git\+.+:/, /^ssh:/, /^https?:.+\.git$/, /^https?:.+\.git#.+/]
  if (GIT_PATTERN_MATCHERS.some(matcher => matcher.test(pattern))) return true
  const { hostname, path } = url.parse(pattern)
  if (!hostname || !path || GIT_HOSTS.indexOf(hostname) < 0) return false
  // only if dependency is pointing to a git repo,
  // e.g. facebook/flow and not file in a git repo facebook/flow/archive/v1.0.0.tar.gz
  return path.split('/').filter(Boolean).length === 2
}
// Dispatch a single lockfile entry to the appropriate downloader based on its
// "resolved" URL: github codeload/archive tarballs and git URLs go through
// nix-prefetch-git; plain https URLs are downloaded and hash-verified;
// file:/@file: entries are skipped with a message.
// Returns a promise for real downloads, undefined for skipped entries;
// throws on malformed entries or unsupported URL schemes.
const downloadPkg = (pkg, verbose) => {
  const fileMarker = '@file:'
  const split = pkg.key.split(fileMarker)
  if (split.length === 2) {
    console.info(`ignoring lockfile entry "${split[0]}" which points at path "${split[1]}"`)
    return
  } else if (split.length > 2) {
    throw new Error(`The lockfile entry key "${pkg.key}" contains "${fileMarker}" more than once. Processing is not implemented.`)
  }

  if (pkg.resolved === undefined) {
    throw new Error(`The lockfile entry with key "${pkg.key}" cannot be downloaded because it is missing the "resolved" attribute, which should contain the URL to download from. The lockfile might be invalid.`)
  }

  const [url, hash] = pkg.resolved.split('#')
  if (verbose) console.log('downloading ' + url)
  const fileName = urlToName(url)
  if (url.startsWith('https://codeload.github.com/') && url.includes('/tar.gz/')) {
    const s = url.split('/')
    return downloadGit(fileName, `https://github.com/${s[3]}/${s[4]}.git`, s[s.length - 1])
  } else if (url.startsWith('https://github.com/') && url.endsWith('.tar.gz')) {
    const s = url.split('/')
    // Dots escaped so only a literal ".tar.gz" suffix is stripped from the rev.
    return downloadGit(fileName, `https://github.com/${s[3]}/${s[4]}.git`, s[s.length - 1].replace(/\.tar\.gz$/, ''))
  } else if (isGitUrl(url)) {
    return downloadGit(fileName, url.replace(/^git\+/, ''), hash)
  } else if (url.startsWith('https://')) {
    // Prefer the SRI "integrity" field when present: it carries both the
    // hash algorithm and a base64 digest.
    if (typeof pkg.integrity === 'string' || pkg.integrity instanceof String) {
      const [type, checksum] = pkg.integrity.split('-')
      return downloadFileHttps(fileName, url, Buffer.from(checksum, 'base64').toString('hex'), type)
    }
    return downloadFileHttps(fileName, url, hash)
  } else if (url.startsWith('file:')) {
    console.warn(`ignoring unsupported file:path url "${url}"`)
  } else {
    throw new Error('don\'t know how to download "' + url + '"')
  }
}
// Run an array of zero-argument (async) task factories with bounded
// concurrency. `tasks` is consumed in place as a shared queue.
// `concurrency` defaults to 4 workers (the original hard-coded value).
// Resolves when every task has completed; rejects on the first failure.
const performParallel = (tasks, concurrency = 4) => {
  const worker = async () => {
    while (tasks.length > 0) await tasks.shift()()
  }
  const workers = []
  for (let i = 0; i < concurrency; i++) {
    workers.push(worker())
  }
  return Promise.all(workers)
}
// Parse the yarn lockfile contents, download every entry into the current
// directory with bounded parallelism, then write the lockfile itself
// alongside the downloaded artifacts.
const prefetchYarnDeps = async (lockContents, verbose) => {
  const { object } = lockfile.parse(lockContents)
  const tasks = Object.entries(object)
    .map(([key, value]) => () => downloadPkg({ key, ...value }, verbose))
  await performParallel(tasks)
  await fs.promises.writeFile('yarn.lock', lockContents)
  if (verbose) console.log('Done')
}
// Print usage information to stderr and exit with a failure status.
// Never returns: process.exit terminates the program.
const showUsage = async () => {
  process.stderr.write(`
syntax: prefetch-yarn-deps [path to yarn.lock] [options]

Options:
  -h --help         Show this help
  -v --verbose      Verbose output
  --builder         Only perform the download to current directory, then exit
`)
  process.exit(1)
}
// CLI entry point: parse flags, read the lockfile, then either run in
// builder mode (download into the current directory) or prefetch into a
// temporary directory and print its nix sha256 hash.
const main = async () => {
  const args = process.argv.slice(2)
  let next, lockFile, verbose, isBuilder
  // Truthiness check deliberately stops on undefined (queue drained).
  while ((next = args.shift())) {
    switch (next) {
      case '--builder':
        isBuilder = true
        break
      case '--verbose':
      case '-v':
        verbose = true
        break
      case '--help':
      case '-h':
        showUsage()
        break
      default:
        if (!lockFile) {
          lockFile = next
        } else {
          showUsage()
        }
    }
  }

  let lockContents
  try {
    lockContents = await fs.promises.readFile(lockFile || 'yarn.lock', 'utf-8')
  } catch {
    showUsage()
  }

  if (isBuilder) {
    await prefetchYarnDeps(lockContents, verbose)
    return
  }

  const { stdout: tmpDir } = await exec('mktemp', ['-d'])
  const workDir = tmpDir.trim()
  try {
    process.chdir(workDir)
    await prefetchYarnDeps(lockContents, verbose)
    const { stdout: hash } = await exec('nix-hash', ['--type', 'sha256', '--base32', workDir])
    console.log(hash)
  } finally {
    // Always clean up the temporary directory, even on failure.
    await exec('rm', ['-rf', workDir])
  }
}
// Kick off the CLI and turn any unhandled rejection into a non-zero exit.
main().catch((err) => {
  console.error(err)
  process.exit(1)
})