"Fossies" - the Fresh Open Source Software Archive

Member "Atom/resources/app/apm/node_modules/npm/lib/fetch-package-metadata.js" (11 Apr 2017, 11284 Bytes) of package /windows/misc/atom-windows.zip:


'use strict'
var fs = require('graceful-fs')
var path = require('path')
var zlib = require('zlib')

var log = require('npmlog')
var realizePackageSpecifier = require('realize-package-specifier')
var tar = require('tar')
var once = require('once')
var semver = require('semver')
var readPackageTree = require('read-package-tree')
var readPackageJson = require('read-package-json')
var iferr = require('iferr')
var rimraf = require('rimraf')
var clone = require('lodash.clonedeep')
var validate = require('aproba')
var unpipe = require('unpipe')
var normalizePackageData = require('normalize-package-data')

var npm = require('./npm.js')
var mapToRegistry = require('./utils/map-to-registry.js')
var cache = require('./cache.js')
var cachedPackageRoot = require('./cache/cached-package-root.js')
var tempFilename = require('./utils/temp-filename.js')
var getCacheStat = require('./cache/get-stat.js')
var unpack = require('./utils/tar.js').unpack
var pulseTillDone = require('./utils/pulse-till-done.js')
var parseJSON = require('./utils/parse-json.js')

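// Wrap a callback so that errors are logged and any progress tracker is
// closed out before control returns to the caller. ('SF' is an aproba
// signature: spec must be a string, done a function.)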
function andLogAndFinish (spec, tracker, done) {
  validate('SF', [spec, done])
  return function (er, pkg) {
    if (er) {
      log.silly('fetchPackageMetaData', 'error for ' + spec, er)
      if (tracker) tracker.finish()
    }
    return done(er, pkg)
  }
}

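// Resolve `spec` (a string like "foo@^1.0.0", or an already-realized
// specifier object) to a package.json-shaped metadata object. `where` and
// `tracker` are optional, so the first block below shuffles arguments when
// the caller omits them. Dispatch is by specifier type: registry specs
// (version/range/tag), local directories, and everything else (git, remote
// and local tarballs) each take a different path.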
module.exports = function fetchPackageMetadata (spec, where, tracker, done) {
  if (!done) {
    done = tracker || where
    tracker = null
    if (done === where) where = null
  }
  if (typeof spec === 'object') {
    var dep = spec
    spec = dep.raw
  }
  var logAndFinish = andLogAndFinish(spec, tracker, done)
  if (!dep) {
    log.silly('fetchPackageMetaData', spec)
    return realizePackageSpecifier(spec, where, iferr(logAndFinish, function (dep) {
      fetchPackageMetadata(dep, where, tracker, done)
    }))
  }
  if (dep.type === 'version' || dep.type === 'range' || dep.type === 'tag') {
    fetchNamedPackageData(dep, addRequestedAndFinish)
  } else if (dep.type === 'directory') {
    fetchDirectoryPackageData(dep, where, addRequestedAndFinish)
  } else {
    fetchOtherPackageData(spec, dep, where, addRequestedAndFinish)
  }
  function addRequestedAndFinish (er, pkg) {
    if (pkg) annotateMetadata(pkg, dep, spec, where)
    logAndFinish(er, pkg)
  }
}

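// Stamp the resolved metadata with how it was requested (_requested, _spec,
// _where, _args) so later install stages can tell where a package came from.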
var annotateMetadata = module.exports.annotateMetadata = function (pkg, requested, spec, where) {
  validate('OOSS', arguments)
  pkg._requested = requested
  pkg._spec = spec
  pkg._where = where
  if (!pkg._args) pkg._args = []
  pkg._args.push([requested, where])
  // non-npm registries can and will return unnormalized data, plus
  // even the npm registry may have package data normalized with older
  // normalization rules. This ensures we get package data in a consistent,
  // stable format.
  try {
    normalizePackageData(pkg)
  } catch (ex) {
    // don't care
  }
}

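// Non-registry, non-directory specs (git, remote and local tarballs) are
// resolved by adding them to the cache, which knows how to fetch each kind.
// The clone presumably keeps callers from mutating the cached object.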
function fetchOtherPackageData (spec, dep, where, next) {
  validate('SOSF', arguments)
  log.silly('fetchOtherPackageData', spec)
  cache.add(spec, null, where, false, iferr(next, function (pkg) {
    var result = clone(pkg)
    result._inCache = true
    next(null, result)
  }))
}

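// Directory dependencies are read straight from their package.json on disk;
// there is nothing to download.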
function fetchDirectoryPackageData (dep, where, next) {
  validate('OSF', arguments)
  log.silly('fetchDirectoryPackageData', dep.name || dep.rawSpec)
  readPackageJson(path.join(dep.spec, 'package.json'), false, next)
}

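// Per-process memo of registry metadata documents, keyed by registry URL,
// so a single run only fetches each package's metadata document once.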
var regCache = {}

function fetchNamedPackageData (dep, next) {
  validate('OF', arguments)
  log.silly('fetchNamedPackageData', dep.name || dep.rawSpec)
  mapToRegistry(dep.name || dep.rawSpec, npm.config, iferr(next, function (url, auth) {
    if (regCache[url]) {
      pickVersionFromRegistryDocument(clone(regCache[url]))
    } else {
      npm.registry.get(url, {auth: auth}, pulseTillDone('fetchMetadata', iferr(next, pickVersionFromRegistryDocument)))
    }
    function returnAndAddMetadata (pkg) {
      pkg._from = dep.raw
      pkg._resolved = pkg.dist.tarball
      pkg._shasum = pkg.dist.shasum

      next(null, pkg)
    }
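    // Version selection, in order of preference:
    //   1. for tag specs, whatever the dist-tag points at;
    //   2. the newest version satisfying the range that is not newer than
    //      the configured dist-tag (usually 'latest');
    //   3. the newest version satisfying the range at all;
    //   4. for '*', the dist-tag version itself (covers packages with only
    //      prerelease versions, which plain ranges never match);
    //   5. if the document came from the local cache, retry against the
    //      network before giving up.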
    function pickVersionFromRegistryDocument (pkg) {
      if (!regCache[url]) regCache[url] = pkg
      var versions = Object.keys(pkg.versions).sort(semver.rcompare)

      if (dep.type === 'tag') {
        var tagVersion = pkg['dist-tags'][dep.spec]
        if (pkg.versions[tagVersion]) return returnAndAddMetadata(pkg.versions[tagVersion])
      } else {
        var latestVersion = pkg['dist-tags'][npm.config.get('tag')] || versions[0]

        // Find the most recent version less than or equal
        // to latestVersion that satisfies our spec
        for (var ii = 0; ii < versions.length; ++ii) {
          if (semver.gt(versions[ii], latestVersion)) continue
          if (semver.satisfies(versions[ii], dep.spec)) {
            return returnAndAddMetadata(pkg.versions[versions[ii]])
          }
        }

        // Failing that, try finding the most recent version that matches
        // our spec
        for (var jj = 0; jj < versions.length; ++jj) {
          if (semver.satisfies(versions[jj], dep.spec)) {
            return returnAndAddMetadata(pkg.versions[versions[jj]])
          }
        }

        // Failing THAT, if the range was '*', use latestVersion
        if (dep.spec === '*') {
          return returnAndAddMetadata(pkg.versions[latestVersion])
        }
      }

      // We didn't manage to find a compatible version
      // If this package was requested from cache, force hitting the network
      if (pkg._cached) {
        log.silly('fetchNamedPackageData', 'No valid target from cache, forcing network')
        return npm.registry.get(url, {
          auth: auth,
          skipCache: true
        }, pulseTillDone('fetchMetadata', iferr(next, pickVersionFromRegistryDocument)))
      }

      // And failing that, we error out
      var targets = versions.length
                  ? 'Valid install targets:\n' + versions.join(', ') + '\n'
                  : 'No valid targets found.'
      var er = new Error('No compatible version found: ' +
                         dep.raw + '\n' + targets)
      return next(er)
    }
  }))
}

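// If `pkg` is not yet in the cache, add it, copy the fresh cache metadata
// (the underscore-prefixed keys) onto it, and re-run `asserter`. Returns
// true when it has taken over the call this way, so callers can bail with
// `if (retryWithCached(pkg, fn, next)) return`.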
function retryWithCached (pkg, asserter, next) {
  if (!pkg._inCache) {
    cache.add(pkg._spec, null, pkg._where, false, iferr(next, function (newpkg) {
      Object.keys(newpkg).forEach(function (key) {
        if (key[0] !== '_') return
        pkg[key] = newpkg[key]
      })
      pkg._inCache = true
      return asserter(pkg, next)
    }))
  }
  return !pkg._inCache
}

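// Attach the package's npm-shrinkwrap.json (or null) as pkg._shrinkwrap by
// streaming through the cached tarball and looking for the file at the top
// level of the package directory.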
module.exports.addShrinkwrap = function addShrinkwrap (pkg, next) {
  validate('OF', arguments)
  if (pkg._shrinkwrap !== undefined) return next(null, pkg)
  if (retryWithCached(pkg, addShrinkwrap, next)) return
  pkg._shrinkwrap = null
  // FIXME: cache the shrinkwrap directly
  var pkgname = pkg.name
  var ver = pkg.version
  var tarball = path.join(cachedPackageRoot({name: pkgname, version: ver}), 'package.tgz')
  untarStream(tarball, function (er, untar) {
    if (er) {
      if (er.code === 'ENOTTARBALL') {
        pkg._shrinkwrap = null
        return next()
      } else {
        return next(er)
      }
    }
    var foundShrinkwrap = false
    untar.on('entry', function (entry) {
      if (!/^(?:[^\/]+[\/])npm-shrinkwrap\.json$/.test(entry.path)) return
      log.silly('addShrinkwrap', 'Found shrinkwrap in ' + pkgname + ' ' + entry.path)
      foundShrinkwrap = true
      var shrinkwrap = ''
      entry.on('data', function (chunk) {
        shrinkwrap += chunk
      })
      entry.on('end', function () {
        untar.close()
        log.silly('addShrinkwrap', 'Completed reading shrinkwrap in ' + pkgname)
        try {
          pkg._shrinkwrap = parseJSON(shrinkwrap)
        } catch (ex) {
          var er = new Error('Error parsing ' + pkgname + '@' + ver + "'s npm-shrinkwrap.json: " + ex.message)
          er.type = 'ESHRINKWRAP'
          return next(er)
        }
        next(null, pkg)
      })
      entry.resume()
    })
    untar.on('end', function () {
      if (!foundShrinkwrap) {
        pkg._shrinkwrap = null
        next(null, pkg)
      }
    })
  })
}

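// Attach the package's bundled dependencies as pkg._bundled (or null).
// There is no way to list bundled modules without unpacking, so the cached
// tarball is extracted into a throwaway temp directory, read with
// readPackageTree, and then removed again.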
module.exports.addBundled = function addBundled (pkg, next) {
  validate('OF', arguments)
  if (pkg._bundled !== undefined) return next(null, pkg)
  if (!pkg.bundleDependencies) return next(null, pkg)
  if (retryWithCached(pkg, addBundled, next)) return
  pkg._bundled = null
  var pkgname = pkg.name
  var ver = pkg.version
  var tarball = path.join(cachedPackageRoot({name: pkgname, version: ver}), 'package.tgz')
  var target = tempFilename('unpack')
  getCacheStat(iferr(next, function (cs) {
    log.verbose('addBundled', 'extract', tarball)
    unpack(tarball, target, null, null, cs.uid, cs.gid, iferr(next, function () {
      log.silly('addBundled', 'read tarball')
      readPackageTree(target, function (er, tree) {
        log.silly('cleanup', 'remove extracted module')
        rimraf(target, function () {
          if (tree) {
            pkg._bundled = tree.children
          }
          next(null, pkg)
        })
      })
    }))
  }))
}

// FIXME: hasGzipHeader / hasTarHeader / untarStream duplicate a lot
// of code from lib/utils/tar.js -- these should be brought together.

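// gzip streams start with the magic bytes 1F 8B, followed by 08 (the
// "deflate" compression method).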
function hasGzipHeader (c) {
  return c[0] === 0x1F && c[1] === 0x8B && c[2] === 0x08
}

function hasTarHeader (c) {
  return c[257] === 0x75 && // tar archives have 7573746172 at position
         c[258] === 0x73 && // 257 and 003030 or 202000 at position 262
         c[259] === 0x74 &&
         c[260] === 0x61 &&
         c[261] === 0x72 &&

       ((c[262] === 0x00 &&
         c[263] === 0x30 &&
         c[264] === 0x30) ||

        (c[262] === 0x20 &&
         c[263] === 0x20 &&
         c[264] === 0x00))
}

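// Open `tarball`, sniff its first chunk to decide whether it needs
// gunzipping, wire up the appropriate pipeline, then replay that first
// chunk and hand the resulting tar.Parse stream to `cb`. The stream gets a
// close() that unpipes the whole chain and closes the file.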
function untarStream (tarball, cb) {
  validate('SF', arguments)
  cb = once(cb)

  var stream
  var file = stream = fs.createReadStream(tarball)
  var tounpipe = [file]
  file.on('error', function (er) {
    er = new Error('Error extracting ' + tarball + ' archive: ' + er.message)
    er.code = 'EREADFILE'
    cb(er)
  })
  file.on('data', function OD (c) {
    if (hasGzipHeader(c)) {
      doGunzip()
    } else if (hasTarHeader(c)) {
      doUntar()
    } else {
      if (file.close) file.close()
      if (file.destroy) file.destroy()
      var er = new Error('Non-gzip/tarball ' + tarball)
      er.code = 'ENOTTARBALL'
      return cb(er)
    }
    file.removeListener('data', OD)
    file.emit('data', c)
    cb(null, stream)
  })

  function doGunzip () {
    var gunzip = stream.pipe(zlib.createGunzip())
    gunzip.on('error', function (er) {
      er = new Error('Error extracting ' + tarball + ' archive: ' + er.message)
      er.code = 'EGUNZIP'
      cb(er)
    })
    tounpipe.push(gunzip)
    stream = gunzip
    doUntar()
  }

  function doUntar () {
    var untar = stream.pipe(tar.Parse())
    untar.on('error', function (er) {
      er = new Error('Error extracting ' + tarball + ' archive: ' + er.message)
      er.code = 'EUNTAR'
      cb(er)
    })
    tounpipe.push(untar)
    stream = untar
    addClose()
  }

  function addClose () {
    stream.close = function () {
      tounpipe.forEach(function (stream) {
        unpipe(stream)
      })

      if (file.close) file.close()
      if (file.destroy) file.destroy()
    }
  }
}