1 var mkdir = require("mkdirp")
2 , assert = require("assert")
3 , log = require("npmlog")
4 , path = require("path")
6 , retry = require("retry")
7 , writeStreamAtomic = require("fs-write-stream-atomic")
8 , PassThrough = require('readable-stream').PassThrough
9 , npm = require("../npm.js")
10 , inflight = require("inflight")
11 , addLocalTarball = require("./add-local-tarball.js")
12 , cacheFile = require("npm-cache-filename")
// Single export: fetch a remote tarball into npm's cache (with retries and
// shasum verification) and hand it off to the add-local-tarball pipeline.
14 module.exports = addRemoteTarball
// Fetch the tarball at URL `u` into a deterministic tmp-cache path, then
// register it via addLocalTarball(). Concurrent fetches of the same URL are
// coalesced through inflight(). `shasum` is the expected checksum (may be
// falsy), `auth` is passed through to the registry client, `cb_` receives
// (er, data).
// NOTE(review): this listing is truncated — several original lines (error
// guards, closing braces) are missing between the numbered lines below.
16 function addRemoteTarball (u, pkgData, shasum, auth, cb_) {
17 assert(typeof u === "string", "must have module URL")
18 assert(typeof cb_ === "function", "must have callback")
// Wrapper callback: back-fill the expected shasum onto the package data
// before invoking the real (inflight-deduplicated) callback.
20 function cb (er, data) {
24 data._shasum = data._shasum || shasum
// inflight() returns null when another fetch of `u` is already running; our
// callback has then been queued on the in-flight request, so bail out here.
29 cb_ = inflight(u, cb_)
30 if (!cb_) return log.verbose("addRemoteTarball", u, "already in flight; waiting")
31 log.verbose("addRemoteTarball", u, "not in flight; adding")
33 // XXX Fetch direct to cache location, store tarballs under
34 // ${cache}/registry.npmjs.org/pkg/-/pkg-1.2.3.tgz
// Deterministic temp path under npm.tmp, derived from the URL.
35 var tmp = cacheFile(npm.tmp, u)
// Once the download has been written and sha-checked, feed the tmp file
// through the local-tarball code path.
37 function next (er, resp, shasum) {
39 addLocalTarball(tmp, pkgData, shasum, cb)
42 log.verbose("addRemoteTarball", [u, shasum])
// Make sure the tmp directory exists before streaming the download into it.
43 mkdir(path.dirname(tmp), function (er) {
45 addRemoteTarball_(u, tmp, shasum, auth, next)
// Perform the actual download with retry/backoff. An attempt is retried on
// network-level failures (no response at all) and on HTTP 408 / 5xx status
// codes; retry count and timing come from the npm "fetch-retry-*" config.
// Calls cb(er, response, shasum) once retries are exhausted or one attempt
// succeeds.
// NOTE(review): listing is truncated — the options literal below is never
// visibly closed and some guard lines are missing.
49 function addRemoteTarball_ (u, tmp, shasum, auth, cb) {
50 // Tuned to spread 3 attempts over about a minute.
51 // See formula at <https://github.com/tim-kos/node-retry>.
52 var operation = retry.operation({
53 retries: npm.config.get("fetch-retries")
54 , factor: npm.config.get("fetch-retry-factor")
55 , minTimeout: npm.config.get("fetch-retry-mintimeout")
56 , maxTimeout: npm.config.get("fetch-retry-maxtimeout")
59 operation.attempt(function (currentAttempt) {
60 log.info("retry", "fetch attempt " + currentAttempt
61 + " at " + (new Date()).toLocaleTimeString())
62 fetchAndShaCheck(u, tmp, shasum, auth, function (er, response, shasum) {
63 // Only retry on 408, 5xx or no `response`.
64 var sc = response && response.statusCode
65 var statusRetry = !sc || (sc === 408 || sc >= 500)
// operation.retry(er) returns true (and schedules another attempt) while
// retries remain; only in that case do we skip invoking cb for this attempt.
66 if (er && statusRetry && operation.retry(er)) {
67 log.warn("retry", "will retry, error on last attempt: " + er)
70 cb(er, response, shasum)
// Download `u` into `tmp` via the registry client and verify its checksum.
// If an expected `shasum` was supplied, the download is checked against it
// (mismatch errors get the source URL appended for debuggability); if not,
// a shasum is computed anyway so callers can record it. Calls
// cb(er, response, shasum).
// NOTE(review): `sha` (used below via sha.get / sha.check) is not among the
// requires visible at the top of this listing — upstream requires "./sha.js";
// confirm that require exists in the full file.
// NOTE(review): listing is truncated — error guards and closing braces are
// missing between several of the numbered lines below.
75 function fetchAndShaCheck (u, tmp, shasum, auth, cb) {
76 npm.registry.fetch(u, { auth : auth }, function (er, response) {
78 log.error("fetch failed", u)
79 return cb(er, response)
// Atomic write: the file only appears at `tmp` once the stream finishes
// cleanly, so a partial download never shows up at the final path.
82 var tarball = writeStreamAtomic(tmp, { mode: npm.modes.file })
83 tarball.on('error', function (er) {
88 tarball.on("finish", function () {
90 // Well, we weren't given a shasum, so at least sha what we have
91 // in case we want to compare it to something else later
92 return sha.get(tmp, function (er, shasum) {
93 log.silly("fetchAndShaCheck", "shasum", shasum)
94 cb(er, response, shasum)
98 // validate that the url we just downloaded matches the expected shasum.
99 log.silly("fetchAndShaCheck", "shasum", shasum)
100 sha.check(tmp, shasum, function (er) {
101 if (er && er.message) {
102 // add original filename for better debuggability
103 er.message = er.message + "\n" + "From: " + u
105 return cb(er, response, shasum)
109 // 0.8 http streams have a bug, where if they're paused with data in
110 // their buffers when the socket closes, they call `end` before emptying
111 // those buffers, which results in the entire pipeline ending and thus
112 // the point that applied backpressure never being able to trigger a
114 // We work around this by piping into a pass through stream that has
115 // unlimited buffering. The pass through stream is from readable-stream
116 // and is thus a current streams3 implementation that is free of these
118 response.pipe(PassThrough({highWaterMark: Infinity})).pipe(tarball)