tar/lib/pack.js

100% Statements 194/194
100% Branches 101/101
100% Functions 41/41
100% Lines 191/191
'use strict'
 
// A readable tar stream creator
// Technically, this is a transform stream that you write paths into,
// and tar format comes out of.
// The `add()` method is like `write()` but returns this,
// and `end()` returns `this` as well, so you can
// do `new Pack(opt).add('files').add('dir').end().pipe(output)`
// You could also do something like:
// streamOfPaths().pipe(new Pack()).pipe(new fs.WriteStream('out.tar'))
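//
// A quick sketch of the first pattern (illustrative only; the option values
// and file names below are assumptions, not part of this module):
//
//   const Pack = require('./pack.js')
//   const fs = require('fs')
//   const out = fs.createWriteStream('archive.tgz')
//   new Pack({ cwd: process.cwd(), gzip: true })
//     .add('lib')
//     .add('package.json')
//     .end()
//     .pipe(out)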
 
class PackJob {
  constructor (path, absolute) {
    this.path = path || './'
    this.absolute = absolute
    this.stat = null
    this.readdir = null
    this.pending = false
    this.ignore = false
    this.piped = false
  }
}
 
const MiniPass = require('minipass')
const zlib = require('minizlib')
const WriteEntry = require('./write-entry.js')
const WriteEntrySync = WriteEntry.Sync
const Yallist = require('yallist')
const EOF = Buffer.alloc(1024)
const ONSTAT = Symbol('onStat')
const ENDED = Symbol('ended')
const QUEUE = Symbol('queue')
const CURRENT = Symbol('current')
const PROCESS = Symbol('process')
const PROCESSING = Symbol('processing')
const PROCESSJOB = Symbol('processJob')
const JOBS = Symbol('jobs')
const JOBDONE = Symbol('jobDone')
const ADDENTRY = Symbol('addEntry')
const STAT = Symbol('stat')
const READDIR = Symbol('readdir')
const ONREADDIR = Symbol('onreaddir')
const PIPE = Symbol('pipe')
const ENTRY = Symbol('entry')
const WRITEENTRYCLASS = Symbol('writeEntryClass')
const WRITE = Symbol('write')
const ONDRAIN = Symbol('ondrain')
 
const fs = require('fs')
const path = require('path')
 
class Pack extends MiniPass {
  constructor (opt) {
    super(opt)
    opt = opt || Object.create(null)
    this.opt = opt
    this.cwd = opt.cwd || process.cwd()
    this.maxReadSize = opt.maxReadSize
    this.preservePaths = !!opt.preservePaths
    this.strict = !!opt.strict
    this.prefix = (opt.prefix || '').replace(/(\\|\/)+$/, '')
    this.linkCache = opt.linkCache || new Map()
    this.statCache = opt.statCache || new Map()
    this.readdirCache = opt.readdirCache || new Map()
    this[WRITEENTRYCLASS] = WriteEntry
    if (typeof opt.onwarn === 'function')
      this.on('warn', opt.onwarn)
 
    this.zip = null
    if (opt.gzip) {
      if (typeof opt.gzip !== 'object')
        opt.gzip = {}
      this.zip = new zlib.Gzip(opt.gzip)
      this.zip.on('data', chunk => super.write(chunk))
      this.zip.on('end', _ => super.end())
      this.zip.on('drain', _ => {
        this[ONDRAIN]()
      })
    } else
      this.on('drain', this[ONDRAIN])
 
    this.portable = !!opt.portable
    this.noDirRecurse = !!opt.noDirRecurse
 
    this.filter = typeof opt.filter === 'function' ? opt.filter : _ => true
 
    this[QUEUE] = new Yallist()
    this[JOBS] = 0
    this.jobs = +opt.jobs || 4
    this[PROCESSING] = false
    this[ENDED] = false
  }
 
  [WRITE] (chunk) {
    return super.write(chunk)
  }
 
  add (path) {
    this.write(path)
    return this
  }
 
  end (path) {
    if (path)
      this.write(path)
    this[ENDED] = true
    this[PROCESS]()
    return this
  }
 
  write (path) {
    if (this[ENDED])
      throw new Error('write after end')
 
    this[ADDENTRY](path)
    return this.flowing
  }
 
  [ADDENTRY] (p) {
    const absolute = path.resolve(this.cwd, p)
    if (this.prefix)
      p = this.prefix + '/' + p
    this[QUEUE].push(new PackJob(p, absolute))
    this[PROCESS]()
  }
 
  [STAT] (job) {
    job.pending = true
    this[JOBS] += 1
    fs.lstat(job.absolute, (er, stat) => {
      job.pending = false
      this[JOBS] -= 1
      if (er)
        return this.emit('error', er)
      this[ONSTAT](job, stat)
    })
  }
 
  [ONSTAT] (job, stat) {
    this.statCache.set(job.absolute, stat)
    job.stat = stat
 
    // now we have the stat, we can filter it.
    if (!this.filter(job.path, stat))
      job.ignore = true
 
    this[PROCESS]()
  }
 
  [READDIR] (job) {
    job.pending = true
    this[JOBS] += 1
    fs.readdir(job.absolute, (er, entries) => {
      job.pending = false
      this[JOBS] -= 1
      if (er)
        return this.emit('error', er)
      this[ONREADDIR](job, entries)
    })
  }
 
  [ONREADDIR] (job, entries) {
    this.readdirCache.set(job.absolute, entries)
    job.readdir = entries
    this[PROCESS]()
  }
 
  [PROCESS] () {
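    // Walk the queue from the head, starting jobs until the concurrency
    // limit (this.jobs) is reached.  Once end() has been called and all
    // jobs have drained, write the 1024 zero bytes that terminate a tar
    // archive and close the stream.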
    if (this[PROCESSING])
      return
 
    this[PROCESSING] = true
    for (let w = this[QUEUE].head;
         w !== null && this[JOBS] < this.jobs;
         w = w.next) {
      this[PROCESSJOB](w.value)
    }
    this[PROCESSING] = false
 
    if (this[ENDED] && !this[QUEUE].length && this[JOBS] === 0) {
      if (this.zip)
        this.zip.end(EOF)
      else {
        super.write(EOF)
        super.end()
      }
    }
  }
 
  get [CURRENT] () {
    return this[QUEUE] && this[QUEUE].head && this[QUEUE].head.value
  }
 
  [JOBDONE] (job) {
    this[QUEUE].shift()
    this[JOBS] -= 1
    this[PROCESS]()
  }
 
  [PROCESSJOB] (job) {
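    // Move a single job forward: stat it, apply the filter, readdir if it
    // is a directory, create its WriteEntry, and pipe it once it reaches
    // the head of the queue.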
    if (job.pending)
      return
 
    if (!job.stat) {
      if (this.statCache.has(job.absolute))
        this[ONSTAT](job, this.statCache.get(job.absolute))
      else
        this[STAT](job)
    }
    if (!job.stat)
      return
 
    // filtered out!
    if (job.ignore) {
      if (job === this[CURRENT])
        this[QUEUE].shift()
      return
    }
 
    if (!this.noDirRecurse && job.stat.isDirectory() && !job.readdir) {
      if (this.readdirCache.has(job.absolute))
        this[ONREADDIR](job, this.readdirCache.get(job.absolute))
      else
        this[READDIR](job)
      if (!job.readdir)
        return
    }
 
    if (!job.entry) {
      job.entry = this[ENTRY](job)
      if (!job.entry) {
        job.ignore = true
        return
      }
    }
 
    if (job === this[CURRENT] && !job.piped)
      this[PIPE](job)
  }
 
  warn (msg, data) {
    return this.emit('warn', msg, data)
  }
 
  [ENTRY] (job) {
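    // Create the WriteEntry for this job.  When the entry emits 'end' the
    // job is done; a synchronous failure is surfaced as an 'error' event.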
    this[JOBS] += 1
    try {
      return new this[WRITEENTRYCLASS](job.path, {
        onwarn: (msg, data) => {
          this.warn(msg, data)
        },
        cwd: this.cwd,
        absolute: job.absolute,
        preservePaths: this.preservePaths,
        maxReadSize: this.maxReadSize,
        strict: this.strict,
        portable: this.portable,
        linkCache: this.linkCache,
        statCache: this.statCache
      }).on('end', _ => {
        this[JOBDONE](job)
      })
    } catch (er) {
      this.emit('error', er)
    }
  }
 
  [ONDRAIN] () {
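    // The destination (or the gzip stream) has drained; resume the entry
    // that is currently being piped, if any.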
    if (this[CURRENT] && this[CURRENT].entry)
      this[CURRENT].entry.resume()
  }
 
  // like .pipe() but using super, because our write() is special
  [PIPE] (job) {
    job.piped = true
 
    if (job.readdir)
      job.readdir.forEach(entry => {
        const base = job.path === './' ? '' : job.path.replace(/\/*$/, '/')
        this[ADDENTRY](base + entry)
      })
 
    const source = job.entry
    const zip = this.zip
 
    if (zip)
      source.on('data', chunk => {
        if (!zip.write(chunk))
          source.pause()
      })
    else
      source.on('data', chunk => {
        if (!super.write(chunk))
          source.pause()
      })
  }
 
  resume () {
    if (this.zip)
      this.zip.resume()
    return super.resume()
  }
 
  pause () {
    if (this.zip)
      this.zip.pause()
    return super.pause()
  }
}
 
class PackSync extends Pack {
  constructor (opt) {
    super(opt)
    this[WRITEENTRYCLASS] = WriteEntrySync
  }
 
  // pause/resume are no-ops in sync streams.
  pause () {}
  resume () {}
 
  [STAT] (job) {
    this[ONSTAT](job, fs.lstatSync(job.absolute))
  }
 
  [READDIR] (job, stat) {
    this[ONREADDIR](job, fs.readdirSync(job.absolute))
  }
 
  // gotta get it all in this tick
  [PIPE] (job) {
    const source = job.entry
    const zip = this.zip
 
    if (job.readdir)
      job.readdir.forEach(entry => {
        this[ADDENTRY](job.path + '/' + entry)
      })
 
    if (zip)
      source.on('data', chunk => {
        zip.write(chunk)
      })
    else
      source.on('data', chunk => {
        super[WRITE](chunk)
      })
  }
}
 
Pack.Sync = PackSync
 
module.exports = Pack
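
// A minimal sketch of the synchronous variant (file names are placeholders;
// this mirrors how a caller might drive Pack.Sync, not part of the module):
//
//   const fs = require('fs')
//   const p = new Pack.Sync({ cwd: process.cwd() })
//   p.pipe(fs.createWriteStream('archive.tar'))
//   p.add('some-dir')
//   p.end()
//
// Pack.Sync stats, reads, and writes each entry in the same tick, so by the
// time end() returns, the archive data has already been produced.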