tar/lib/replace.js coverage: 100% statements (113/113), 100% branches (63/63), 100% functions (15/15), 100% lines (107/107)
'use strict'
 
// tar -r
const hlo = require('./high-level-opt.js')
const Pack = require('./pack.js')
const Parse = require('./parse.js')
const fs = require('fs')
 
// Starting at the head of the file, read a Header.
// If the checksum is invalid, that's our position to start writing.
// If it is valid, jump forward by the entry's size (rounded up to a
// 512-byte block boundary) and try again.
// Write the new Pack stream starting there.
 
const Header = require('./header.js')
 
const r = module.exports = (opt_, files, cb) => {
  const opt = hlo(opt_)
 
  if (!opt.file)
    throw new TypeError('file is required')
 
  if (opt.gzip)
    throw new TypeError('cannot append to compressed archives')
 
  if (!files || !Array.isArray(files) || !files.length)
    throw new TypeError('no files or directories specified')
 
  return opt.sync ? replaceSync(opt, files)
    : replace(opt, files, cb)
}
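
// A sketch of typical usage (not part of this module): the tar package
// exposes this function as tar.replace (alias tar.r). The archive name
// and file list below are hypothetical.
//
//   const tar = require('tar')
//
//   // promise style
//   tar.replace({ file: 'archive.tar' }, ['added.txt']).then(() => {
//     // entries appended
//   })
//
//   // synchronous style
//   tar.replace({ file: 'archive.tar', sync: true }, ['added.txt'])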
 
const replaceSync = (opt, files) => {
  const p = new Pack.Sync(opt)
 
  let threw = true
  let fd
  try {
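    // open the archive for in-place read/write; if it doesn't exist yet,
    // fall back to creating it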
    try {
      fd = fs.openSync(opt.file, 'r+')
    } catch (er) {
      if (er.code === 'ENOENT')
        fd = fs.openSync(opt.file, 'w+')
      else
        throw er
    }
 
    const st = fs.fstatSync(fd)
    const headBuf = Buffer.alloc(512)
    let position
 
    POSITION: for (position = 0; position < st.size; position += 512) {
      for (let bufPos = 0, bytes = 0; bufPos < 512; bufPos += bytes) {
        bytes = fs.readSync(
          fd, headBuf, bufPos, headBuf.length - bufPos, position + bufPos
        )
 
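        // 0x1f 0x8b at the start of the file is the gzip magic number,
        // so this is a compressed archive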
        if (position === 0 && headBuf[0] === 0x1f && headBuf[1] === 0x8b)
          throw new Error('cannot append to compressed archives')
 
        if (!bytes)
          break POSITION
      }
 
      const h = new Header(headBuf)
      if (!h.cksumValid)
        break
      const entryBlockSize = 512 * Math.ceil(h.size / 512)
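      // the entry's body would run past EOF, so the archive is truncated;
      // start writing at this header's position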
      if (position + entryBlockSize + 512 > st.size)
        break
      // skip ahead over all the blocks for the body; the 512 bytes for
      // the header we just parsed are added by the loop increment
      position += entryBlockSize
      if (opt.mtimeCache)
        opt.mtimeCache.set(h.path, h.mtime)
    }
 
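    // stream the new pack data into the file, starting at the position
    // where the valid entries ended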
    p.on('data', c => {
      fs.writeSync(fd, c, 0, c.length, position)
      position += c.length
    })
    p.on('end', _ => fs.closeSync(fd))
 
    files.forEach(file => p.add(file))
    p.end()
    threw = false
  } finally {
    if (threw)
      try { fs.closeSync(fd) } catch (er) {}
  }
}
 
const replace = (opt, files, cb) => {
  const p = new Pack(opt)
 
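  // walk the headers asynchronously, mirroring the scan in replaceSync,
  // to find the offset where new entries should be written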
  const getPos = (fd, size, cb_) => {
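    // on error, close the descriptor before reporting it, since no write
    // stream will be created to take ownership of the fd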
    const cb = (er, pos) => {
      if (er)
        fs.close(fd, _ => cb_(er))
      else
        cb_(null, pos)
    }
 
    let position = 0
    if (size === 0)
      return cb(null, 0)
 
    let bufPos = 0
    const headBuf = Buffer.alloc(512)
    const onread = (er, bytes) => {
      if (er)
        return cb(er)
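      // keep reading until a full 512-byte header is buffered or the
      // read returns no bytes (EOF)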
      bufPos += bytes
      if (bufPos < 512 && bytes)
        return fs.read(
          fd, headBuf, bufPos, headBuf.length - bufPos,
          position + bufPos, onread
        )
 
      if (position === 0 && headBuf[0] === 0x1f && headBuf[1] === 0x8b)
        return cb(new Error('cannot append to compressed archives'))
 
      // truncated header
      if (bufPos < 512)
        return cb(null, position)
 
      const h = new Header(headBuf)
      if (!h.cksumValid)
        return cb(null, position)
 
      const entryBlockSize = 512 * Math.ceil(h.size / 512)
      if (position + entryBlockSize + 512 > size)
        return cb(null, position)
 
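      // skip the 512-byte header plus the body, rounded up to a full block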
      position += entryBlockSize + 512
      if (position >= size)
        return cb(null, position)
 
      if (opt.mtimeCache)
        opt.mtimeCache.set(h.path, h.mtime)
      bufPos = 0
      fs.read(fd, headBuf, 0, 512, position, onread)
    }
    fs.read(fd, headBuf, 0, 512, position, onread)
  }
 
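  // open (or create) the archive, locate the append position, then pipe
  // the new Pack stream into a write stream anchored at that offset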
  const promise = new Promise((resolve, reject) => {
    p.on('error', reject)
    const onopen = (er, fd) => {
      if (er) {
        if (er.code === 'ENOENT')
          return fs.open(opt.file, 'w+', onopen)
        return reject(er)
      }
      fs.fstat(fd, (er, st) => {
        if (er)
          return reject(er)
        getPos(fd, st.size, (er, position) => {
          if (er)
            return reject(er)
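          // reuse the already-open descriptor; writes begin at the
          // computed append position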
          const stream = fs.createWriteStream(opt.file, {
            fd: fd,
            flags: 'r+',
            start: position
          })
          p.pipe(stream)
          stream.on('error', reject)
          stream.on('close', resolve)
          files.forEach(file => p.add(file))
          p.end()
        })
      })
    }
    fs.open(opt.file, 'r+', onopen)
  })
 
  return cb ? promise.then(cb, cb) : promise
}