diff --git a/lib/pack-codec.js b/lib/pack-codec.js
index 93ac7c1..5b7a09f 100644
--- a/lib/pack-codec.js
+++ b/lib/pack-codec.js
@@ -89,7 +89,9 @@ function decodePack(emit) {
       state = state(chunk[i], i, chunk);
       position++;
     }
-    if (!state) return;
+    if (!state) {
+      return emit();
+    }
     if (state !== $checksum) sha1sum.update(chunk);
     var buff = inf.flush();
     if (buff.length) {
diff --git a/lib/pack-index.js b/lib/pack-index.js
new file mode 100644
index 0000000..812391c
--- /dev/null
+++ b/lib/pack-index.js
@@ -0,0 +1,87 @@
+var bodec = require('bodec');
+var sha1 = require('git-sha1');
+
+exports.parseIndex = parseIndex;
+
+function parseIndex(buffer) {
+  if (readUint32(buffer, 0) !== 0xff744f63 ||
+      readUint32(buffer, 4) !== 0x00000002) {
+    throw new Error("Only v2 pack indexes supported");
+  }
+
+  // Get the number of hashes in index.
+  // This is the value of the last fan-out entry.
+  var hashOffset = 8 + 255 * 4;
+  var length = readUint32(buffer, hashOffset);
+  hashOffset += 4;
+  var crcOffset = hashOffset + 20 * length;
+  var lengthOffset = crcOffset + 4 * length;
+  var largeOffset = lengthOffset + 4 * length;
+  var checkOffset = largeOffset;
+  var indexes = new Array(length);
+  for (var i = 0; i < length; i++) {
+    var start = hashOffset + i * 20;
+    var hash = bodec.toHex(bodec.slice(buffer, start, start + 20));
+    var crc = readUint32(buffer, crcOffset + i * 4);
+    var offset = readUint32(buffer, lengthOffset + i * 4);
+    if (offset & 0x80000000) {
+      offset = largeOffset + (offset & 0x7fffffff) * 8;
+      checkOffset = Math.max(checkOffset, offset + 8);
+      offset = readUint64(buffer, offset);
+    }
+    indexes[i] = {
+      hash: hash,
+      offset: offset,
+      crc: crc
+    };
+  }
+  var packChecksum = bodec.toHex(bodec.slice(buffer, checkOffset, checkOffset + 20));
+  var checksum = bodec.toHex(bodec.slice(buffer, checkOffset + 20, checkOffset + 40));
+  if (sha1(bodec.slice(buffer, 0, checkOffset + 20)) !== checksum) {
+    throw new Error("Checksum mismatch");
+  }
+
+  var byHash = {};
+  indexes.sort(function (a, b) {
+    return a.offset - b.offset;
+  });
+  indexes.forEach(function (data) {
+    byHash[data.hash] = {
+      offset: data.offset,
+      crc: data.crc
+    };
+  });
+  var offsets = indexes.map(function (entry) {
+    return entry.offset;
+  }).sort(function (a, b) {
+    return a - b;
+  });
+
+  return {
+    offsets: offsets,
+    byHash: byHash,
+    checksum: packChecksum
+  };
+}
+
+function readUint32(buffer, offset) {
+  return (buffer[offset] << 24 |
+          buffer[offset + 1] << 16 |
+          buffer[offset + 2] << 8 |
+          buffer[offset + 3] << 0) >>> 0;
+}
+
+// Yes this will lose precision over 2^53, but that can't be helped when
+// returning a single integer.
+// We simply won't support packfiles over 8 petabytes. I'm ok with that.
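+// (Worked example: 2^53 = 9007199254740992 is the first integer that a JS
+// double cannot distinguish from its successor -- 9007199254740993 rounds
+// back down to 9007199254740992 -- so two distinct offsets past 8 PiB could
+// compare equal. Every offset below 2^53 survives hi * 0x100000000 + lo
+// exactly.)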
+function readUint64(buffer, offset) {
+  var hi = (buffer[offset] << 24 |
+            buffer[offset + 1] << 16 |
+            buffer[offset + 2] << 8 |
+            buffer[offset + 3] << 0) >>> 0;
+  var lo = (buffer[offset + 4] << 24 |
+            buffer[offset + 5] << 16 |
+            buffer[offset + 6] << 8 |
+            buffer[offset + 7] << 0) >>> 0;
+  return hi * 0x100000000 + lo;
+}
diff --git a/mixins/fs-db.js b/mixins/fs-db.js
index 12e1cb0..a344723 100644
--- a/mixins/fs-db.js
+++ b/mixins/fs-db.js
@@ -7,6 +7,7 @@ var parsePackEntry = require('../lib/pack-codec').parseEntry;
 var applyDelta = require('../lib/apply-delta');
 var sha1 = require('git-sha1');
 var pathJoin = require('path').join;
+var parseIndex = require('../lib/pack-index').parseIndex;
 
 // The fs object has the following interface:
 // - readFile(path) => binary
@@ -254,86 +255,3 @@ module.exports = function (repo, fs) {
     }
   };
-
-function parseIndex(buffer) {
-  if (readUint32(buffer, 0) !== 0xff744f63 ||
-      readUint32(buffer, 4) !== 0x00000002) {
-    throw new Error("Only v2 pack indexes supported");
-  }
-
-  // Get the number of hashes in index
-  // This is the value of the last fan-out entry
-  var hashOffset = 8 + 255 * 4;
-  var length = readUint32(buffer, hashOffset);
-  hashOffset += 4;
-  var crcOffset = hashOffset + 20 * length;
-  var lengthOffset = crcOffset + 4 * length;
-  var largeOffset = lengthOffset + 4 * length;
-  var checkOffset = largeOffset;
-  var indexes = new Array(length);
-  for (var i = 0; i < length; i++) {
-    var start = hashOffset + i * 20;
-    var hash = bodec.toHex(bodec.slice(buffer, start, start + 20));
-    var crc = readUint32(buffer, crcOffset + i * 4);
-    var offset = readUint32(buffer, lengthOffset + i * 4);
-    if (offset & 0x80000000) {
-      offset = largeOffset + (offset &0x7fffffff) * 8;
-      checkOffset = Math.max(checkOffset, offset + 8);
-      offset = readUint64(buffer, offset);
-    }
-    indexes[i] = {
-      hash: hash,
-      offset: offset,
-      crc: crc
-    };
-  }
-  var packChecksum = bodec.toHex(bodec.slice(buffer, checkOffset, checkOffset + 20));
-  var checksum = bodec.toHex(bodec.slice(buffer, checkOffset + 20, checkOffset + 40));
-  if (sha1(bodec.slice(buffer, 0, checkOffset + 20)) !== checksum) {
-    throw new Error("Checksum mistmatch");
-  }
-
-  var byHash = {};
-  indexes.sort(function (a, b) {
-    return a.offset - b.offset;
-  });
-  indexes.forEach(function (data) {
-    byHash[data.hash] = {
-      offset: data.offset,
-      crc: data.crc,
-    };
-  });
-  var offsets = indexes.map(function (entry) {
-    return entry.offset;
-  }).sort(function (a, b) {
-    return a - b;
-  });
-
-  return {
-    offsets: offsets,
-    byHash: byHash,
-    checksum: packChecksum
-  };
-}
-
-function readUint32(buffer, offset) {
-  return (buffer[offset] << 24 |
-          buffer[offset + 1] << 16 |
-          buffer[offset + 2] << 8 |
-          buffer[offset + 3] << 0) >>> 0;
-}
-
-// Yes this will lose precision over 2^53, but that can't be helped when
-// returning a single integer.
-// We simply won't support packfiles over 8 petabytes. I'm ok with that.
-function readUint64(buffer, offset) {
-  var hi = (buffer[offset] << 24 |
-            buffer[offset + 1] << 16 |
-            buffer[offset + 2] << 8 |
-            buffer[offset + 3] << 0) >>> 0;
-  var lo = (buffer[offset + 4] << 24 |
-            buffer[offset + 5] << 16 |
-            buffer[offset + 6] << 8 |
-            buffer[offset + 7] << 0) >>> 0;
-  return hi * 0x100000000 + lo;
-}
diff --git a/mixins/high-level.js b/mixins/high-level.js
new file mode 100644
index 0000000..79661fc
--- /dev/null
+++ b/mixins/high-level.js
@@ -0,0 +1,191 @@
+// -*- mode: js; js-indent-level: 2; -*-
+
+"use strict";
+
+var request = require('../net/request-xhr');
+var fetchPackProtocol = require('../net/git-fetch-pack');
+var sendPackProtocol = require('../net/git-send-pack');
+
+module.exports = highLevel;
+
+function highLevel(repo, uName, uPass, hostName) {
+
+  require('./mem-db')(repo);
+  require('./create-tree')(repo);
+  require('./pack-ops')(repo);
+  require('./walkers')(repo);
+  require('./formats')(repo);
+
+  var httpTransport = require('../net/transport-http')(request);
+  var transport = httpTransport(hostName, uName, uPass);
+
+  repo.clone = clone;
+  repo.commit = commit;
+  repo.push = push;
+  repo.resolveRepo = resolveRepo;
+  repo.getContentByHash = getContentByHash;
+  repo.transport = transport;
+
+  function remoteRefs(callback) {
+    var fetchStream = fetchPackProtocol(this.transport, callback);
+    fetchStream.take(callback);
+  }
+
+  function clone(branch, depth, callback) {
+    var fetchStream = fetchPackProtocol(this.transport, callback);
+    fetchStream.take(function (err, refs) {
+      if (!refs[branch]) {
+        // create empty branch
+        repo.updateRef(branch, "0000000000000000000000000000000000000000", function () {
+          callback('create empty branch ' + branch);
+        });
+        return;
+      }
+
+      fetchStream.put({
+        want: refs[branch]
+      });
+      if (depth) {
+        fetchStream.put({
+          deepen: depth
+        });
+      }
+      fetchStream.put(null);
+
+      repo.listRefs(false, function (err, haveRefs) {
+        Object.values(haveRefs).forEach(function (refhash) {
+          fetchStream.put({
+            have: refhash
+          });
+        });
+
+        fetchStream.put({
+          done: true
+        });
+
+        fetchStream.take(function (err, channels) {
+          repo.unpack(channels.pack, {}, function () {
+            repo.updateRef(branch, refs[branch], function () {
+              return callback('Repo is cloned to ' + refs[branch]);
+            });
+          });
+        });
+      });
+    });
+  }
+
+  function commit(branch, changes, metadata, callback) {
+    repo.readRef(branch, function (err, refHash) {
+      repo.loadAs('commit', refHash, function (err, parentcommit) {
+        // Changes to files that already exist
+        changes.base = parentcommit.tree;
+        repo.createTree(changes, function (err, treeHash) {
+          var commitObj = {
+            tree: treeHash,
+            author: metadata.author,
+            message: metadata.message
+          };
+
+          if (refHash != "0000000000000000000000000000000000000000") {
+            commitObj.parent = refHash;
+          }
+
+          repo.saveAs('commit', commitObj, function (err, commitHash) {
+            repo.updateRef(branch, commitHash, function (err, res) {
+              return callback('Commit done.');
+            });
+          });
+        });
+      });
+    });
+  }
+
+  function push(branch, callback) {
+    var self = this;
+    repo.readRef(branch, function (err, refHash) {
+      repo.loadAs('commit', refHash, function (err, commit) {
+        var pushStream = sendPackProtocol(self.transport, callback);
+        pushStream.take(function () {
+          if (commit.parents[0] === undefined) {
+            pushStream.put({ oldhash: "0000000000000000000000000000000000000000", newhash: refHash, ref: branch });
+          } else {
+            pushStream.put({ oldhash: commit.parents[0], newhash: refHash, ref: branch });
+          }
+          pushStream.put(null);
+
+          var hashes = [refHash];
+
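+          // Walk the tree of the commit being pushed, collect the hashes of
+          // every reachable object, then pack them and stream the pack to
+          // the server. (Note: this sends all objects reachable from the
+          // commit, not a minimal set negotiated with the remote.)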
+          repo.treeWalk(commit.tree, function (err, item) {
+            function collectHashes(err, object) {
+              if (object !== undefined) {
+                hashes.push(object.hash);
+                item.read(collectHashes);
+              } else {
+                repo.pack(hashes, {}, function (err, stream) {
+                  function putHashes(err, packObject) {
+                    if (packObject !== undefined) {
+                      pushStream.put(packObject);
+                      stream.take(putHashes);
+                    } else {
+                      pushStream.put({flush: true});
+                      var takedone = function (_, response) {
+                        if (response && response.progress) {
+                          callback(response.progress);
+                        }
+                        if (response === null) {
+                          return callback(null);
+                        } else {
+                          pushStream.take(takedone);
+                        }
+                      };
+                      pushStream.take(takedone);
+                    }
+                  }
+
+                  stream.take(putHashes);
+                });
+              }
+            }
+
+            item.read(collectHashes);
+          });
+        });
+      });
+    });
+  }
+
+  function getContentByHash(hash, callback) {
+    repo.loadAs('text', hash, function (err, content) {
+      callback(content);
+    });
+  }
+
+  function resolveRepo(branch, callback) {
+    repo.readRef(branch, function (err, refHash) {
+      repo.loadAs('commit', refHash, function (err, commit) {
+        var repoStructure = {};
+        if (commit === undefined || commit.length === 0) {
+          repoStructure["/"] = {
+            body: {}
+          };
+          return callback(repoStructure);
+        }
+
+        repo.treeWalk(commit.tree, function (err, item) {
+          function collectFiles(err, object) {
+            if (object !== undefined && !err) {
+              repoStructure[object.path] = object;
+              item.read(collectFiles);
+            } else {
+              if (err) {
+                console.log(err);
+              }
+              callback(repoStructure);
+            }
+          }
+
+          item.read(collectFiles);
+        });
+      });
+    });
+  }
+}
diff --git a/mixins/http-db.js b/mixins/http-db.js
new file mode 100644
index 0000000..ccfc05f
--- /dev/null
+++ b/mixins/http-db.js
@@ -0,0 +1,191 @@
+// -*- mode: js; js-indent-level: 2; -*-
+
+// Get refs:
+// $ curl -H "Authorization: Basic bm9uZToxajM4OGRtZDhsZnRvamJuazI3enN0b3BrYXQ2bHZtcXU3dDYwOWh3cmdhdmt4N3Zkbw==" http://172.17.0.2:9001/2tlax-s0uqq-u6kz3a8x06tczjb.git/info/refs
+// 51779a651e7125f07b537cd1785bae642996f1f9 refs/heads/master
+//
+// Get object:
+// $ curl -O -H "Authorization: Basic bm9uZToxajM4OGRtZDhsZnRvamJuazI3enN0b3BrYXQ2bHZtcXU3dDYwOWh3cmdhdmt4N3Zkbw==" http://172.17.0.2:9001/a/a.git/objects/51/779a651e7125f07b537cd1785bae642996f1f9
+//
+// Get packs:
+// curl -H "Authorization: Basic bm9uZToxajM4OGRtZDhsZnRvamJuazI3enN0b3BrYXQ2bHZtcXU3dDYwOWh3cmdhdmt4N3Zkbw==" http://172.17.0.2:9001/2tlax-s0uqq-a997dxaw11u0lyr.git/objects/info/packs
+//
+// P pack-4cb362a32ab3424490c7c3dfe28dc69e4016459c.pack
+
+var request = require('../net/request-xhr');
+var inflate = require('../lib/inflate');
+var codec = require('../lib/object-codec.js');
+var sha1 = require('git-sha1');
+var parseIndex = require('../lib/pack-index').parseIndex;
+var parsePackEntry = require('../lib/pack-codec').parseEntry;
+var applyDelta = require('../lib/apply-delta');
+
+module.exports = mixin;
+var isHash = /^[0-9a-f]{40}$/;
+
+function mixin(repo, username, password, hostName) {
+  var cachedIndexes = {};
+  var headers = {};
+  if (username) {
+    headers.Authorization = "Basic " + btoa(username + ":" + (password || ""));
+  }
+
+  repo.readRef = readRef;
+  repo.listRefs = listRefs;
+
+  repo.loadAs = loadAs;
+  repo.loadRaw = loadRaw;
+
+  repo.hasHash = hasHash;
+
+  function readRef(ref, callback) {
+    return listRefs(null, function (err, out) {
+      console.log("out " + ref);
+      console.log(out);
+      callback(err, out[ref]);
+    });
+  }
+
+  function listRefs(prefix, callback) {
+    return request("GET", hostName + "/info/refs", headers, null, function (err, response) {
+      if (response.statusCode != 200) {
+        return callback("Error code " + response.statusCode, null);
+      }
+      var refs = {};
+      if (response.body) {
+        var regex = prefix && new RegExp("^" + prefix + "[/$]");
+        var sp = response.body.split("\n");
+        for (var i in sp) {
+          var m = sp[i].match(/^([0-9a-f]{40})\t(.*)$/);
+          if (m) {
+            if (regex && !regex.test(m[2])) continue;
+            refs[m[2]] = m[1];
+          }
+        }
+      }
+      console.log(refs);
+      callback(err, refs);
+    }, "text");
+  }
+
+  function hasHash(hash, callback) {
+    return loadRaw(hash, function (err, body) {
+      if (err) return callback(err);
+      return callback(null, !!body);
+    });
+  }
+
+  function loadAs(type, hash, callback) {
+    return loadRaw(hash, function (err, buffer) {
+      if (!buffer) return callback(err);
+      var obj = codec.deframe(buffer, true);
+      if (obj.type !== type) throw new TypeError("Type mismatch " + obj.type + " !== " + type);
+      callback(err, obj.body);
+    });
+  }
+
+  function loadRaw(hash, callback) {
+    return request("GET", hostName + "/objects/" + hash.substr(0, 2) + "/" + hash.substr(2), headers, null, function (err, response) {
+      if (response.statusCode == 200) {
+        var raw;
+        try { raw = inflate(response.body); }
+        catch (err) { return callback(err); }
+        return callback(err, raw);
+      }
+      return loadRawPacked(hash, callback);
+    }, "arraybuffer");
+  }
+
+  function loadRawPacked(hash, callback) {
+    var packHashes = [];
+    return request("GET", hostName + "/objects/info/packs", headers, null, function (err, response) {
+      if (!response.body) return callback(err);
+      response.body.split("\n").forEach(function (line) {
+        var match = line.match(/P pack-([0-9a-f]{40})\.pack/);
+        if (match) packHashes.push(match[1]);
+      });
+      start();
+    }, "text");
+
+    function start() {
+      var packHash = packHashes.pop();
+      var offsets;
+      if (!packHash) return callback();
+      if (!cachedIndexes[packHash]) loadIndex(packHash);
+      else onIndex();
+
+      function loadIndex() {
+        return request("GET", hostName + "/objects/pack/pack-" + packHash + ".idx", headers, null, function (err, response) {
+          var buffer = response.body;
+          if (!buffer) return callback(err);
+          console.log("Looking at index");
+          try {
+            cachedIndexes[packHash] = parseIndex(buffer);
+          }
+          catch (err) {
+            console.log("failure " + err);
+            return callback(err);
+          }
+          console.log("cachedIndexes");
+          console.log(cachedIndexes);
+          onIndex();
+        });
+      }
+
+      function onIndex() {
+        var cached = cachedIndexes[packHash];
+        var packFile = hostName + "/objects/pack/pack-" + packHash + ".pack";
+        var index = cached.byHash[hash];
+        console.log("looking for " + hash + " in " + packHash + " index");
+        console.log(index);
+        if (!index) return start();
+        offsets = cached.offsets;
+        loadChunk(packFile, index.offset, callback);
+      }
+
+      function loadChunk(packFile, start, callback) {
+        var index = offsets.indexOf(start);
+        if (index < 0) {
+          var error = new Error("Can't find chunk starting at " + start);
+          return callback(error);
+        }
+        var end = index + 1 < offsets.length ? offsets[index + 1] : -20;
+        // FIXME git http-backend doesn't actually support Range requests,
+        // so this doesn't work. Will need to download the whole packfile.
+        var headerWithRange = {Authorization: headers.Authorization, Range: "bytes=" + start + "-" + end};
+        console.log("loading chunk " + packFile);
+        console.log(headerWithRange);
+        return request("GET", packFile, headerWithRange, null, function (err, response) {
+          var chunk = response.body;
+          if (!chunk) return callback(err);
+          var raw;
+          try {
+            var entry = parsePackEntry(chunk);
+            if (entry.type === "ref-delta") {
+              return loadRaw.call(repo, entry.ref, onBase);
+            }
+            else if (entry.type === "ofs-delta") {
+              return loadChunk(packFile, start - entry.ref, onBase);
+            }
+            raw = codec.frame(entry);
+          }
+          catch (err) { return callback(err); }
+          callback(null, raw);
+
+          function onBase(err, base) {
+            if (!base) return callback(err);
+            var object = codec.deframe(base);
+            var buffer;
+            try {
+              object.body = applyDelta(entry.body, object.body);
+              buffer = codec.frame(object);
+            }
+            catch (err) { return callback(err); }
+            callback(null, buffer);
+          }
+        });
+      }
+
+    }
+  }
+
+}
diff --git a/net/git-fetch-pack.js b/net/git-fetch-pack.js
index 4e75303..34ab053 100644
--- a/net/git-fetch-pack.js
+++ b/net/git-fetch-pack.js
@@ -1,3 +1,5 @@
+// -*- mode: js; js-indent-level: 2; -*-
+
 "use strict";
 
 var makeChannel = require('culvert');
@@ -105,14 +107,16 @@ function fetchPack(transport, onError) {
       var extra = "";
       if (!capsSent) {
        capsSent = true;
-        if (caps["ofs-delta"]) extra += " ofs-delta";
-        if (caps["thin-pack"]) extra += " thin-pack";
+        var caplist = [];
+        if (caps["ofs-delta"]) caplist.push("ofs-delta");
+        if (caps["thin-pack"]) caplist.push("thin-pack");
         // if (caps["multi_ack_detailed"]) extra += " multi_ack_detailed";
         // else if (caps["multi_ack"]) extra +=" multi_ack";
-        if (caps["side-band-64k"]) extra += " side-band-64k";
-        else if (caps["side-band"]) extra += " side-band";
+        if (caps["side-band-64k"]) caplist.push("side-band-64k");
+        else if (caps["side-band"]) caplist.push("side-band");
         // if (caps["agent"]) extra += " agent=" + agent;
-        if (caps.agent) extra += " agent=" + caps.agent;
+        if (caps.agent) caplist.push("agent=" + caps.agent);
+        extra = " " + caplist.join(" ");
       }
       extra += "\n";
       socket.put("want " + line.want + extra);
diff --git a/net/git-send-pack.js b/net/git-send-pack.js
new file mode 100644
index 0000000..e1455e1
--- /dev/null
+++ b/net/git-send-pack.js
@@ -0,0 +1,142 @@
+// -*- mode: js; js-indent-level: 2; -*-
+"use strict";
+
+var makeChannel = require('culvert');
+var wrapHandler = require('../lib/wrap-handler');
+var bodec = require('bodec');
+
+module.exports = sendPack;
+
+function sendPack(transport, onError) {
+
+  if (!onError) onError = throwIt;
+
+  // Wrap our handler functions to route errors properly.
+  onRef = wrapHandler(onRef, onError);
+  onPush = wrapHandler(onPush, onError);
+
+  var caps = null;
+  var capsSent = false;
+  var refs = {};
+  var haves = {};
+  var havesCount = 0;
+
+  // Create a duplex channel for talking with the agent.
+  var libraryChannel = makeChannel();
+  var agentChannel = makeChannel();
+  var api = {
+    put: libraryChannel.put,
+    drain: libraryChannel.drain,
+    take: agentChannel.take
+  };
+
+  // Start the connection and listen for the response.
+  var socket = transport("git-receive-pack", onError);
+  socket.take(onRef);
+
+  // Return the other half of the duplex API channel.
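+  // (The two channels cross: the caller's put feeds agentChannel, which
+  // this module takes from, while this module's api.put feeds
+  // libraryChannel, which the caller takes from.)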
+  return {
+    put: agentChannel.put,
+    drain: agentChannel.drain,
+    take: libraryChannel.take
+  };
+
+  function onRef(line) {
+    if (line === undefined) {
+      throw new Error("Socket disconnected");
+    }
+    if (line === null) {
+      api.put(refs);
+      api.take(onPush);
+      return;
+    } else if (!caps) {
+      caps = {};
+      Object.defineProperty(refs, "caps", {
+        value: caps
+      });
+      var index = line.indexOf("\0");
+      if (index >= 0) {
+        line.substring(index + 1).split(" ").forEach(function (cap) {
+          var i = cap.indexOf("=");
+          if (i >= 0) {
+            caps[cap.substring(0, i)] = cap.substring(i + 1);
+          } else {
+            caps[cap] = true;
+          }
+        });
+        line = line.substring(0, index);
+      }
+    }
+    var match = line.match(/(^[0-9a-f]{40}) (.*)$/);
+    if (!match) {
+      if (typeof line === "string" && /^ERR/i.test(line)) {
+        throw new Error(line);
+      }
+      throw new Error("Invalid line: " + JSON.stringify(line));
+    }
+    refs[match[2]] = match[1];
+    socket.take(onRef);
+  }
+
+  var packChannel;
+  var progressChannel;
+  var errorChannel;
+
+  function onPush(line) {
+    if (line === undefined) return socket.put();
+    if (line === null) {
+      socket.put(null);
+      return api.take(onPack);
+    }
+    if (line.oldhash) {
+      var extra = "";
+      if (!capsSent) {
+        capsSent = true;
+        var caplist = [];
+        if (caps["ofs-delta"]) caplist.push("ofs-delta");
+        if (caps["thin-pack"]) caplist.push("thin-pack");
+        // if (caps["multi_ack_detailed"]) extra += " multi_ack_detailed";
+        // else if (caps["multi_ack"]) extra +=" multi_ack";
+        if (caps["side-band-64k"]) caplist.push("side-band-64k");
+        else if (caps["side-band"]) caplist.push("side-band");
+        // if (caps["agent"]) extra += " agent=" + agent;
+        if (caps.agent) caplist.push("agent=" + caps.agent);
+        extra = "\0" + caplist.join(" ");
+      }
+      extra += "\n";
+      socket.put(line.oldhash + " " + line.newhash + " " + line.ref + extra);
+      return api.take(onPush);
+    }
+    throw new Error("Invalid push command");
+  }
+
+  function onPack(_, line) {
+    if (line.flush) {
+      socket.put(line);
+      var fwd = function (_, b) {
+        api.put(b);
+        socket.take(fwd);
+      };
+      socket.take(fwd);
+    } else {
+      socket.put({
+        noframe: line
+      });
+    }
+    return api.take(onPack);
+  }
+
+  function onResponse(h) {
+    callback(h);
+  }
+
+}
+
+var defer = require('js-git/lib/defer');
+
+function throwIt(err) {
+  defer(function () {
+    throw err;
+  });
+  // throw err;
+}
diff --git a/net/request-xhr.js b/net/request-xhr.js
index 5bf9064..2f00ae3 100644
--- a/net/request-xhr.js
+++ b/net/request-xhr.js
@@ -1,8 +1,10 @@
+// -*- mode: js; js-indent-level: 2; -*-
+
 "use strict";
 
 module.exports = request;
 
-function request(method, url, headers, body, callback) {
+function request(method, url, headers, body, callback, responseType) {
   if (typeof body === "function") {
     callback = body;
     body = undefined;
@@ -12,7 +14,10 @@ function request(method, url, headers, body, callback) {
   }
   var xhr = new XMLHttpRequest();
   xhr.open(method, url, true);
-  xhr.responseType = "arraybuffer";
+  if (!responseType) {
+    responseType = "arraybuffer";
+  }
+  xhr.responseType = responseType;
 
   Object.keys(headers).forEach(function (name) {
     xhr.setRequestHeader(name, headers[name]);
@@ -26,10 +31,15 @@ function request(method, url, headers, body, callback) {
       resHeaders[line.substring(0, index).toLowerCase()] = line.substring(index + 1).trim();
     });
 
+    var body = xhr.response;
+    if (body && xhr.responseType == "arraybuffer") {
+      body = new Uint8Array(body);
+    }
+
     callback(null, {
       statusCode: xhr.status,
      headers: resHeaders,
-      body: xhr.response && new Uint8Array(xhr.response)
+      body: body
    });
  };
   xhr.send(body);
diff --git a/net/transport-http.js b/net/transport-http.js
index fd3b0c3..493317b 100644
--- a/net/transport-http.js
+++ b/net/transport-http.js
@@ -1,3 +1,4 @@
+// -*- mode: js; js-indent-level: 2; -*-
 "use strict";
 
 var makeChannel = require('culvert');
@@ -81,9 +82,17 @@ module.exports = function (request) {
     function onWrite(item) {
       if (item === undefined) return socket.put();
-      bodyWrite(item);
+      if (item === null || !item.flush) {
+        if (item !== null && item.noframe !== undefined) {
+          bodyParts.push(item.noframe);
+        } else {
+          bodyWrite(item);
+        }
+      }
       socket.take(onWrite);
-      if (item !== "done\n" || !bodyParts.length) return;
+      if (item === null || !item.flush) {
+        if (item !== "done\n" || !bodyParts.length) return;
+      }
       var body = bodec.join(bodyParts);
       bodyParts.length = 0;
       request("POST", gitUrl + "/" + serviceName, headers, body, onResult);
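
---
Usage note (not part of the patch): a minimal sketch of driving the new
high-level mixin end to end. The URL, credentials, and branch are
placeholders, and `lib/modes` is the standard js-git file-mode helper;
per the implementation above, the callbacks receive status strings and
push signals completion by passing null.

    var modes = require('./lib/modes');
    var repo = {};
    require('./mixins/high-level')(repo, "user", "pass",
                                   "http://example.com/repo.git");

    // Clone one branch, commit a file on top of it, and push it back.
    repo.clone("refs/heads/master", 0, function (msg) {
      var changes = [
        { path: "README.md", mode: modes.file, content: "hello\n" }
      ];
      repo.commit("refs/heads/master", changes, {
        author: { name: "Jane Doe", email: "jane@example.com" },
        message: "Add README"
      }, function (msg) {
        repo.push("refs/heads/master", function (msg) {
          if (msg === null) console.log("push complete");
        });
      });
    });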