High level API #140

Open · wants to merge 19 commits into master
4 changes: 3 additions & 1 deletion lib/pack-codec.js
@@ -89,7 +89,9 @@ function decodePack(emit) {
state = state(chunk[i], i, chunk);
position++;
}
if (!state) return;
if (!state) {
return emit();
}
if (state !== $checksum) sha1sum.update(chunk);
var buff = inf.flush();
if (buff.length) {
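For context, a hedged sketch of how a consumer might drive decodePack, assuming js-git's chunk-writer convention: the returned function is fed binary chunks, emit receives each decoded item, and emit() with no arguments marks the end of the pack, which is the path the added return emit() now takes.

var decodePack = require('./lib/pack-codec').decodePack;

var write = decodePack(function (item) {
  if (item === undefined) {
    // End of the pack stream; with this change the codec also lands here
    // when the state machine completes inside a chunk.
    return;
  }
  // `item` is one decoded pack item (the version/count header first,
  // then each object entry).
});

// Feed raw binary chunks of a packfile as they arrive (`chunk` is assumed
// to be a Uint8Array of pack data).
write(chunk);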
87 changes: 87 additions & 0 deletions lib/pack-index.js
@@ -0,0 +1,87 @@
var bodec = require('bodec');
var sha1 = require('git-sha1');

exports.parseIndex = parseIndex;

function parseIndex(buffer) {
if (readUint32(buffer, 0) !== 0xff744f63 ||
readUint32(buffer, 4) !== 0x00000002) {
throw new Error("Only v2 pack indexes supported");
}

// Get the number of hashes in index
// This is the value of the last fan-out entry
var hashOffset = 8 + 255 * 4;
var length = readUint32(buffer, hashOffset);
hashOffset += 4;
var crcOffset = hashOffset + 20 * length;
var lengthOffset = crcOffset + 4 * length;
var largeOffset = lengthOffset + 4 * length;
var checkOffset = largeOffset;
var indexes = new Array(length);
for (var i = 0; i < length; i++) {
var start = hashOffset + i * 20;
var hash = bodec.toHex(bodec.slice(buffer, start, start + 20));
var crc = readUint32(buffer, crcOffset + i * 4);
var offset = readUint32(buffer, lengthOffset + i * 4);
if (offset & 0x80000000) {
offset = largeOffset + (offset & 0x7fffffff) * 8;
checkOffset = Math.max(checkOffset, offset + 8);
offset = readUint64(buffer, offset);
}
indexes[i] = {
hash: hash,
offset: offset,
crc: crc
};
}
var packChecksum = bodec.toHex(bodec.slice(buffer, checkOffset, checkOffset + 20));
var checksum = bodec.toHex(bodec.slice(buffer, checkOffset + 20, checkOffset + 40));
if (sha1(bodec.slice(buffer, 0, checkOffset + 20)) !== checksum) {
throw new Error("Checksum mismatch");
}

var byHash = {};
indexes.sort(function (a, b) {
return a.offset - b.offset;
});
indexes.forEach(function (data) {
byHash[data.hash] = {
offset: data.offset,
crc: data.crc,
};
});
var offsets = indexes.map(function (entry) {
return entry.offset;
}).sort(function (a, b) {
return a - b;
});

return {
offsets: offsets,
byHash: byHash,
checksum: packChecksum
};
}

function readUint32(buffer, offset) {
return (buffer[offset] << 24 |
buffer[offset + 1] << 16 |
buffer[offset + 2] << 8 |
buffer[offset + 3] << 0) >>> 0;
}

// Yes this will lose precision over 2^53, but that can't be helped when
// returning a single integer.
// We simply won't support packfiles over 8 petabytes. I'm ok with that.
function readUint64(buffer, offset) {
var hi = (buffer[offset] << 24 |
buffer[offset + 1] << 16 |
buffer[offset + 2] << 8 |
buffer[offset + 3] << 0) >>> 0;
var lo = (buffer[offset + 4] << 24 |
buffer[offset + 5] << 16 |
buffer[offset + 6] << 8 |
buffer[offset + 7] << 0) >>> 0;
return hi * 0x100000000 + lo;
}
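For illustration, a hedged usage sketch of the new module; the buffer source and lookup hash are hypothetical, and only parseIndex and its return shape come from the code above.

var parseIndex = require('./lib/pack-index').parseIndex;

// `buffer` is assumed to hold the raw bytes of a v2 pack index (*.idx) file.
var index = parseIndex(buffer);

// byHash maps an object's hex SHA-1 to its byte offset and CRC inside the packfile.
var someHash = "0123456789abcdef0123456789abcdef01234567"; // hypothetical hash
var entry = index.byHash[someHash];
if (entry) {
  console.log("offset:", entry.offset, "crc:", entry.crc);
}

// offsets is sorted ascending, so an entry's length in the pack can be derived
// from the next offset; checksum is the packfile's own SHA-1 in hex.
console.log(index.offsets.length + " objects, pack checksum " + index.checksum);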
84 changes: 1 addition & 83 deletions mixins/fs-db.js
@@ -7,6 +7,7 @@ var parsePackEntry = require('../lib/pack-codec').parseEntry;
var applyDelta = require('../lib/apply-delta');
var sha1 = require('git-sha1');
var pathJoin = require('path').join;
var parseIndex = require('../lib/pack-index').parseIndex;

// The fs object has the following interface:
// - readFile(path) => binary
@@ -254,255 +255,3 @@ module.exports = function (repo, fs) {
}

};

function parseIndex(buffer) {
if (readUint32(buffer, 0) !== 0xff744f63 ||
readUint32(buffer, 4) !== 0x00000002) {
throw new Error("Only v2 pack indexes supported");
}

// Get the number of hashes in index
// This is the value of the last fan-out entry
var hashOffset = 8 + 255 * 4;
var length = readUint32(buffer, hashOffset);
hashOffset += 4;
var crcOffset = hashOffset + 20 * length;
var lengthOffset = crcOffset + 4 * length;
var largeOffset = lengthOffset + 4 * length;
var checkOffset = largeOffset;
var indexes = new Array(length);
for (var i = 0; i < length; i++) {
var start = hashOffset + i * 20;
var hash = bodec.toHex(bodec.slice(buffer, start, start + 20));
var crc = readUint32(buffer, crcOffset + i * 4);
var offset = readUint32(buffer, lengthOffset + i * 4);
if (offset & 0x80000000) {
offset = largeOffset + (offset &0x7fffffff) * 8;
checkOffset = Math.max(checkOffset, offset + 8);
offset = readUint64(buffer, offset);
}
indexes[i] = {
hash: hash,
offset: offset,
crc: crc
};
}
var packChecksum = bodec.toHex(bodec.slice(buffer, checkOffset, checkOffset + 20));
var checksum = bodec.toHex(bodec.slice(buffer, checkOffset + 20, checkOffset + 40));
if (sha1(bodec.slice(buffer, 0, checkOffset + 20)) !== checksum) {
throw new Error("Checksum mistmatch");
}

var byHash = {};
indexes.sort(function (a, b) {
return a.offset - b.offset;
});
indexes.forEach(function (data) {
byHash[data.hash] = {
offset: data.offset,
crc: data.crc,
};
});
var offsets = indexes.map(function (entry) {
return entry.offset;
}).sort(function (a, b) {
return a - b;
});

return {
offsets: offsets,
byHash: byHash,
checksum: packChecksum
};
}

function readUint32(buffer, offset) {
return (buffer[offset] << 24 |
buffer[offset + 1] << 16 |
buffer[offset + 2] << 8 |
buffer[offset + 3] << 0) >>> 0;
}

// Yes this will lose precision over 2^53, but that can't be helped when
// returning a single integer.
// We simply won't support packfiles over 8 petabytes. I'm ok with that.
function readUint64(buffer, offset) {
var hi = (buffer[offset] << 24 |
buffer[offset + 1] << 16 |
buffer[offset + 2] << 8 |
buffer[offset + 3] << 0) >>> 0;
var lo = (buffer[offset + 4] << 24 |
buffer[offset + 5] << 16 |
buffer[offset + 6] << 8 |
buffer[offset + 7] << 0) >>> 0;
return hi * 0x100000000 + lo;
}
191 changes: 191 additions & 0 deletions mixins/high-level.js
@@ -0,0 +1,191 @@
// -*- mode: js; js-indent-level: 2; -*-

"use strict";

var request = require('../net/request-xhr');
var fetchPackProtocol = require('../net/git-fetch-pack');
var sendPackProtocol = require('../net/git-send-pack');

module.exports = highLevel;

function highLevel(repo, uName, uPass, hostName) {

require('./mem-db')(repo);
require('./create-tree')(repo);
require('./pack-ops')(repo);
require('./walkers')(repo);
require('./formats')(repo);

var httpTransport = require('../net/transport-http')(request);
var transport = httpTransport(hostName, uName, uPass);

repo.clone = clone;
repo.commit = commit;
repo.push = push;
repo.resolveRepo = resolveRepo;
repo.getContentByHash = getContentByHash;
repo.transport = transport;

function remoteRefs(callback) {
var fetchStream = fetchPackProtocol(this.transport, callback);
fetchStream.take(callback);
}

function clone(branch, depth, callback) {
var fetchStream = fetchPackProtocol(this.transport, callback);
fetchStream.take(function (err, refs) {
if (!refs[branch]) {
// create empty branch
repo.updateRef(branch, "0000000000000000000000000000000000000000", function () {
callback('create empty branch '+branch);
});
return;
}

fetchStream.put({
want: refs[branch]
});
if (depth) {
fetchStream.put({
deepen: depth
});
}
fetchStream.put(null);

repo.listRefs(false, function (err, haveRefs) {
Object.values(haveRefs).forEach(function (refhash) {
fetchStream.put({
have: refhash
});
});

fetchStream.put({
done: true
});

fetchStream.take(function (err, channels) {
repo.unpack(channels.pack, {}, function () {
repo.updateRef(branch, refs[branch], function () {
return callback('Repo is cloned to '+refs[branch]);
});
});
});
});
});
}

function commit(branch, changes, metadata, callback) {
repo.readRef(branch, function(err, refHash) {
repo.loadAs('commit', refHash, function(err, parentcommit) {
// Changes to files that already exist
changes.base = parentcommit.tree;
repo.createTree(changes, function(err, treeHash) {
var commitObj = {
tree: treeHash,
author: metadata.author,
message: metadata.message
};

if (refHash != "0000000000000000000000000000000000000000") {
commitObj.parent = refHash;
}

repo.saveAs('commit', commitObj, function(err, commitHash) {
repo.updateRef(branch, commitHash, function(err, res) {
return callback('Commit done.');
});
});
});
});
});
}

function push(branch, callback) {
var self = this;
repo.readRef(branch, function(err, refHash) {
repo.loadAs('commit', refHash, function(err, commit) {
var pushStream = sendPackProtocol(self.transport, callback);
pushStream.take(function() {
if (commit.parents[0] === undefined) {
pushStream.put({ oldhash: "0000000000000000000000000000000000000000", newhash: refHash, ref: branch });
} else {
pushStream.put({ oldhash: commit.parents[0], newhash: refHash, ref: branch });
}
pushStream.put(null);

var hashes = [refHash];
repo.treeWalk(commit.tree, function(err, item) {
function collectHashes(err, object) {
if (object !== undefined) {
hashes.push(object.hash);
item.read(collectHashes);
} else {
repo.pack(hashes, {}, function(err, stream) {
function putHashes(err, packObject) {
if (packObject !== undefined) {
pushStream.put(packObject);
stream.take(putHashes);
} else {
pushStream.put({flush: true});
var takedone = function(_, response) {
if (response && response.progress) {
callback(response.progress);
}
if (response === null) {
return callback(null);
} else {
pushStream.take(takedone);
}
}
pushStream.take(takedone);
}
}

stream.take(putHashes);
});
}
}

item.read(collectHashes);
});
});
});
});
}

function getContentByHash(hash, callback){
repo.loadAs('text', hash, function(err, content){
callback(content);
})
}

function resolveRepo(branch, callback) {
repo.readRef(branch, function(err, refHash) {
repo.loadAs('commit', refHash, function(err, commit) {
var repoStructure = {};
if (commit === undefined || commit.length === 0) {
repoStructure["/"] = {
body: {}
};
return callback(repoStructure);
}

repo.treeWalk(commit.tree, function(err, item) {
function collectFiles(err, object) {
if (object !== undefined && !err) {
repoStructure[object.path] = object;
item.read(collectFiles);
} else {
if (err) {
console.log(err);
}
callback(repoStructure);
}
}

item.read(collectFiles);
});
});
});
}
}
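A hedged usage sketch of the resulting API. Host, credentials, branch name, and file content are placeholders; the require paths assume a file at the js-git repository root; the callback shapes follow the functions above, and the changes/mode shape follows js-git's create-tree mixin and lib/modes helper, which this diff does not show.

var highLevel = require('./mixins/high-level');
var modes = require('./lib/modes'); // js-git's file-mode constants (assumed path)

var repo = {};
highLevel(repo, "username", "password", "https://example.com/user/project.git");

// Clone one branch at depth 1, then list the files it contains.
repo.clone("refs/heads/master", 1, function (msg) {
  console.log(msg); // completion / progress message from clone

  repo.resolveRepo("refs/heads/master", function (structure) {
    Object.keys(structure).forEach(function (path) {
      console.log(path);
    });
  });

  // Commit a change on top of the cloned branch and push it back.
  repo.commit("refs/heads/master", [
    { path: "README.md", mode: modes.file, content: "Hello\n" }
  ], {
    author: { name: "Jane Doe", email: "jane@example.com", date: new Date() },
    message: "Update README"
  }, function () {
    repo.push("refs/heads/master", function (result) {
      if (result === null) console.log("push complete");
    });
  });
});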