diff --git a/.gitignore b/.gitignore deleted file mode 100644 index 349834e..0000000 --- a/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -*.git -node_modules diff --git a/.jshintrc b/.jshintrc deleted file mode 100644 index 953ae25..0000000 --- a/.jshintrc +++ /dev/null @@ -1,12 +0,0 @@ -{ - // Enforcing options, without name - set to false to ignore violations - "-W030": false, // 'Expected an assignment or function call and instead saw an expression.' - "-W058": false, // 'Missing '()' invoking a constructor.' - - "loopfunc": true, - - // Environments - set to true to allow environment variables - "browser": true, - "node": true, - "esnext": true -} diff --git a/BACKERS-2.md b/BACKERS-2.md index b6bfc40..a8fdbae 100644 --- a/BACKERS-2.md +++ b/BACKERS-2.md @@ -24,13 +24,74 @@ After the successful KickStarter, I decided to do a second fundraiser so that I > >Also you will be listed in BACKERS-2.md for all history to see. -TODO: List 11 people who contributed here. + - jden + - othiym23 + - chrisjpowers + - JohnSz + - sindresorhus + - aeby + - maks + - julien51 + - mofoghlu + - JPBarringer + - jeffslofish ## Basic Supporter > Your name will be listed in BACKERS-2.md in the main source tree of js-git. -TODO: List 54 people who contributed here. + - servergrove + - bluntworks + - pdillon + - pizzapanther + - nschneble + - Ohad Assulin + - oxy + - lettertwo + - tmcw + - joeandaverde + - airportyh + - nathanathan + - signalwerk + - ripta + - vaughan + - neilk + - mikehenrty + - vardump + - Peter Burns + - blittle + - Stefan Stoichev + - amaxwell01 + - dannyfritz + - George V. Reilly + - euforic + - gflarity + - generalhenry + - piredman + - Rebecca + - st-luke + - asafy + - alessioalex + - sergi + - diversario + - seriema + - desaintmartin + - DinisCruz + - gotcha + - nikolay + - saintedlama + - begebot + - jbarratt + - mikaelkaron + - colinscroggins + - Eric Elliott + - owenb + - balupton + - fjakobs + - romainhuet + - angelyordanov + - cscott + - ilsken ## Anonymous Supporters diff --git a/BACKERS.md b/BACKERS.md index 5983003..f84931b 100644 --- a/BACKERS.md +++ b/BACKERS.md @@ -294,7 +294,7 @@ Originally JS-Git started at a [kickstarter project][]. This was to enable me t - LSD25 - Nima Gardideh (nemo) - Patrick Collins (pat@burned.com) - - Michael J. Ryan (tracker1) + - Michael J. Ryan ([@tracker1](https://github.com/tracker1)) - technoweenie - David Hayes - Meyer SciTech Solutions, LLC diff --git a/LICENSE b/LICENSE index c968f88..bfa2ab5 100644 --- a/LICENSE +++ b/LICENSE @@ -1,6 +1,6 @@ The MIT License (MIT) -Copyright (c) 2013 Tim Caswell +Copyright (c) 2013-2014 Tim Caswell Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal @@ -19,4 +19,3 @@ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - diff --git a/README.md b/README.md index b4f3c23..257315d 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,265 @@ -js-git -====== +# JS-Git +[![Gitter](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/creationix/js-git?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) -Git Implemented in JavaScript. +This project is a collection of modules that helps in implementing git powered +applications in JavaScript. 
+The original purpose was to enable better
+developer tools for authoring code in restricted environments like Chromebooks
+and tablets. It also enables using git as a database to replace SQL and no-SQL
+data stores in many applications.
+
+This project was initially funded by two crowd-sourced fundraisers. See details
+in [BACKERS.md](BACKERS.md) and [BACKERS-2.md](BACKERS-2.md). Thanks to all of
+you who made this possible!
+
+## Usage
+
+Detailed API docs are contained in the [doc](doc) subfolder of this repository.
+
+In general, you use js-git by creating a plain JS object and then mixing in the
+functionality you need. Here is an example of creating an in-memory database,
+creating some objects, and then walking that tree using the high-level walker
+APIs.
+
+## Creating a repo object
+
+```js
+// This provides symbolic names for the octal modes used by git trees.
+var modes = require('js-git/lib/modes');
+
+// Create a repo by creating a plain object.
+var repo = {};
+
+// This provides an in-memory storage backend that provides the following APIs:
+// - saveAs(type, value) => hash
+// - loadAs(type, hash) => value
+// - saveRaw(hash, binary) =>
+// - loadRaw(hash) => binary
+require('js-git/mixins/mem-db')(repo);
+
+// This adds a high-level API for creating multiple git objects by path.
+// - createTree(entries) => hash
+require('js-git/mixins/create-tree')(repo);
+
+// This adds extra methods for dealing with packfile streams.
+// - unpack(packStream, opts) => hashes
+// - pack(hashes, opts) => packStream
+require('js-git/mixins/pack-ops')(repo);
+
+// This adds in walker algorithms for quickly walking history or a tree.
+// - logWalk(ref|hash) => stream
+// - treeWalk(hash) => stream
+require('js-git/mixins/walkers')(repo);
+
+// This combines parallel requests for the same resource for efficiency under load.
+require('js-git/mixins/read-combiner')(repo);
+
+// This makes the object interface less strict. See its docs for details.
+require('js-git/mixins/formats')(repo);
+```
+
+## Generators vs Callbacks
+
+There are two control-flow styles that you can use to consume js-git APIs. All
+the examples here use `yield` style and assume the code is contained within a
+generator function that's yielding to a tool like [gen-run](https://github.com/creationix/gen-run).
+
+This style requires ES6 generators. This feature is currently available in
+stable Firefox, in stable Chrome behind a user-configurable flag, and in
+node.js 0.11.x or greater behind a command-line flag.
+
+You can also use generators on any ES5 platform if you use a source transform
+like Facebook's [regenerator](http://facebook.github.io/regenerator/) tool.
+
+You can read more about how generators work at [Generators vs Fibers](http://howtonode.org/generators-vs-fibers).
+
+```js
+var run = require('gen-run');
+
+run(function*() {
+  // Blocking logic goes here. You can use yield.
+  var result = yield someAction(withArgs);
+  // The generator pauses at yield and resumes when the data is available.
+  // The rest of your process is not blocked, just this generator body.
+  // If there was an error, it will throw into this generator.
+});
+```
+
+If you can't use this new feature or just plain prefer node-style callbacks, all
+js-git APIs also support that. The way this works is actually quite simple:
+if you don't pass in the callback, the function will return a partially applied
+version of your call expecting just the callback.
+
+```js
+someAction(withArgs, function (err, value) {
+  if (err) return handleMyError(err);
+  // do something with value
+});
+
+// The function would be implemented to support both styles like this.
+function someAction(arg, callback) {
+  if (!callback) return someAction.bind(this, arg);
+  // We now have callback and arg
+}
+```
+
+## Basic Object Creation
+
+Now we have an in-memory git repo useful for testing the network operations or
+just getting to know the available APIs.
+
+In this example, we'll create a blob, create a tree containing that blob, and
+then create a commit containing that tree. This shows how to create git objects
+manually.
+
+```js
+  // First we create a blob from a string. The `formats` mixin allows us to
+  // use a string directly instead of having to pass in a binary buffer.
+  var blobHash = yield repo.saveAs("blob", "Hello World\n");
+
+  // Now we create a tree that is a folder containing the blob as `greeting.txt`.
+  var treeHash = yield repo.saveAs("tree", {
+    "greeting.txt": { mode: modes.file, hash: blobHash }
+  });
+
+  // With that tree, we can create a commit.
+  // Again the `formats` mixin allows us to omit details like committer, date,
+  // and parents. It assumes sane defaults for these.
+  var commitHash = yield repo.saveAs("commit", {
+    author: {
+      name: "Tim Caswell",
+      email: "tim@creationix.com"
+    },
+    tree: treeHash,
+    message: "Test commit\n"
+  });
+```
+
+## Basic Object Loading
+
+We can read objects back one at a time using `loadAs`.
+
+```js
+// Reading the file "greeting.txt" from a commit.
+
+// We first read the commit.
+var commit = yield repo.loadAs("commit", commitHash);
+// We then read the tree using `commit.tree`.
+var tree = yield repo.loadAs("tree", commit.tree);
+// We then read the file using the entry hash in the tree.
+var file = yield repo.loadAs("blob", tree["greeting.txt"].hash);
+// file is now a binary buffer.
+```
+
+When using the `formats` mixin, there are two new types for `loadAs`:
+`"text"` and `"array"`.
+
+```js
+// When you're sure the file contains unicode text, you can load it as text directly.
+var fileAsText = yield repo.loadAs("text", blobHash);
+
+// Also if you prefer array format, you can load a directory as an array.
+var entries = yield repo.loadAs("array", treeHash);
+entries.forEach(function (entry) {
+  // entry contains {name, mode, hash}
+});
+```
+
+## Using Walkers
+
+Now that we have a repo with some minimal data in it, we can query it. Since we
+included the `walkers` mixin, we can walk the history as a linear stream or walk
+the file tree as a depth-first linear stream.
+
+```js
+// Create a log stream starting at the commit we just made.
+// You could also use symbolic refs like `refs/heads/master` for repos that
+// support them.
+var logStream = yield repo.logWalk(commitHash);
+
+// Looping through the stream is as simple as repeatedly waiting on `read`.
+var commit, object;
+while (commit = yield logStream.read(), commit !== undefined) {
+
+  console.log(commit);
+
+  // We can also loop through all the files of each commit version.
+  var treeStream = yield repo.treeWalk(commit.tree);
+  while (object = yield treeStream.read(), object !== undefined) {
+    console.log(object);
+  }
+
+}
+```
+
+## Filesystem Style Interface
+
+If you feel that creating a blob, then creating a tree, then creating the parent
+tree, etc., is a lot of work to save just one file, I agree. While writing the
+tedit app, I discovered a nice high-level abstraction that you can mixin to make
+this much easier.
+This is the `create-tree` mixin referenced in the above
+config.
+
+```js
+// We wish to create a tree that contains `www/index.html` and `README.md` files.
+// This will create these two blobs, create a tree for `www` and then create a
+// tree for the root containing `README.md` and the newly created `www` tree.
+var treeHash = yield repo.createTree({
+  "www/index.html": {
+    mode: modes.file,
+    content: "<h1>Hello</h1>\n<p>This is an HTML page?</p>\n"
+  },
+  "README.md": {
+    mode: modes.file,
+    content: "# Sample repo\n\nThis is a sample\n"
+  }
+});
+```
+
+This is great for creating several files at once, but it can also be used to
+edit existing trees by adding new files, changing existing files, or deleting
+existing entries.
+
+```js
+var changes = [
+  {
+    path: "www/index.html" // Leaving out mode means to delete the entry.
+  },
+  {
+    path: "www/app.js", // Create a new file in the existing directory.
+    mode: modes.file,
+    content: "// this is a js file\n"
+  }
+];
+
+// We need to use array form and specify the base tree hash as `base`.
+changes.base = treeHash;
+
+treeHash = yield repo.createTree(changes);
+```
+
+## Creating Composite Filesystems
+
+The real fun begins when you create composite filesystems using git submodules.
+
+The code that handles this is not packaged as a repo mixin since it spans several
+independent repos. Instead, look to the [git-tree](https://github.com/creationix/git-tree)
+repo for the code. Its interface is still slightly unstable and undocumented,
+but it is used in production by tedit and my node hosting service that
+complements tedit.
+
+Basically this module allows you to perform high-level filesystem style commands
+on a virtual filesystem that consists of many js-git repos. Until there are
+proper docs, you can see how tedit uses it at .
+
+## Mounting Github Repos
+
+I've been asking Github to enable CORS headers on their HTTPS git servers, but
+they've refused to do it. This means that a browser can never clone from github
+because the browser will disallow XHR requests to the domain.
+
+They do, however, offer a REST interface to the raw [git data](https://developer.github.com/v3/git/).
+
+Using this, I wrote a mixin for js-git that uses github *as* the backend store.
+
+Code at . Usage in tedit can be seen at
+.
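+
+For illustration, mounting a repo this way looks roughly like the sketch below.
+The `js-github` module name, its mixin path, and its argument shape are
+assumptions here, not something this repo defines; check that project's own
+docs for the exact API.
+
+```js
+var repo = {};
+
+// Hypothetical usage sketch: mix a github-backed object store into the repo.
+// The repo name and access token are placeholders.
+require('js-github/mixins/github-db')(repo, "creationix/js-git", accessToken);
+
+// The rest of the js-git API then works as usual on top of it.
+var commit = yield repo.loadAs("commit", commitHash);
+```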
diff --git a/SPONSORS.md b/SPONSORS.md
new file mode 100644
index 0000000..1c23536
--- /dev/null
+++ b/SPONSORS.md
@@ -0,0 +1,12 @@
+# Sponsored Development
+
+As a company, you can sponsor development of specific features in the js-git ecosystem.
+
+## In Progress Sponsored Features
+
+ - JS-Git - Encrypted Filesystem - Anonymous
+ - Tedit - Web Runtime - Anonymous
+
+## Completed Sponsored Features
+
+ - Tedit - Live Export to VFS - Anonymous
diff --git a/doc/lib/config-codec.md b/doc/lib/config-codec.md
new file mode 100644
index 0000000..4a95217
--- /dev/null
+++ b/doc/lib/config-codec.md
@@ -0,0 +1,46 @@
+# Config Codec
+
+This module implements a codec for reading and writing git config files (this
+includes the .gitmodules file). As far as I can tell, this is a variant of
+the INI format.
+
+## codec.decode(ini) -> config
+
+Given the text of a config file, returns the data as an object.
+
+The following config:
+
+```ini
+[user]
+  name = Tim Caswell
+  email = tim@creationix.com
+[color]
+  ui = true
+[color "branch"]
+  current = yellow bold
+  local = green bold
+  remote = cyan bold
+```
+
+will parse to this JS object:
+
+```js
+{
+  user: {
+    name: "Tim Caswell",
+    email: "tim@creationix.com"
+  },
+  color: {
+    ui: "true",
+    branch: {
+      current: "yellow bold",
+      local: "green bold",
+      remote: "cyan bold"
+    }
+  }
+}
+```
+
+## codec.encode(config) -> ini
+
+This reverses the conversion and writes a string from a config object.
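+
+For example, a decode/edit/encode round trip looks like this (a minimal sketch;
+the variable names are illustrative):
+
+```js
+var codec = require('js-git/lib/config-codec');
+
+// Parse the config text shown above, change a value, and serialize it back.
+var config = codec.decode(ini);
+config.user.name = "Someone Else";
+var newIni = codec.encode(config);
+```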
diff --git a/doc/lib/deflate.md b/doc/lib/deflate.md
new file mode 100644
index 0000000..b81df87
--- /dev/null
+++ b/doc/lib/deflate.md
@@ -0,0 +1,11 @@
+# Deflate
+
+This module implements a simple interface that, when given data, returns the deflated version. It wraps the pako dependency.
+
+## deflate(inflated) -> deflated
+
+```js
+var deflate = require('js-git/lib/deflate');
+
+var deflated = deflate(original);
+```
diff --git a/doc/lib/inflate-stream.md b/doc/lib/inflate-stream.md
new file mode 100644
index 0000000..4faa15f
--- /dev/null
+++ b/doc/lib/inflate-stream.md
@@ -0,0 +1,23 @@
+# Inflate Stream
+
+This module implements zlib inflate by hand with a special streaming interface.
+This is used in js-git to inflate git object fragments in a pack-stream.
+
+## inflateStream(onEmit, onUnused) -> onInput
+
+```js
+var onInput = inflateStream(onEmit, onUnused);
+
+someStream.on("data", function (chunk) {
+  onInput(null, chunk);
+});
+
+function onEmit(err, out) {
+  if (err) throw err;
+  // out is a chunk of inflated data
+}
+
+function onUnused(chunks) {
+  // chunks is an array of extra buffers or buffer slices.
+}
+```
diff --git a/doc/lib/inflate.md b/doc/lib/inflate.md
new file mode 100644
index 0000000..d96d36f
--- /dev/null
+++ b/doc/lib/inflate.md
@@ -0,0 +1,11 @@
+# Inflate
+
+This module implements a simple interface that, when given deflated data, returns the inflated version.
+
+## inflate(deflated) -> inflated
+
+```js
+var inflate = require('js-git/lib/inflate');
+
+var inflated = inflate(deflated);
+```
diff --git a/doc/lib/object-codec.md b/doc/lib/object-codec.md
new file mode 100644
index 0000000..de74a32
--- /dev/null
+++ b/doc/lib/object-codec.md
@@ -0,0 +1,127 @@
+# Object Codec
+
+This module implements a codec for the binary git object format for blobs, trees, tags, and commits.
+
+This library is useful for writing new storage backends. Normal users will probably
+just use one of the existing mixins for object storage.
+
+## codec.frame({type,body}) -> buffer
+
+This function accepts an object with `type` and `body` properties. The `type`
+property must be one of "blob", "tree", "commit" or "tag". The body can be a
+pre-encoded raw-buffer or a plain javascript value. See the encoder docs below
+for the formats of the different body types.
+
+The returned binary value is the fully framed git object. The sha1 of this is
+the git hash of the object.
+
+```js
+var codec = require('js-git/lib/object-codec');
+var sha1 = require('git-sha1');
+
+var bin = codec.frame({ type: "blob", body: "Hello World\n"});
+var hash = sha1(bin);
+```
+
+## codec.deframe(buffer, decode) -> {type,body}
+
+This function accepts a binary git buffer and returns the `{type,body}` object.
+
+If `decode` is true, then the body will also be decoded into a normal javascript
+value. If `decode` is false or missing, then the raw-buffer will be in body.
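+
+For example, round-tripping the framed blob from the `frame` example above (a
+minimal sketch reusing those variable names):
+
+```js
+// Decode the framed buffer back into a typed object.
+var obj = codec.deframe(bin, true);
+// obj.type is "blob" and obj.body is the decoded body.
+```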
+
+## codec.encoders
+
+This is an object containing the four encoder functions. Each function has the signature:
+
+    encode(body) -> raw-buffer
+
+where body is the JS representation of the type and raw-buffer is the git encoded
+version of that value, but without the type and length framing.
+
+```js
+var encoders = require('js-git/lib/object-codec').encoders;
+var modes = require('js-git/lib/modes');
+```
+
+Blobs must be native binary values (Buffer in node, Uint8Array in the browser).
+It's recommended to either use the `bodec` library to create binary values from
+strings directly or configure your system with the `formats` mixin that allows
+for unicode strings when working with blobs.
+
+```js
+rawBin = encoders.blob(new Uint8Array([1,2,3,4,5,6]));
+rawBin = encoders.blob(bodec.fromUnicode("Hello World"));
+```
+
+Trees are objects with filenames as keys and objects with {mode,hash} as values.
+The modes are integers; it's best to use the modes module to help.
+
+```js
+rawBin = encoders.tree({ "greeting.txt": {
+  mode: modes.file,
+  hash: blobHash
+}});
+```
+
+Commits are objects with required fields {tree,author,message}.
+Since a commit can have zero or more parent commits, you specify the parent
+hashes via the `parents` property as an array of hashes. If there is a single
+parent, you may specify it with `parent` instead.
+
+The `author` field is required and contains {name,email,date}.
+
+Commits also require a `committer` field with the same structure as `author`.
+
+The `date` property of `author` and `committer` is in the format {seconds,offset},
+where `seconds` is a unix timestamp in seconds and `offset` is the number of
+minutes offset for the timezone. (Your local offset can be found with
+`(new Date).getTimezoneOffset()`.)
+
+The `message` field is mandatory and is a simple string.
+
+```js
+rawBin = encoders.commit({
+  tree: treeHash,
+  author: {
+    name: "Tim Caswell",
+    email: "tim@creationix.com",
+    date: {
+      seconds: 1391790910,
+      offset: 7 * 60
+    }
+  },
+  parents: [ parentCommitHash ],
+  message: "This is a test commit\n"
+});
+```
+
+Annotated tags are like commits, except they have different fields.
+
+```js
+rawBin = encoders.tag({
+  object: commitHash,
+  type: "commit",
+  tag: "mytag",
+  tagger: {
+    name: "Tim Caswell",
+    email: "tim@creationix.com",
+    date: {
+      seconds: 1391790910,
+      offset: 7 * 60
+    }
+  },
+  message: "Tag it!\n"
+});
+```
+
+## codec.decoders
+
+This is just like `codec.encoders`, except these functions do the opposite.
+They have the format:
+
+    decode(raw-buffer) -> body
+
+```js
+var commit = decoders.commit(rawCommitBin);
+```
diff --git a/doc/lib/pack-codec.md b/doc/lib/pack-codec.md
new file mode 100644
index 0000000..b08a1f9
--- /dev/null
+++ b/doc/lib/pack-codec.md
@@ -0,0 +1,98 @@
+# Pack Codec
+
+This module implements a codec for packfile streams used in the git network
+protocols as well as the on-disk packfile format.
+
+These are sync stream transforms. Each accepts an emit function and returns a
+write function, and both of these functions have the same interface. You signal
+`end` on the input side by writing undefined (or nothing), and when emit gets
+called with undefined, that is `end` on the output.
+
+Since this is sync, errors are simply thrown. If you want to use this in the
+context of an async stream with back-pressure, it's up to the consumer to handle
+exceptions and write to the input at the correct rate. Basically, to implement
+back-pressure, you only need to keep writing values to the input until enough
+data comes out of the output. It's sync, so by the time `write()` returns,
+`emit()` will have been called as many times as it ever will (without more
+writes).
+
+Here is an example of using `decodePack` in a node push stream that ignores
+backpressure.
+
+```js
+var decodePack = require('js-git/lib/pack-codec').decodePack;
+
+var write = decodePack(onItem);
+stream.on("data", write);
+stream.on("end", write);
+var meta;
+function onItem(item) {
+  if (item === undefined) {
+    // END of Stream
+  }
+  else if (meta === undefined) {
+    meta = item;
+  }
+  else {
+    console.log(item);
+  }
+}
+```
+
+The first output is the meta object:
+
+```js
+{
+  version: 2,
+  num: num-of-objects
+}
+```
+
+## codec.decodePack(emit) -> write
+
+The input is the raw buffer chunks of the packstream. The chunks can be broken
+up at any point, so this is ideal for streaming from a disk or network.
+
+Version is the git pack protocol version, and num is the number of objects that
+will be in this stream.
+
+All output objects after this will be raw git objects:
+
+```js
+{
+  type: type,
+  size: buffer-size,
+  body: raw-buffer,
+  offset: offset-in-stream,
+  [ref: number-or-hash]
+}
+```
+
+There are two extra types here that aren't seen elsewhere: `ofs-delta` and
+`ref-delta`. In both cases, these are a diff that applies on top of another
+object in the stream. The difference is that `ofs-delta` stores a number in
+`ref`, the number of bytes to go back in the stream to find the base object,
+while `ref-delta` includes the full hash of its base object.
+
+## codec.encodePack(emit) -> write
+
+This is the reverse. In fact, if you fed this the output from `decodePack`,
+its output should match the original stream exactly.
+
+The objects don't need as much data as the parser outputs. Specifically, the
+meta object need only contain:
+
+```js
+{ num: num-of-objects }
+```
+
+And the items need only contain:
+
+```js
+{
+  type: type,
+  body: raw-buffer,
+  [ref: number-or-hash]
+}
+```
diff --git a/doc/lib/readme.md b/doc/lib/readme.md
new file mode 100644
index 0000000..62156f8
--- /dev/null
+++ b/doc/lib/readme.md
@@ -0,0 +1,3 @@
+# Library
+
+
diff --git a/doc/mixins/fs-db.md b/doc/mixins/fs-db.md
new file mode 100644
index 0000000..c178435
--- /dev/null
+++ b/doc/mixins/fs-db.md
@@ -0,0 +1,53 @@
+
+# Filesystem Git Database
+
+JS-Git repositories need `loadAs`, `saveAs`, `loadRaw`, `saveRaw`, `readRef`, and
+`updateRef` methods.
+Depending on the backing storage, there are various ways to implement these
+methods.
+The implementation for in-memory storage is `js-git/mixins/mem-db`, and there
+are variants for using Github or IndexedDB for storage.
+
+The `js-git/mixins/fs-db` implementation provides these methods as well, but
+depends on a file system interface providing `readFile`, `readChunk`,
+`writeFile`, and `readDir`.
+These file system methods are implemented by the `git-fs-db` and
+`git-chrome-db` packages.
+
+For the purpose of this document, `=>` implies that the function does not block
+and accepts a Node.js-style callback.
+The arrow points to the type of the result.
+None of these methods need to return a continuable if the nodeback is missing.
+
+The type `binary` stands for whatever binary representation is appropriate for
+the underlying platform.
+For browsers, binary is a `Uint8Array`.
+For Node.js, binary is a `Buffer`.
+
+## readFile(path) => binary | undefined
+
+Reads the entirety of the file at the given path and produces the binary.
+If the file does not exist, readFile provides `undefined` instead.
+
+## readChunk(path, start, end) => binary | undefined
+
+Reads a byte range of the file at the given path.
+The byte range is a half open interval, including the byte at the initial index
+and excluding the byte at the terminal index, such that the end minus the start
+is the length of the resulting binary data.
+The end offset may be negative, in which case it counts back from the end of the
+file: the file's size plus the negative end gives the effective end offset.
+If the file does not exist, readChunk provides `undefined` instead.
+
+## writeFile(path, binary) => undefined
+
+Writes the given bytes to the file at the given path.
+The method creates any directories leading up to the path if they do not already
+exist.
+
+## readDir(path) => array of names | undefined
+
+Reads the names of the entries in the directory at the given path.
+The names are not fully qualified paths, just the name of the entry within the
+given directory.
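+
+A minimal Node.js sketch of this interface follows. This is an illustration,
+not the real `git-fs-db` package, and it assumes Node.js 10+ for the recursive
+`fs.mkdir` option:
+
+```js
+var fs = require('fs');
+var dirname = require('path').dirname;
+
+// Map ENOENT errors to an `undefined` result, per the contract above.
+function missingOk(callback) {
+  return function (err, result) {
+    if (err && err.code === "ENOENT") return callback();
+    callback(err, result);
+  };
+}
+
+exports.readFile = function (path, callback) {
+  fs.readFile(path, missingOk(callback));
+};
+
+exports.readChunk = function (path, start, end, callback) {
+  fs.stat(path, missingOk(function (err, stat) {
+    if (err || !stat) return callback(err);
+    // A negative end counts back from the end of the file.
+    if (end < 0) end = stat.size + end;
+    var buffer = Buffer.alloc(end - start);
+    fs.open(path, "r", function (err, fd) {
+      if (err) return callback(err);
+      // Read the half open interval [start, end) into the buffer.
+      fs.read(fd, buffer, 0, end - start, start, function (err) {
+        fs.close(fd, function () {
+          callback(err, err ? undefined : buffer);
+        });
+      });
+    });
+  }));
+};
+
+exports.writeFile = function (path, binary, callback) {
+  // Create any missing parent directories first.
+  fs.mkdir(dirname(path), { recursive: true }, function (err) {
+    if (err) return callback(err);
+    fs.writeFile(path, binary, callback);
+  });
+};
+
+exports.readDir = function (path, callback) {
+  fs.readdir(path, missingOk(callback));
+};
+```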
diff --git a/doc/mixins/mem-db.md b/doc/mixins/mem-db.md
new file mode 100644
index 0000000..6b41aba
--- /dev/null
+++ b/doc/mixins/mem-db.md
@@ -0,0 +1,27 @@
+# mem-db mixin
+
+This mixin implements the object store interface (normal and raw), keeping the data in memory.
+
+```js
+var memDb = require('js-git/mixins/mem-db');
+var repo = {};
+memDb(repo);
+repo.saveAs("blob", "Hello World", function (err, hash) {
+  if (err) throw err;
+  console.log("Blob saved with hash " + hash);
+});
+```
+
+This attaches the following interfaces onto the repo object passed in:
+
+ - `saveAs(type, body) => hash`
+ - `loadAs(type, hash) => body`
+ - `loadRaw(hash) => raw-binary`
+ - `saveRaw(hash, raw-binary) =>`
+
+All of these functions are async; they accept a callback as the last argument or return a continuable.
+
+```js
+// Example using continuable interface from gen-run generator body.
+var commit = yield repo.loadAs("commit", commitHash);
+```
\ No newline at end of file
diff --git a/doc/mixins/pack-ops.md b/doc/mixins/pack-ops.md
new file mode 100644
index 0000000..71df421
--- /dev/null
+++ b/doc/mixins/pack-ops.md
@@ -0,0 +1,37 @@
+# pack-ops mixin
+
+This mixin adds the ability to consume or create packfile streams.
+
+This depends on the repo already having:
+
+ - `loadRaw(hash) => raw-binary`
+ - `saveRaw(hash, raw-binary) =>`
+
+And then adds:
+
+ - `unpack(stream, opts) => hashes`
+ - `pack(hashes, opts) => stream`
+
+The streams are in simple-stream format, which means they have a `.take(callback)`
+method for pulling items out of the stream.
+
+Example:
+
+```js
+var packOps = require('js-git/mixins/pack-ops');
+packOps(repo);
+
+repo.unpack(stream, opts, function (err, hashes) {
+  // hashes is an array of the hashes of the imported objects.
+});
+
+repo.pack(hashes, opts, function (err, stream) {
+  if (err) throw err;
+  stream.take(onRead);
+  function onRead(err, item) {
+    if (err) throw err;
+    console.log(item);
+    if (item) stream.take(onRead);
+  }
+});
+```
diff --git a/doc/mixins/readme.md b/doc/mixins/readme.md
new file mode 100644
index 0000000..5fb958f
--- /dev/null
+++ b/doc/mixins/readme.md
@@ -0,0 +1,9 @@
+# Mixins
+
+There are three mixins documented here:
+
+- [fs-db](fs-db.md)
+
+- [mem-db](mem-db.md)
+
+- [pack-ops](pack-ops.md)
diff --git a/doc/readme.md b/doc/readme.md
new file mode 100644
index 0000000..8d68f04
--- /dev/null
+++ b/doc/readme.md
@@ -0,0 +1,8 @@
+# js-git documentation
+
+Go to:
+
+- [Library](lib)
+
+- [Mixins](mixins)
+
diff --git a/examples/clone.js b/examples/clone.js
deleted file mode 100644
index 886ddd4..0000000
--- a/examples/clone.js
+++ /dev/null
@@ -1,27 +0,0 @@
-var platform = require('git-node-platform');
-var jsGit = require('../.')(platform);
-var gitRemote = require('git-net')(platform);
-var fsDb = require('git-fs-db')(platform);
-var fs = platform.fs;
-var basename = require('path').basename;
-
-// Create a remote repo
-var url = process.argv[2] || "git://github.com/creationix/conquest.git";
-var remote = gitRemote(url);
-
-// Create a local repo
-var path = basename(remote.pathname);
-var repo = jsGit(fsDb(fs(path)));
-
-console.log("Cloning %s to %s", url, path);
-
-var opts = {
-  onProgress: function (progress) {
-    process.stdout.write(progress);
-  }
-};
-
-repo.fetch(remote, opts, function (err) {
-  if (err) throw err;
-  console.log("Done");
-});
diff --git a/examples/create-harmony.js b/examples/create-harmony.js
deleted file mode 100644
index ab00f23..0000000
--- a/examples/create-harmony.js
+++ /dev/null
@@ -1,45 +0,0 @@
-"use strict";
-let platform = require('git-node-platform');
-let jsGit = require('../.')(platform);
-let fsDb = require('git-fs-db')(platform);
-let fs = platform.fs;
-let run = require('gen-run');
-
-// Create a filesystem backed bare repo
-let repo = jsGit(fsDb(fs("test.git")));
-
-let mock = require('./mock.js');
-
-run(function *() {
-  yield repo.setBranch("master");
-  console.log("Git database Initialized");
-
-  let head;
-  console.log(yield* map(mock.commits, function* (files, message) {
-    return head = yield repo.saveAs("commit", {
-      tree: yield repo.saveAs("tree", yield* map(files, function* (contents) {
-        return {
-          mode: 33188, // 0o100644,
-          hash: yield repo.saveAs("blob", contents)
-        };
-      })),
-      parent: head,
-      author: mock.author,
-      committer: mock.committer,
-      message: message
-    });
-  }));
-
-  yield repo.updateHead(head);
-  console.log("Done");
-
-});
-
-function* map(object, onItem) {
-  let obj = {};
-  for (let key in object) {
-    let value = object[key];
-    obj[key] = yield* onItem(value, key);
-  }
-  return obj;
-}
diff --git a/examples/create.js b/examples/create.js
deleted file mode 100644
index 449d1ba..0000000
--- a/examples/create.js
+++ /dev/null
@@ -1,83 +0,0 @@
-var platform = require('git-node-platform');
-var jsGit = require('../.')(platform);
-var fsDb = require('git-fs-db')(platform);
-var fs = platform.fs;
-
-// Create a filesystem backed bare repo
-var repo = jsGit(fsDb(fs("test.git")));
-
-var mock = require('./mock.js');
-
-repo.setBranch("master", function (err) {
-  if (err) throw err;
-  console.log("Git database Initialized");
-
-  var parent;
- serialEach(mock.commits, function (message, files, next) { - // Start building a tree object. - var tree = {}; - parallelEach(files, function (name, contents, next) { - repo.saveAs("blob", contents, function (err, hash) { - if (err) return next(err); - tree[name] = { - mode: 0100644, - hash: hash - }; - next(); - }); - }, function (err) { - if (err) return next(err); - repo.saveAs("tree", tree, function (err, hash) { - if (err) return next(err); - var commit = { - tree: hash, - parent: parent, - author: mock.author, - committer: mock.committer, - message: message - }; - if (!parent) delete commit.parent; - repo.saveAs("commit", commit, function (err, hash) { - if (err) return next(err); - parent = hash; - repo.updateHead(hash, next); - }); - }); - }); - }, function (err) { - if (err) throw err; - console.log("Done"); - }); - -}); - -// Mini control-flow library -function serialEach(object, fn, callback) { - var keys = Object.keys(object); - next(); - function next(err) { - if (err) return callback(err); - var key = keys.shift(); - if (!key) return callback(); - fn(key, object[key], next); - } -} -function parallelEach(object, fn, callback) { - var keys = Object.keys(object); - var left = keys.length + 1; - var done = false; - keys.forEach(function (key) { - fn(key, object[key], check); - }); - check(); - function check(err) { - if (done) return; - if (err) { - done = true; - return callback(err); - } - if (--left) return; - done = true; - callback(); - } -} diff --git a/examples/ls-remote.js b/examples/ls-remote.js deleted file mode 100644 index 9128185..0000000 --- a/examples/ls-remote.js +++ /dev/null @@ -1,13 +0,0 @@ -var platform = require('git-node-platform'); -var jsGit = require('../.')(platform); -var gitRemote = require('git-net')(platform); - -var repo = jsGit(); - -var url = process.argv[2] || "git://github.com/creationix/conquest.git"; -repo.lsRemote(gitRemote(url), function (err, refs) { - if (err) throw err; - Object.keys(refs).forEach(function (ref) { - console.log(refs[ref] + "\t" + ref); - }); -}); diff --git a/examples/mock.js b/examples/mock.js deleted file mode 100644 index c3c9d00..0000000 --- a/examples/mock.js +++ /dev/null @@ -1,18 +0,0 @@ -// Mock data for generating some history -exports.author = { name: "Tim Caswell", email: "tim@creationix.com" }; -exports.committer = { name: "JS-Git", email: "js-git@creationix.com" }; -exports.commits = { - "Initial Commit\n": { - "README.md": "# This is a test Repo\n\nIt's generated entirely by JavaScript\n" - }, - "Add package.json and blank module\n": { - "README.md": "# This is a test Repo\n\nIt's generated entirely by JavaScript\n", - "package.json": '{\n "name": "awesome-lib",\n "version": "3.1.3",\n "main": "awesome.js"\n}\n', - "awesome.js": 'module.exports = function () {\n throw new Error("TODO: Implement Awesome");\n};\n' - }, - "Implement awesome and bump version to 3.1.4\n": { - "README.md": "# This is a test Repo\n\nIt's generated entirely by JavaScript\n", - "package.json": '{\n "name": "awesome-lib",\n "version": "3.1.4",\n "main": "awesome.js"\n}\n', - "awesome.js": 'module.exports = function () {\n return 42;\n};\n' - } -}; diff --git a/examples/read-harmony.js b/examples/read-harmony.js deleted file mode 100644 index 2e536ad..0000000 --- a/examples/read-harmony.js +++ /dev/null @@ -1,31 +0,0 @@ -"use strict"; -let platform = require('git-node-platform'); -let jsGit = require('../.')(platform); -let fsDb = require('git-fs-db')(platform); -let fs = platform.fs; -let run = require('gen-run'); - -// Create a 
filesystem backed bare repo -let repo = jsGit(fsDb(fs("test.git"))); - -run(start("HEAD")); - -function* start(hashish) { - let hash = yield repo.resolveHashish(hashish); - console.log(hashish, hash); - yield* loadCommit(hash); -} - -function* loadCommit(hash) { - let commit = yield repo.loadAs("commit", hash); - console.log("COMMIT", hash, commit); - let tree = yield repo.loadAs("tree", commit.tree); - console.log("TREE", commit.tree, tree); - for (let entry of tree.values()) { - let blob = yield repo.loadAs("blob", entry.hash); - console.log("BLOB", entry.hash, blob); - } - for (let parent of commit.parents.values()) { - yield* loadCommit(parent); - } -} diff --git a/examples/read.js b/examples/read.js deleted file mode 100644 index fcf82b2..0000000 --- a/examples/read.js +++ /dev/null @@ -1,39 +0,0 @@ -var platform = require('git-node-platform'); -var jsGit = require('../.')(platform); -var fsDb = require('git-fs-db')(platform); -var fs = platform.fs; - -// Create a filesystem backed bare repo -var repo = jsGit(fsDb(fs("test.git"))); - -loadCommit("HEAD"); - -function loadCommit(hashish) { - repo.loadAs("commit", hashish, onCommit); -} - -function onCommit(err, commit, hash) { - if (err) throw err; - console.log("COMMIT", hash, commit); - loadTree(commit.tree); - if (commit.parents) { - commit.parents.forEach(loadCommit); - } -} - -function loadTree(hash) { - repo.loadAs("tree", hash, onTree); -} - -function onTree(err, tree, hash) { - if (err) throw err; - console.log("TREE", hash, tree); - tree.forEach(onEntry); -} - -function onEntry(entry) { - repo.loadAs("blob", entry.hash, function (err, blob) { - if (err) throw err; - console.log("BLOB", entry.hash, blob); - }); -} diff --git a/js-git.js b/js-git.js deleted file mode 100644 index ef02889..0000000 --- a/js-git.js +++ /dev/null @@ -1,834 +0,0 @@ -module.exports = function (platform) { - var applyDelta = require('git-pack-codec/apply-delta.js')(platform); - var pushToPull = require('push-to-pull'); - var parse = pushToPull(require('git-pack-codec/decode.js')(platform)); - platform.agent = platform.agent || "js-git/" + require('./package.json').version; - - return newRepo; - - // platform options are: db, proto, and trace - function newRepo(db, workDir) { - var trace = platform.trace; - var sha1 = platform.sha1; - var bops = platform.bops; - - var encoders = { - commit: encodeCommit, - tag: encodeTag, - tree: encodeTree, - blob: encodeBlob - }; - - var decoders = { - commit: decodeCommit, - tag: decodeTag, - tree: decodeTree, - blob: decodeBlob - }; - - var repo = {}; - - if (db) { - // Git Objects - repo.load = load; // (hashish) -> object - repo.save = save; // (object) -> hash - repo.loadAs = loadAs; // (type, hashish) -> value - repo.saveAs = saveAs; // (type, value) -> hash - repo.remove = remove; // (hashish) - repo.unpack = unpack; // (opts, packStream) - - // Refs - repo.resolveHashish = resolveHashish; // (hashish) -> hash - repo.updateHead = updateHead; // (hash) - repo.getHead = getHead; // () -> ref - repo.setHead = setHead; // (ref) - repo.createRef = createRef; // (ref, hash) - repo.deleteRef = deleteRef; // (ref) - repo.listRefs = listRefs; // (prefix) -> refs - - if (workDir) { - // TODO: figure out API for working repos - } - } - - // Network Protocols - - repo.lsRemote = lsRemote; - if (db) { - repo.fetch = fetch; - repo.push = push; - } - - return repo; - - function load(hashish, callback) { - if (!callback) return load.bind(this, hashish); - var hash; - return resolveHashish(hashish, onHash); - - function 
onHash(err, result) { - if (err) return callback(err); - hash = result; - return db.load(hash, onBuffer); - } - - function onBuffer(err, buffer) { - if (err) return callback(err); - var type, object; - try { - if (sha1(buffer) !== hash) { - throw new Error("Hash checksum failed for " + hash); - } - var pair = deframe(buffer); - type = pair[0]; - buffer = pair[1]; - object = { - type: type, - body: decoders[type](buffer) - }; - } catch (err) { - if (err) return callback(err); - } - if (trace) trace("load", null, hash); - return callback(null, object, hash); - } - } - - function save(object, callback) { - if (!callback) return save.bind(this, object); - var buffer, hash; - try { - buffer = encoders[object.type](object.body); - buffer = frame(object.type, buffer); - hash = sha1(buffer); - } - catch (err) { - return callback(err); - } - return db.save(hash, buffer, onSave); - - function onSave(err) { - if (err) return callback(err); - if (trace) trace("save", null, hash); - return callback(null, hash); - } - } - - function loadAs(type, hashish, callback) { - if (!callback) return loadAs.bind(this, type, hashish); - return load(hashish, onObject); - - function onObject(err, object, hash) { - if (err) return callback(err); - if (object.type !== type) { - return new Error("Expected " + type + ", but found " + object.type); - } - return callback(null, object.body, hash); - } - } - - function saveAs(type, body, callback) { - if (!callback) return saveAs.bind(this, type, body); - return save({ type: type, body: body }, callback); - } - - function remove(hashish, callback) { - if (!callback) return remove.bind(this, hashish); - var hash; - return resolveHashish(hashish, onHash); - - function onHash(err, result) { - if (err) return callback(err); - hash = result; - return db.remove(hash, onRemove); - } - - function onRemove(err) { - if (err) return callback(err); - if (trace) trace("remove", null, hash); - return callback(null, hash); - } - } - - function resolveHashish(hashish, callback) { - if (!callback) return resolveHashish.bind(this, hashish); - hashish = hashish.trim(); - if ((/^[0-9a-f]{40}$/i).test(hashish)) { - return callback(null, hashish.toLowerCase()); - } - if (hashish === "HEAD") return getHead(onBranch); - if ((/^refs\//).test(hashish)) { - return db.read(hashish, checkBranch); - } - return checkBranch(); - - function onBranch(err, ref) { - if (err) return callback(err); - return resolveHashish(ref, callback); - } - - function checkBranch(err, hash) { - if (err) return callback(err); - if (hash) { - return resolveHashish(hash, callback); - } - return db.read("refs/heads/" + hashish, checkTag); - } - - function checkTag(err, hash) { - if (err) return callback(err); - if (hash) { - return resolveHashish(hash, callback); - } - return db.read("refs/tags/" + hashish, final); - } - - function final(err, hash) { - if (err) return callback(err); - if (hash) { - return resolveHashish(hash, callback); - } - return callback(new Error("Cannot find hashish: " + hashish)); - } - } - - function updateHead(hash, callback) { - if (!callback) return updateHead.bind(this, hash); - var ref; - return getHead(onBranch); - - function onBranch(err, result) { - if (err) return callback(err); - ref = result; - return db.write(ref, hash + "\n", callback); - } - } - - function getHead(callback) { - if (!callback) return getHead.bind(this); - return db.read("HEAD", onRead); - - function onRead(err, ref) { - if (err) return callback(err); - if (!ref) return callback(new Error("Missing HEAD")); - var match = 
ref.match(/^ref: *(.*)/); - if (!match) return callback(new Error("Invalid HEAD")); - return callback(null, match[1]); - } - } - - function setHead(branchName, callback) { - if (!callback) return setHead.bind(this, branchName); - var ref = "refs/heads/" + branchName; - return db.write("HEAD", "ref: " + ref + "\n", callback); - } - - function createRef(ref, hash, callback) { - if (!callback) return createRef.bind(this, ref, hash); - return db.write(ref, hash + "\n", callback); - } - - function deleteRef(ref, callback) { - if (!callback) return deleteRef.bind(this, ref); - return db.unlink(ref, callback); - } - - function listRefs(prefix, callback) { - if (!callback) return listRefs.bind(this, prefix); - var branches = {}, list = [], target = prefix; - return db.readdir(target, onNames); - - function onNames(err, names) { - if (err) { - if (err.code === "ENOENT") return shift(); - return callback(err); - } - for (var i = 0, l = names.length; i < l; ++i) { - list.push(target + "/" + names[i]); - } - return shift(); - } - - function shift(err) { - if (err) return callback(err); - target = list.shift(); - if (!target) return callback(null, branches); - return db.read(target, onRead); - } - - function onRead(err, hash) { - if (err) { - if (err.code === "EISDIR") return db.readdir(target, onNames); - return callback(err); - } - if (hash) { - branches[target] = hash.trim(); - return shift(); - } - return db.readdir(target, onNames); - } - } - - function indexOf(buffer, byte, i) { - i |= 0; - var length = buffer.length; - for (;;i++) { - if (i >= length) return -1; - if (buffer[i] === byte) return i; - } - } - - function parseAscii(buffer, start, end) { - var val = ""; - while (start < end) { - val += String.fromCharCode(buffer[start++]); - } - return val; - } - - function parseDec(buffer, start, end) { - var val = 0; - while (start < end) { - val = val * 10 + buffer[start++] - 0x30; - } - return val; - } - - function parseOct(buffer, start, end) { - var val = 0; - while (start < end) { - val = (val << 3) + buffer[start++] - 0x30; - } - return val; - } - - function deframe(buffer) { - var space = indexOf(buffer, 0x20); - if (space < 0) throw new Error("Invalid git object buffer"); - var nil = indexOf(buffer, 0x00, space); - if (nil < 0) throw new Error("Invalid git object buffer"); - var body = bops.subarray(buffer, nil + 1); - var size = parseDec(buffer, space + 1, nil); - if (size !== body.length) throw new Error("Invalid body length."); - return [ - parseAscii(buffer, 0, space), - body - ]; - } - - function frame(type, body) { - return bops.join([ - bops.from(type + " " + body.length + "\0"), - body - ]); - } - - // A sequence of bytes not containing the ASCII character byte - // values NUL (0x00), LF (0x0a), '<' (0c3c), or '>' (0x3e). - // The sequence may not begin or end with any bytes with the - // following ASCII character byte values: SPACE (0x20), - // '.' (0x2e), ',' (0x2c), ':' (0x3a), ';' (0x3b), '<' (0x3c), - // '>' (0x3e), '"' (0x22), "'" (0x27). - function safe(string) { - return string.replace(/(?:^[\.,:;<>"']+|[\0\n<>]+|[\.,:;<>"']+$)/gm, ""); - } - - function formatDate(date) { - var timezone = (date.timeZoneoffset || date.getTimezoneOffset()) / 60; - var seconds = Math.floor(date.getTime() / 1000); - return seconds + " " + (timezone > 0 ? 
"-0" : "0") + timezone + "00"; - } - - function encodePerson(person) { - if (!person.name || !person.email) { - throw new TypeError("Name and email are required for person fields"); - } - return safe(person.name) + - " <" + safe(person.email) + "> " + - formatDate(person.date || new Date()); - } - - function encodeCommit(commit) { - if (!commit.tree || !commit.author || !commit.message) { - throw new TypeError("Tree, author, and message are require for commits"); - } - var parents = commit.parents || (commit.parent ? [ commit.parent ] : []); - if (!Array.isArray(parents)) { - throw new TypeError("Parents must be an array"); - } - var str = "tree " + commit.tree; - for (var i = 0, l = parents.length; i < l; ++i) { - str += "\nparent " + parents[i]; - } - str += "\nauthor " + encodePerson(commit.author) + - "\ncommitter " + encodePerson(commit.committer || commit.author) + - "\n\n" + commit.message; - return bops.from(str); - } - - function encodeTag(tag) { - if (!tag.object || !tag.type || !tag.tag || !tag.tagger || !tag.message) { - throw new TypeError("Object, type, tag, tagger, and message required"); - } - var str = "object " + tag.object + - "\ntype " + tag.type + - "\ntag " + tag.tag + - "\ntagger " + encodePerson(tag.tagger) + - "\n\n" + tag.message; - return bops.from(str + "\n" + tag.message); - } - - function pathCmp(a, b) { - a += "/"; b += "/"; - return a < b ? -1 : a > b ? 1 : 0; - } - - function encodeTree(tree) { - var chunks = []; - Object.keys(tree).sort(pathCmp).forEach(onName); - return bops.join(chunks); - - function onName(name) { - var entry = tree[name]; - chunks.push( - bops.from(entry.mode.toString(8) + " " + name + "\0"), - bops.from(entry.hash, "hex") - ); - } - } - - function encodeBlob(blob) { - if (bops.is(blob)) return blob; - return bops.from(blob); - } - - function decodePerson(string) { - var match = string.match(/^([^<]*) <([^>]*)> ([^ ]*) (.*)$/); - if (!match) throw new Error("Improperly formatted person string"); - var sec = parseInt(match[3], 10); - var date = new Date(sec * 1000); - date.timeZoneoffset = parseInt(match[4], 10) / 100 * -60; - return { - name: match[1], - email: match[2], - date: date - }; - } - - - function decodeCommit(body) { - var i = 0; - var start; - var key; - var parents = []; - var commit = { - tree: "", - parents: parents, - author: "", - committer: "", - message: "" - }; - while (body[i] !== 0x0a) { - start = i; - i = indexOf(body, 0x20, start); - if (i < 0) throw new SyntaxError("Missing space"); - key = parseAscii(body, start, i++); - start = i; - i = indexOf(body, 0x0a, start); - if (i < 0) throw new SyntaxError("Missing linefeed"); - var value = bops.to(bops.subarray(body, start, i++)); - if (key === "parent") { - parents.push(value); - } - else { - if (key === "author" || key === "committer") { - value = decodePerson(value); - } - commit[key] = value; - } - } - i++; - commit.message = bops.to(bops.subarray(body, i)); - return commit; - } - - function decodeTag(body) { - var i = 0; - var start; - var key; - var tag = {}; - while (body[i] !== 0x0a) { - start = i; - i = indexOf(body, 0x20, start); - if (i < 0) throw new SyntaxError("Missing space"); - key = parseAscii(body, start, i++); - start = i; - i = indexOf(body, 0x0a, start); - if (i < 0) throw new SyntaxError("Missing linefeed"); - var value = bops.to(bops.subarray(body, start, i++)); - if (key === "tagger") value = decodePerson(value); - tag[key] = value; - } - i++; - tag.message = bops.to(bops.subarray(body, i)); - return tag; - } - - function decodeTree(body) { - 
var i = 0; - var length = body.length; - var start; - var mode; - var name; - var hash; - var tree = []; - while (i < length) { - start = i; - i = indexOf(body, 0x20, start); - if (i < 0) throw new SyntaxError("Missing space"); - mode = parseOct(body, start, i++); - start = i; - i = indexOf(body, 0x00, start); - name = bops.to(bops.subarray(body, start, i++)); - hash = bops.to(bops.subarray(body, i, i += 20), "hex"); - tree.push({ - mode: mode, - name: name, - hash: hash - }); - } - return tree; - } - - function decodeBlob(body) { - return body; - } - - function lsRemote(remote, callback) { - if (!callback) return lsRemote.bind(this, remote); - var refs; - return remote.discover(onDiscover); - - function onDiscover(err, result) { - if (err) return callback(err); - refs = result; - return remote.close(onClose); - } - - function onClose(err) { - if (err) return callback(err); - return callback(null, refs); - } - } - - function fetch(remote, opts, callback) { - if (!callback) return fetch.bind(this, remote, opts); - var refs, branch, queue, ref, hash; - return remote.discover(onDiscover); - - function onDiscover(err, serverRefs, serverCaps) { - if (err) return callback(err); - refs = serverRefs; - opts.caps = processCaps(opts, serverCaps); - return processWants(refs, opts.want, onWants); - } - - function onWants(err, wants) { - if (err) return callback(err); - opts.wants = wants; - return remote.fetch(repo, opts, onPackStream); - } - - function onPackStream(err, raw) { - if (err) return callback(err); - if (!raw) return remote.close(callback); - var packStream = parse(raw); - return unpack(packStream, opts, onUnpack); - } - - function onUnpack(err) { - if (err) return callback(err); - return remote.close(onClose); - } - - function onClose(err) { - if (err) return callback(err); - queue = Object.keys(refs); - return next(); - } - - function next(err) { - if (err) return callback(err); - ref = queue.shift(); - if (!ref) return setHead(branch, callback); - if (ref === "HEAD" || /{}$/.test(ref)) return next(); - hash = refs[ref]; - if (!branch && (hash === refs.HEAD)) branch = ref.substr(11); - db.has(hash, onHas); - } - - function onHas(err, has) { - if (err) return callback(err); - if (!has) return next(); - return db.write(ref, hash + "\n", next); - } - } - - function processCaps(opts, serverCaps) { - var caps = []; - if (serverCaps["ofs-delta"]) caps.push("ofs-delta"); - if (serverCaps["thin-pack"]) caps.push("thin-pack"); - if (opts.includeTag && serverCaps["include-tag"]) caps.push("include-tag"); - if ((opts.onProgress || opts.onError) && - (serverCaps["side-band-64k"] || serverCaps["side-band"])) { - caps.push(serverCaps["side-band-64k"] ? "side-band-64k" : "side-band"); - if (!opts.onProgress && serverCaps["no-progress"]) { - caps.push("no-progress"); - } - } - if (serverCaps.agent) caps.push("agent=" + platform.agent); - return caps; - } - - // Possible values for `filter` - // "HEAD" - fetch whatever the remote head is - // "refs/heads/master - ref - // ["refs/heads/master"] - list of refs - // "master" - branch - // ["master"] - list of branches - // "0.0.1" - tag - // ["0.0.1"] - list of tags - // function (ref, callback) { callback(null, true); } - interactive - // true - Fetch all remote refs. - function processWants(refs, filter, callback) { - if (filter === null || filter === undefined) { - return defaultWants(refs, callback); - } - filter = Array.isArray(filter) ? arrayFilter(filter) : - typeof filter === "function" ? 
filter = filter : - wantFilter(filter); - - var list = Object.keys(refs); - var wants = {}; - var ref, hash; - return shift(); - function shift() { - ref = list.shift(); - if (!ref) return callback(null, Object.keys(wants)); - hash = refs[ref]; - resolveHashish(ref, onResolve); - } - function onResolve(err, oldHash) { - // Skip refs we already have - if (hash === oldHash) return shift(); - filter(ref, onFilter); - } - function onFilter(err, want) { - if (err) return callback(err); - // Skip refs the user doesn't want - if (want) wants[hash] = true; - return shift(); - } - } - - function defaultWants(refs, callback) { - return listRefs("refs/heads", onRefs); - - function onRefs(err, branches) { - if (err) return callback(err); - var wants = Object.keys(branches); - wants.unshift("HEAD"); - return processWants(refs, wants, callback); - } - } - - function wantMatch(ref, want) { - if (want === "HEAD" || want === null || want === undefined) { - return ref === "HEAD"; - } - if (Object.prototype.toString.call(want) === '[object RegExp]') { - return want.test(ref); - } - if (typeof want === "boolean") return want; - if (typeof want !== "string") { - throw new TypeError("Invalid want type: " + typeof want); - } - return (/^refs\//.test(ref) && ref === want) || - (ref === "refs/heads/" + want) || - (ref === "refs/tags/" + want); - } - - function wantFilter(want) { - return function (ref, callback) { - var result; - try { - result = wantMatch(ref, want); - } - catch (err) { - return callback(err); - } - return callback(null, result); - }; - } - - function arrayFilter(want) { - var length = want.length; - return function (ref, callback) { - var result; - try { - for (var i = 0; i < length; ++i) { - if (result = wantMatch(ref, want[i])) break; - } - } - catch (err) { - return callback(err); - } - return callback(null, result); - }; - } - - function push() { - throw new Error("TODO: Implement repo.fetch"); - } - - function unpack(packStream, opts, callback) { - if (!callback) return unpack.bind(this, packStream, opts); - // TODO: save the stream to the local repo. - var version, num, count = 0, deltas = 0; - - // hashes keyed by offset - var hashes = {}; - var seen = {}; - var toDelete = {}; - var pending = {}; - var queue = []; - - packStream.read(function (err, stats) { - if (err) return callback(err); - version = stats.version; - num = stats.num; - packStream.read(onRead); - }); - function onRead(err, item) { - if (err) return callback(err); - if (opts.onProgress) { - var percent = Math.round(count / num * 100); - opts.onProgress("Receiving objects: " + percent + "% (" + count + "/" + num + ") " + (item ? 
"\r" : "\n")); - count++; - } - if (item === undefined) { - hashes = null; - count = 0; - return checkExisting(); - } - if (item.size !== item.body.length) { - return callback(new Error("Body size mismatch")); - } - var buffer = bops.join([ - bops.from(item.type + " " + item.size + "\0"), - item.body - ]); - var hash = sha1(buffer); - hashes[item.offset] = hash; - var ref = item.ref; - if (ref !== undefined) { - deltas++; - if (item.type === "ofs-delta") { - ref = hashes[item.offset - ref]; - } - var list = pending[ref]; - if (list) list.push(hash); - else pending[ref] = [hash]; - toDelete[hash] = true; - } - else { - seen[hash] = true; - } - - db.save(hash, buffer, function (err) { - if (err) return callback(err); - if (trace) trace("save", null, hash); - packStream.read(onRead); - }); - } - - function checkExisting() { - var list = Object.keys(pending); - var hash; - return pop(); - function pop() { - hash = list.pop(); - if (!hash) return applyDeltas(); - if (toDelete[hash]) return pop(); - return db.has(hash, onHas); - } - function onHas(err, has) { - if (err) return callback(err); - if (has) seen[hash] = true; - return pop(); - } - } - - function applyDeltas() { - Object.keys(pending).forEach(function (ref) { - if (seen[ref]) { - pending[ref].forEach(function (hash) { - queue.push({hash:hash,ref:ref}); - }); - delete pending[ref]; - } - }); - return queue.length ? check() : cleanup(); - } - - function deltaProgress() { - var percent = Math.round(count / deltas * 100); - return "Applying deltas: " + percent + "% (" + count++ + "/" + deltas + ") "; - } - - function check() { - var item = queue.pop(); - if (!item) return applyDeltas(); - if (opts.onProgress) { - opts.onProgress(deltaProgress() + "\r"); - } - db.load(item.ref, function (err, target) { - if (err) return callback(err); - db.load(item.hash, function (err, delta) { - if (err) return callback(err); - target = deframe(target); - delta = deframe(delta); - var buffer = frame(target[0], applyDelta(delta[1], target[1])); - var hash = sha1(buffer); - db.save(hash, buffer, function (err) { - if (err) return callback(err); - var deps = pending[item.hash]; - if (deps) { - pending[hash] = deps; - delete pending[item.hash]; - } - seen[hash] = true; - return check(); - }); - }); - }); - } - - function cleanup() { - if (opts.onProgress) { - opts.onProgress(deltaProgress() + "\n"); - } - var hashes = Object.keys(toDelete); - next(); - function next(err) { - if (err) return callback(err); - var hash = hashes.pop(); - if (!hash) return callback(); - remove(hash, next); - } - } - } - - } - -}; \ No newline at end of file diff --git a/lib/apply-delta.js b/lib/apply-delta.js new file mode 100644 index 0000000..5357ac3 --- /dev/null +++ b/lib/apply-delta.js @@ -0,0 +1,61 @@ +var bodec = require('bodec'); + +module.exports = applyDelta; + +function applyDelta(delta, base) { + var deltaOffset = 0; + + if (base.length !== readLength()) { + throw new Error("Base length mismatch"); + } + + // Create a new output buffer with length from header. + var outOffset = 0; + var out = bodec.create(readLength()); + + while (deltaOffset < delta.length) { + var byte = delta[deltaOffset++]; + // Copy command. Tells us offset in base and length to copy. 
+ if (byte & 0x80) { + var offset = 0; + var length = 0; + if (byte & 0x01) offset |= delta[deltaOffset++] << 0; + if (byte & 0x02) offset |= delta[deltaOffset++] << 8; + if (byte & 0x04) offset |= delta[deltaOffset++] << 16; + if (byte & 0x08) offset |= delta[deltaOffset++] << 24; + if (byte & 0x10) length |= delta[deltaOffset++] << 0; + if (byte & 0x20) length |= delta[deltaOffset++] << 8; + if (byte & 0x40) length |= delta[deltaOffset++] << 16; + if (length === 0) length = 0x10000; + // copy the data + bodec.copy(bodec.slice(base, offset, offset + length), out, outOffset); + outOffset += length; + } + // Insert command, opcode byte is length itself + else if (byte) { + bodec.copy(bodec.slice(delta, deltaOffset, deltaOffset + byte), out, outOffset); + deltaOffset += byte; + outOffset += byte; + } + else throw new Error('Invalid delta opcode'); + } + + if (outOffset !== out.length) { + throw new Error("Size mismatch in check"); + } + + return out; + + // Read a variable length number our of delta and move the offset. + function readLength() { + var byte = delta[deltaOffset++]; + var length = byte & 0x7f; + var shift = 7; + while (byte & 0x80) { + byte = delta[deltaOffset++]; + length |= (byte & 0x7f) << shift; + shift += 7; + } + return length; + } +} diff --git a/lib/config-codec.js b/lib/config-codec.js new file mode 100644 index 0000000..e69264c --- /dev/null +++ b/lib/config-codec.js @@ -0,0 +1,67 @@ +"use strict"; + +// This is for working with git config files like .git/config and .gitmodules. +// I believe this is just INI format. +module.exports = { + encode: encode, + decode: decode +}; + +function encode(config) { + var lines = []; + Object.keys(config).forEach(function (name) { + var obj = config[name]; + var deep = {}; + var values = {}; + var hasValues = false; + Object.keys(obj).forEach(function (key) { + var value = obj[key]; + if (typeof value === 'object') { + deep[key] = value; + } + else { + hasValues = true; + values[key] = value; + } + }); + if (hasValues) { + encodeBody('[' + name + ']', values); + } + + Object.keys(deep).forEach(function (sub) { + var child = deep[sub]; + encodeBody('[' + name + ' "' + sub + '"]', child); + }); + }); + + return lines.join("\n") + "\n"; + + function encodeBody(header, obj) { + lines.push(header); + Object.keys(obj).forEach(function (name) { + lines.push( "\t" + name + " = " + obj[name]); + }); + } + +} + + +function decode(text) { + var config = {}; + var section; + text.split(/[\r\n]+/).forEach(function (line) { + var match = line.match(/\[([^ \t"\]]+) *(?:"([^"]+)")?\]/); + if (match) { + section = config[match[1]] || (config[match[1]] = {}); + if (match[2]) { + section = section[match[2]] = {}; + } + return; + } + match = line.match(/([^ \t=]+)[ \t]*=[ \t]*(.+)/); + if (match) { + section[match[1]] = match[2]; + } + }); + return config; +} diff --git a/lib/defer.js b/lib/defer.js new file mode 100644 index 0000000..be50543 --- /dev/null +++ b/lib/defer.js @@ -0,0 +1,33 @@ +"use strict"; + +var timeouts, messageName; + +// node.js +if (typeof process === "object" && typeof process.nextTick === "function") { + module.exports = process.nextTick; +} +// some browsers +else if (typeof setImmediate === "function") { + module.exports = setImmediate; +} +// most other browsers +else { + timeouts = []; + messageName = "zero-timeout-message"; + window.addEventListener("message", handleMessage, true); + + module.exports = function (fn) { + timeouts.push(fn); + window.postMessage(messageName, "*"); + }; +} + +function handleMessage(event) { + 
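+  /* Only react to the zero-timeout messages that the exported defer
+     function posted to this window; unrelated message events pass through. */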
if (event.source == window && event.data == messageName) { + event.stopPropagation(); + if (timeouts.length > 0) { + var fn = timeouts.shift(); + fn(); + } + } +} diff --git a/lib/deflate.js b/lib/deflate.js new file mode 100644 index 0000000..512bcc5 --- /dev/null +++ b/lib/deflate.js @@ -0,0 +1,10 @@ +var pako = require('pako'); +var Binary = require('bodec').Binary; +if (Binary === Uint8Array) { + module.exports = pako.deflate; +} +else { + module.exports = function deflate(value) { + return new Binary(pako.deflate(new Uint8Array(value))); + }; +} diff --git a/lib/find-common.js b/lib/find-common.js new file mode 100644 index 0000000..245a5e2 --- /dev/null +++ b/lib/find-common.js @@ -0,0 +1,58 @@ +function oneCall(fn) { + var done = false; + return function () { + if (done) return; + done = true; + return fn.apply(this, arguments); + }; +} + +module.exports = findCommon; + +function findCommon(repo, a, b, callback) { + callback = oneCall(callback); + var ahead = 0, behind = 0; + var aStream, bStream; + var aCommit, bCommit; + + if (a === b) return callback(null, ahead, behind); + repo.logWalk(a, onAStream); + repo.logWalk(b, onBStream); + + function onAStream(err, stream) { + if (err) return callback(err); + aStream = stream; + aStream.read(onA); + } + + function onBStream(err, stream) { + if (err) return callback(err); + bStream = stream; + bStream.read(onB); + } + + function onA(err, commit) { + if (!commit) return callback(err || new Error("No common commit")); + aCommit = commit; + if (bCommit) compare(); + } + + function onB(err, commit) { + if (!commit) return callback(err || new Error("No common commit")); + bCommit = commit; + if (aCommit) compare(); + } + + function compare() { + if (aCommit.hash === bCommit.hash) return callback(null, ahead, behind); + if (aCommit.author.date.seconds > bCommit.author.date.seconds) { + ahead++; + aStream.read(onA); + } + else { + behind++; + bStream.read(onB); + } + } + +} diff --git a/lib/git-fs.js b/lib/git-fs.js new file mode 100644 index 0000000..c8d34d3 --- /dev/null +++ b/lib/git-fs.js @@ -0,0 +1,125 @@ +"use strict"; + +var modes = require('./modes'); +var defer = require('./defer'); + +// options.encrypt(plain) -> encrypted +// options.decrypt(encrypted) -> plain +// options.shouldEncrypt(path) -> boolean +// options.getRootTree() => hash +// options.setRootTree(hash) => +module.exports = function (repo, options) { + var toWrite = {}; + var callbacks = []; + var writing = false; + + return { + readFile: readFile, + writeFile: writeFile, + readDir: readDir + }; + + function readFile(path, callback) { + if (!callback) return readFile.bind(null, path); + + // If there is a pending write for this path, pull from the cache. 
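+    /* writeFile only queues content in toWrite and flushes it to a new
+       root tree in the background, so the queue is checked first to keep
+       reads consistent with writes that haven't landed yet. */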
+ if (toWrite[path]) return callback(null, toWrite[path]); + + // Otherwise read from the persistent storage + options.getRootTree(onRootTree); + + function onRootTree(err, hash) { + if (!hash) return callback(err); + repo.pathToEntry(hash, path, onEntry); + } + + function onEntry(err, entry) { + if (!entry || !modes.isBlob(entry.mode)) return callback(err); + + repo.loadAs("blob", entry.hash, function (err, content) { + if (!content) return callback(err); + if (entry.mode === modes.sym) { + content = options.decrypt(content); + } + callback(null, content); + }); + } + } + + function writeFile(path, binary, callback) { + if (!callback) return writeFile.bind(null, path, binary); + toWrite[path] = binary; + callbacks.push(callback); + defer(check); + } + + function readDir(path, callback) { + if (!callback) return readDir.bind(null, path); + + options.getRootTree(onRootTree); + + function onRootTree(err, hash) { + if (!hash) return callback(err); + repo.pathToEntry(hash, path, onEntry); + } + + function onEntry(err, entry) { + if (!entry || entry.mode !== modes.tree) return callback(err); + repo.loadAs("tree", entry.hash, onTree); + } + + function onTree(err, tree) { + if (!tree) return callback(err); + callback(null, Object.keys(tree)); + } + } + + function check() { + if (writing || !callbacks.length) return; + writing = true; + options.getRootTree(onRootTree); + + function onRootTree(err, hash) { + if (err) return callall(err); + var files = pullFiles(); + if (hash) files.base = hash; + repo.createTree(files, onNewTree); + } + + function onNewTree(err, hash) { + if (err) return callall(err); + options.setRootTree(hash, onSaveRoot); + } + + function onSaveRoot(err) { + if (err) return callall(err); + writing = false; + callall(); + defer(check); + } + } + + function pullFiles() { + var files = Object.keys(toWrite).map(function (path) { + var content = toWrite[path]; + delete toWrite[path]; + var mode = modes.blob; + if (options.shouldEncrypt && options.shouldEncrypt(path)) { + mode = modes.sym; + content = options.encrypt(content); + } + return { + path: path, + mode: mode, + content: content + }; + }); + return files; + } + + function callall(err) { + callbacks.splice(0, callbacks.length).forEach(function (callback) { + callback(err); + }); + } +}; diff --git a/lib/inflate-stream.js b/lib/inflate-stream.js new file mode 100644 index 0000000..ce8d318 --- /dev/null +++ b/lib/inflate-stream.js @@ -0,0 +1,36 @@ +var Inflate = require('pako').Inflate; +var Binary = require('bodec').Binary; + +// Byte oriented inflate stream. Wrapper for pako's Inflate. +// +// var inf = inflate(); +// inf.write(byte) -> more - Write a byte to inflate's state-machine. +// Returns true if more data is expected. +// inf.recycle() - Reset the internal state machine. +// inf.flush() -> data - Flush the output as a binary buffer. +// +module.exports = function inflateStream() { + var inf = new Inflate(); + var b = new Uint8Array(1); + var empty = new Binary(0); + + return { + write: write, + recycle: recycle, + flush: Binary === Uint8Array ? flush : flushConvert + }; + + function write(byte) { + b[0] = byte; + inf.push(b); + return !inf.ended; + } + + function recycle() { inf = new Inflate(); } + + function flush() { return inf.result || empty; } + + function flushConvert() { + return inf.result ? 
new Binary(inf.result) : empty; + } +}; diff --git a/lib/inflate.js b/lib/inflate.js new file mode 100644 index 0000000..038f8a4 --- /dev/null +++ b/lib/inflate.js @@ -0,0 +1,10 @@ +var pako = require('pako'); +var Binary = require('bodec').Binary; +if (Binary === Uint8Array) { + module.exports = pako.inflate; +} +else { + module.exports = function inflate(value) { + return new Binary(pako.inflate(new Uint8Array(value))); + }; +} diff --git a/lib/modes.js b/lib/modes.js new file mode 100644 index 0000000..9162c62 --- /dev/null +++ b/lib/modes.js @@ -0,0 +1,28 @@ +"use strict"; + +var masks = { + mask: parseInt('100000', 8), + blob: parseInt('140000', 8), + file: parseInt('160000', 8) +}; + +var modes = module.exports = { + isBlob: function (mode) { + return (mode & masks.blob) === masks.mask; + }, + isFile: function (mode) { + return (mode & masks.file) === masks.mask; + }, + toType: function (mode) { + if (mode === modes.commit) return "commit"; + if (mode === modes.tree) return "tree"; + if ((mode & masks.blob) === masks.mask) return "blob"; + return "unknown"; + }, + tree: parseInt( '40000', 8), + blob: parseInt('100644', 8), + file: parseInt('100644', 8), + exec: parseInt('100755', 8), + sym: parseInt('120000', 8), + commit: parseInt('160000', 8) +}; diff --git a/lib/object-codec.js b/lib/object-codec.js new file mode 100644 index 0000000..a1609c3 --- /dev/null +++ b/lib/object-codec.js @@ -0,0 +1,265 @@ +"use strict"; +var bodec = require('bodec'); +var modes = require('./modes'); + +// (body) -> raw-buffer +var encoders = exports.encoders = { + blob: encodeBlob, + tree: encodeTree, + commit: encodeCommit, + tag: encodeTag +}; + + // ({type:type, body:raw-buffer}) -> buffer +exports.frame = frame; + +// (raw-buffer) -> body +var decoders = exports.decoders ={ + blob: decodeBlob, + tree: decodeTree, + commit: decodeCommit, + tag: decodeTag +}; + +// (buffer) -> {type:type, body:raw-buffer} +exports.deframe = deframe; + +// Export git style path sort in case it's wanted. +exports.treeMap = treeMap; +exports.treeSort = treeSort; + +function encodeBlob(body) { + if (!bodec.isBinary(body)) throw new TypeError("Blobs must be binary values"); + return body; +} + +function treeMap(key) { + /*jshint validthis:true*/ + var entry = this[key]; + return { + name: key, + mode: entry.mode, + hash: entry.hash + }; +} + +function treeSort(a, b) { + var aa = (a.mode === modes.tree) ? a.name + "/" : a.name; + var bb = (b.mode === modes.tree) ? b.name + "/" : b.name; + return aa > bb ? 1 : aa < bb ? 
-1 : 0; +} + +function encodeTree(body) { + var tree = ""; + if (Array.isArray(body)) throw new TypeError("Tree must be in object form"); + var list = Object.keys(body).map(treeMap, body).sort(treeSort); + for (var i = 0, l = list.length; i < l; i++) { + var entry = list[i]; + tree += entry.mode.toString(8) + " " + bodec.encodeUtf8(entry.name) + + "\0" + bodec.decodeHex(entry.hash); + } + return bodec.fromRaw(tree); +} + +function encodeTag(body) { + var str = "object " + body.object + + "\ntype " + body.type + + "\ntag " + body.tag + + "\ntagger " + formatPerson(body.tagger) + + "\n\n" + body.message; + return bodec.fromUnicode(str); +} + +function encodeCommit(body) { + var str = "tree " + body.tree; + for (var i = 0, l = body.parents.length; i < l; ++i) { + str += "\nparent " + body.parents[i]; + } + str += "\nauthor " + formatPerson(body.author) + + "\ncommitter " + formatPerson(body.committer) + + "\n\n" + body.message; + return bodec.fromUnicode(str); +} + + +function formatPerson(person) { + return safe(person.name) + + " <" + safe(person.email) + "> " + + formatDate(person.date); +} + +function safe(string) { + return string.replace(/(?:^[\.,:;<>"']+|[\0\n<>]+|[\.,:;<>"']+$)/gm, ""); +} + +function two(num) { + return (num < 10 ? "0" : "") + num; +} + +function formatDate(date) { + var seconds, offset; + if (date.seconds) { + seconds = date.seconds; + offset = date.offset; + } + // Also accept Date instances + else { + seconds = Math.floor(date.getTime() / 1000); + offset = date.getTimezoneOffset(); + } + var neg = "+"; + if (offset <= 0) offset = -offset; + else neg = "-"; + offset = neg + two(Math.floor(offset / 60)) + two(offset % 60); + return seconds + " " + offset; +} + +function frame(obj) { + var type = obj.type; + var body = obj.body; + if (!bodec.isBinary(body)) body = encoders[type](body); + return bodec.join([ + bodec.fromRaw(type + " " + body.length + "\0"), + body + ]); +} + +function decodeBlob(body) { + return body; +} + +function decodeTree(body) { + var i = 0; + var length = body.length; + var start; + var mode; + var name; + var hash; + var tree = {}; + while (i < length) { + start = i; + i = indexOf(body, 0x20, start); + if (i < 0) throw new SyntaxError("Missing space"); + mode = parseOct(body, start, i++); + start = i; + i = indexOf(body, 0x00, start); + name = bodec.toUnicode(body, start, i++); + hash = bodec.toHex(body, i, i += 20); + tree[name] = { + mode: mode, + hash: hash + }; + } + return tree; +} + +function decodeCommit(body) { + var i = 0; + var start; + var key; + var parents = []; + var commit = { + tree: "", + parents: parents, + author: "", + committer: "", + message: "" + }; + while (body[i] !== 0x0a) { + start = i; + i = indexOf(body, 0x20, start); + if (i < 0) throw new SyntaxError("Missing space"); + key = bodec.toRaw(body, start, i++); + start = i; + i = indexOf(body, 0x0a, start); + if (i < 0) throw new SyntaxError("Missing linefeed"); + var value = bodec.toUnicode(body, start, i++); + if (key === "parent") { + parents.push(value); + } + else { + if (key === "author" || key === "committer") { + value = decodePerson(value); + } + commit[key] = value; + } + } + i++; + commit.message = bodec.toUnicode(body, i, body.length); + return commit; +} + +function decodeTag(body) { + var i = 0; + var start; + var key; + var tag = {}; + while (body[i] !== 0x0a) { + start = i; + i = indexOf(body, 0x20, start); + if (i < 0) throw new SyntaxError("Missing space"); + key = bodec.toRaw(body, start, i++); + start = i; + i = indexOf(body, 0x0a, start); + if (i 
< 0) throw new SyntaxError("Missing linefeed"); + var value = bodec.toUnicode(body, start, i++); + if (key === "tagger") value = decodePerson(value); + tag[key] = value; + } + i++; + tag.message = bodec.toUnicode(body, i, body.length); + return tag; +} + +function decodePerson(string) { + var match = string.match(/^([^<]*) <([^>]*)> ([^ ]*) (.*)$/); + if (!match) throw new Error("Improperly formatted person string"); + return { + name: match[1], + email: match[2], + date: { + seconds: parseInt(match[3], 10), + offset: parseInt(match[4], 10) / 100 * -60 + } + }; +} + +function deframe(buffer, decode) { + var space = indexOf(buffer, 0x20); + if (space < 0) throw new Error("Invalid git object buffer"); + var nil = indexOf(buffer, 0x00, space); + if (nil < 0) throw new Error("Invalid git object buffer"); + var body = bodec.slice(buffer, nil + 1); + var size = parseDec(buffer, space + 1, nil); + if (size !== body.length) throw new Error("Invalid body length."); + var type = bodec.toRaw(buffer, 0, space); + return { + type: type, + body: decode ? decoders[type](body) : body + }; +} + +function indexOf(buffer, byte, i) { + i |= 0; + var length = buffer.length; + for (;;i++) { + if (i >= length) return -1; + if (buffer[i] === byte) return i; + } +} + +function parseOct(buffer, start, end) { + var val = 0; + while (start < end) { + val = (val << 3) + buffer[start++] - 0x30; + } + return val; +} + +function parseDec(buffer, start, end) { + var val = 0; + while (start < end) { + val = val * 10 + buffer[start++] - 0x30; + } + return val; +} diff --git a/lib/pack-codec.js b/lib/pack-codec.js new file mode 100644 index 0000000..93ac7c1 --- /dev/null +++ b/lib/pack-codec.js @@ -0,0 +1,326 @@ +var inflateStream = require('./inflate-stream.js'); +var inflate = require('./inflate.js'); +var deflate = require('./deflate.js'); +var sha1 = require('git-sha1'); +var bodec = require('bodec'); + +var typeToNum = { + commit: 1, + tree: 2, + blob: 3, + tag: 4, + "ofs-delta": 6, + "ref-delta": 7 +}; +var numToType = {}; +for (var type in typeToNum) { + var num = typeToNum[type]; + numToType[num] = type; +} +exports.parseEntry = parseEntry; +function parseEntry(chunk) { + var offset = 0; + var byte = chunk[offset++]; + var type = numToType[(byte >> 4) & 0x7]; + var size = byte & 0xf; + var left = 4; + while (byte & 0x80) { + byte = chunk[offset++]; + size |= (byte & 0x7f) << left; + left += 7; + } + size = size >>> 0; + var ref; + if (type === "ref-delta") { + ref = bodec.toHex(bodec.slice(chunk, offset, offset += 20)); + } + else if (type === "ofs-delta") { + byte = chunk[offset++]; + ref = byte & 0x7f; + while (byte & 0x80) { + byte = chunk[offset++]; + ref = ((ref + 1) << 7) | (byte & 0x7f); + } + } + + var body = inflate(bodec.slice(chunk, offset)); + if (body.length !== size) { + throw new Error("Size mismatch"); + } + var result = { + type: type, + body: body + }; + if (typeof ref !== "undefined") { + result.ref = ref; + } + return result; +} + + +exports.decodePack = decodePack; +function decodePack(emit) { + + var state = $pack; + var sha1sum = sha1(); + var inf = inflateStream(); + + var offset = 0; + var position = 0; + var version = 0x4b434150; // PACK reversed + var num = 0; + var type = 0; + var length = 0; + var ref = null; + var checksum = ""; + var start = 0; + var parts = []; + + + return function (chunk) { + if (chunk === undefined) { + if (num || checksum.length < 40) throw new Error("Unexpected end of input stream"); + return emit(); + } + + for (var i = 0, l = chunk.length; i < l; i++) { + // 
console.log([state, i, chunk[i].toString(16)]); + if (!state) throw new Error("Unexpected extra bytes: " + bodec.slice(chunk, i)); + state = state(chunk[i], i, chunk); + position++; + } + if (!state) return; + if (state !== $checksum) sha1sum.update(chunk); + var buff = inf.flush(); + if (buff.length) { + parts.push(buff); + } + }; + + // The first four bytes in a packfile are the bytes 'PACK' + function $pack(byte) { + if ((version & 0xff) === byte) { + version >>>= 8; + return version ? $pack : $version; + } + throw new Error("Invalid packfile header"); + } + + // The version is stored as an unsigned 32 integer in network byte order. + // It must be version 2 or 3. + function $version(byte) { + version = (version << 8) | byte; + if (++offset < 4) return $version; + if (version >= 2 && version <= 3) { + offset = 0; + return $num; + } + throw new Error("Invalid version number " + num); + } + + // The number of objects in this packfile is also stored as an unsigned 32 bit int. + function $num(byte) { + num = (num << 8) | byte; + if (++offset < 4) return $num; + offset = 0; + emit({version: version, num: num}); + return $header; + } + + // n-byte type and length (3-bit type, (n-1)*7+4-bit length) + // CTTTSSSS + // C is continue bit, TTT is type, S+ is length + function $header(byte) { + if (start === 0) start = position; + type = byte >> 4 & 0x07; + length = byte & 0x0f; + if (byte & 0x80) { + offset = 4; + return $header2; + } + return afterHeader(); + } + + // Second state in the same header parsing. + // CSSSSSSS* + function $header2(byte) { + length |= (byte & 0x7f) << offset; + if (byte & 0x80) { + offset += 7; + return $header2; + } + return afterHeader(); + } + + // Common helper for finishing tiny and normal headers. + function afterHeader() { + offset = 0; + if (type === 6) { + ref = 0; + return $ofsDelta; + } + if (type === 7) { + ref = ""; + return $refDelta; + } + // console.log({type: type,length: length}) + return $body; + } + + // Big-endian modified base 128 number encoded ref offset + function $ofsDelta(byte) { + ref = byte & 0x7f; + if (byte & 0x80) return $ofsDelta2; + return $body; + } + + function $ofsDelta2(byte) { + ref = ((ref + 1) << 7) | (byte & 0x7f); + if (byte & 0x80) return $ofsDelta2; + return $body; + } + + // 20 byte raw sha1 hash for ref + function $refDelta(byte) { + ref += toHex(byte); + if (++offset < 20) return $refDelta; + return $body; + } + + // Common helper for generating 2-character hex numbers + function toHex(num) { + return num < 0x10 ? 
"0" + num.toString(16) : num.toString(16); + } + + // Common helper for emitting all three object shapes + function emitObject() { + var body = bodec.join(parts); + if (body.length !== length) { + throw new Error("Body length mismatch"); + } + var item = { + type: numToType[type], + size: length, + body: body, + offset: start + }; + if (ref) item.ref = ref; + parts.length = 0; + start = 0; + offset = 0; + type = 0; + length = 0; + ref = null; + emit(item); + } + + // Feed the deflated code to the inflate engine + function $body(byte, i, chunk) { + if (inf.write(byte)) return $body; + var buf = inf.flush(); + if (buf.length !== length) throw new Error("Length mismatch, expected " + length + " got " + buf.length); + inf.recycle(); + if (buf.length) { + parts.push(buf); + } + emitObject(); + // If this was all the objects, start calculating the sha1sum + if (--num) return $header; + sha1sum.update(bodec.slice(chunk, 0, i + 1)); + return $checksum; + } + + // 20 byte checksum + function $checksum(byte) { + checksum += toHex(byte); + if (++offset < 20) return $checksum; + var actual = sha1sum.digest(); + if (checksum !== actual) throw new Error("Checksum mismatch: " + actual + " != " + checksum); + } + +} + + +exports.encodePack = encodePack; +function encodePack(emit) { + var sha1sum = sha1(); + var left; + return function (item) { + if (item === undefined) { + if (left !== 0) throw new Error("Some items were missing"); + return emit(); + } + if (typeof item.num === "number") { + if (left !== undefined) throw new Error("Header already sent"); + left = item.num; + write(packHeader(item.num)); + } + else if (typeof item.type === "string" && bodec.isBinary(item.body)) { + // The header must be sent before items. + if (typeof left !== "number") throw new Error("Headers not sent yet"); + + // Make sure we haven't sent all the items already + if (!left) throw new Error("All items already sent"); + + // Send the item in packstream format + write(packFrame(item)); + + // Send the checksum after the last item + if (!--left) { + emit(bodec.fromHex(sha1sum.digest())); + } + } + else { + throw new Error("Invalid item"); + } + }; + function write(chunk) { + sha1sum.update(chunk); + emit(chunk); + } +} + +function packHeader(length) { + return bodec.fromArray([ + 0x50, 0x41, 0x43, 0x4b, // PACK + 0, 0, 0, 2, // version 2 + length >> 24, // Num of objects + (length >> 16) & 0xff, + (length >> 8) & 0xff, + length & 0xff + ]); +} + +function packFrame(item) { + var length = item.body.length; + + // write TYPE_AND_BASE128_SIZE + var head = [(typeToNum[item.type] << 4) | (length & 0xf)]; + var i = 0; + length >>= 4; + while (length) { + head[i++] |= 0x80; + head[i] = length & 0x7f; + length >>= 7; + } + + if (typeof item.ref === "number") { + // write BIG_ENDIAN_MODIFIED_BASE_128_NUMBER + var offset = item.ref; + // Calculate how many digits we need in base 128 and move the pointer + i += Math.floor(Math.log(offset) / Math.log(0x80)) + 1; + // Write the last digit + head[i] = offset & 0x7f; + // Then write the rest + while (offset >>= 7) { + head[--i] = 0x80 | (--offset & 0x7f); + } + } + + var parts = [bodec.fromArray(head)]; + if (typeof item.ref === "string") { + parts.push(bodec.fromHex(item.ref)); + } + parts.push(deflate(item.body)); + return bodec.join(parts); +} diff --git a/lib/pkt-line.js b/lib/pkt-line.js new file mode 100644 index 0000000..4134462 --- /dev/null +++ b/lib/pkt-line.js @@ -0,0 +1,128 @@ +"use strict"; + +var bodec = require('bodec'); +var PACK = bodec.fromRaw("PACK"); + +module.exports 
= { + deframer: deframer, + framer: framer +}; + +function deframer(emit) { + var state = 0; + var offset = 4; + var length = 0; + var data; + var more = true; + + return function (item) { + + // Forward the EOS marker + if (item === undefined) return emit(); + + // Once we're in pack mode, everything goes straight through + if (state === 3) return emit(item); + + // Otherwise parse the data using a state machine. + for (var i = 0, l = item.length; i < l; i++) { + var byte = item[i]; + if (state === 0) { + var val = fromHexChar(byte); + if (val === -1) { + if (byte === PACK[0]) { + offset = 1; + state = 2; + continue; + } + state = -1; + throw new SyntaxError("Not a hex char: " + String.fromCharCode(byte)); + } + length |= val << ((--offset) * 4); + if (offset === 0) { + if (length === 4) { + offset = 4; + more = emit(""); + } + else if (length === 0) { + offset = 4; + more = emit(null); + } + else if (length > 4) { + length -= 4; + data = bodec.create(length); + state = 1; + } + else { + state = -1; + throw new SyntaxError("Invalid length: " + length); + } + } + } + else if (state === 1) { + data[offset++] = byte; + if (offset === length) { + offset = 4; + state = 0; + length = 0; + if (data[0] === 1) { + more = emit(bodec.slice(data, 1)); + } + else if (data[0] === 2) { + more = emit({progress: bodec.toUnicode(data, 1)}); + } + else if (data[0] === 3) { + more = emit({error: bodec.toUnicode(data, 1)}); + } + else { + more = emit(bodec.toUnicode(data).trim()); + } + } + } + else if (state === 2) { + if (offset < 4 && byte === PACK[offset++]) { + continue; + } + state = 3; + more = emit(bodec.join([PACK, bodec.subarray(item, i)])); + break; + } + else { + throw new Error("pkt-line decoder in invalid state"); + } + } + + return more; + }; + +} + +function framer(emit) { + return function (item) { + if (item === undefined) return emit(); + if (item === null) { + return emit(bodec.fromRaw("0000")); + } + if (typeof item === "string") { + item = bodec.fromUnicode(item); + } + return emit(bodec.join([frameHead(item.length + 4), item])); + }; +} + +function frameHead(length) { + var buffer = bodec.create(4); + buffer[0] = toHexChar(length >>> 12); + buffer[1] = toHexChar((length >>> 8) & 0xf); + buffer[2] = toHexChar((length >>> 4) & 0xf); + buffer[3] = toHexChar(length & 0xf); + return buffer; +} + +function fromHexChar(val) { + return (val >= 0x30 && val < 0x40) ? val - 0x30 : + ((val > 0x60 && val <= 0x66) ? val - 0x57 : -1); +} + +function toHexChar(val) { + return val < 0x0a ? 
val + 0x30 : val + 0x57; +} diff --git a/lib/wrap-handler.js b/lib/wrap-handler.js new file mode 100644 index 0000000..9a1c050 --- /dev/null +++ b/lib/wrap-handler.js @@ -0,0 +1,21 @@ +"use strict"; + +module.exports = wrapHandler; + +function wrapHandler(fn, onError) { + if (onError) { + return function (err, value) { + if (err) return onError(err); + try { + return fn(value); + } + catch (err) { + return onError(err); + } + }; + } + return function (err, value) { + if (err) throw err; + return fn(value); + }; +} diff --git a/mixins/add-cache.js b/mixins/add-cache.js new file mode 100644 index 0000000..6d94285 --- /dev/null +++ b/mixins/add-cache.js @@ -0,0 +1,61 @@ +"use strict"; + +module.exports = addCache; +function addCache(repo, cache) { + var loadAs = repo.loadAs; + if (loadAs) repo.loadAs = loadAsCached; + var saveAs = repo.saveAs; + if (saveAs) repo.saveAs = saveAsCached; + var createTree = repo.createTree; + if (createTree) repo.createTree = createTreeCached; + + function loadAsCached(type, hash, callback) { + // Next check in disk cache... + cache.loadAs(type, hash, onCacheLoad); + + function onCacheLoad(err, value) { + if (err) return callback(err); + // ...and return if it's there. + if (value !== undefined) { + return callback(null, value, hash); + } + + // Otherwise load from real data source... + loadAs.call(repo, type, hash, onLoad); + } + + function onLoad(err, value) { + if (value === undefined) return callback(err); + + // Store it on disk too... + // Force the hash to prevent mismatches. + cache.saveAs(type, value, onSave, hash); + + function onSave(err) { + if (err) return callback(err); + // Finally return the value to caller. + callback(null, value, hash); + } + } + } + + function saveAsCached(type, value, callback) { + saveAs.call(repo, type, value, onSave); + + function onSave(err, hash) { + if (err) return callback(err); + // Store in disk, forcing hash to match. + cache.saveAs(type, value, callback, hash); + } + } + + function createTreeCached(entries, callback) { + createTree.call(repo, entries, onTree); + + function onTree(err, hash, tree) { + if (err) return callback(err); + cache.saveAs("tree", tree, callback, hash); + } + } + +} diff --git a/mixins/create-tree.js b/mixins/create-tree.js new file mode 100644 index 0000000..5137dae --- /dev/null +++ b/mixins/create-tree.js @@ -0,0 +1,148 @@ +"use strict"; + +var modes = require('../lib/modes.js'); + +module.exports = function (repo) { + repo.createTree = createTree; + + function createTree(entries, callback) { + if (!callback) return createTree.bind(null, entries); + callback = singleCall(callback); + if (!Array.isArray(entries)) { + entries = Object.keys(entries).map(function (path) { + var entry = entries[path]; + entry.path = path; + return entry; + }); + } + + // Tree paths that we need loaded + var toLoad = {}; + function markTree(path) { + while(true) { + if (toLoad[path]) return; + toLoad[path] = true; + trees[path] = { + add: [], + del: [], + tree: {} + }; + if (!path) break; + path = path.substring(0, path.lastIndexOf("/")); + } + } + + // Commands to run organized by tree path + var trees = {}; + + // Counter for parallel I/O operations + var left = 1; // One extra counter to protect again zalgo cache callbacks. + + // First pass, stubs out the trees structure, sorts adds from deletes, + // and saves any inline content blobs. 
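+    /* As a rough sketch (the paths and content here are made up), a mixed
+       update could look like:
+
+         repo.createTree({
+           "docs/readme.md": { mode: modes.blob, content: "# Hello" },
+           "old-name.txt": {}   // a falsy mode marks the path for deletion
+         }, callback);
+
+       Entries given inline content instead of a hash are saved as blobs
+       during this pass. */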
+    entries.forEach(function (entry) {
+      var index = entry.path.lastIndexOf("/");
+      var parentPath = entry.path.substr(0, index);
+      var name = entry.path.substr(index + 1);
+      markTree(parentPath);
+      var tree = trees[parentPath];
+      var adds = tree.add;
+      var dels = tree.del;
+
+      if (!entry.mode) {
+        dels.push(name);
+        return;
+      }
+      var add = {
+        name: name,
+        mode: entry.mode,
+        hash: entry.hash
+      };
+      adds.push(add);
+      if (entry.hash) return;
+      left++;
+      repo.saveAs("blob", entry.content, function (err, hash) {
+        if (err) return callback(err);
+        add.hash = hash;
+        check();
+      });
+    });
+
+    // Preload the base trees
+    if (entries.base) loadTree("", entries.base);
+
+    // Check just in case there was no IO to perform
+    check();
+
+    function loadTree(path, hash) {
+      left++;
+      delete toLoad[path];
+      repo.loadAs("tree", hash, function (err, tree) {
+        if (err) return callback(err);
+        trees[path].tree = tree;
+        Object.keys(tree).forEach(function (name) {
+          var childPath = path ? path + "/" + name : name;
+          if (toLoad[childPath]) loadTree(childPath, tree[name].hash);
+        });
+        check();
+      });
+    }
+
+    function check() {
+      if (--left) return;
+      findLeaves().forEach(processLeaf);
+    }
+
+    function processLeaf(path) {
+      var entry = trees[path];
+      delete trees[path];
+      var tree = entry.tree;
+      entry.del.forEach(function (name) {
+        delete tree[name];
+      });
+      entry.add.forEach(function (item) {
+        tree[item.name] = {
+          mode: item.mode,
+          hash: item.hash
+        };
+      });
+      left++;
+      repo.saveAs("tree", tree, function (err, hash, tree) {
+        if (err) return callback(err);
+        if (!path) return callback(null, hash, tree);
+        var index = path.lastIndexOf("/");
+        var parentPath = path.substring(0, index);
+        var name = path.substring(index + 1);
+        trees[parentPath].add.push({
+          name: name,
+          mode: modes.tree,
+          hash: hash
+        });
+        if (--left) return;
+        findLeaves().forEach(processLeaf);
+      });
+    }
+
+    function findLeaves() {
+      var paths = Object.keys(trees);
+      var parents = {};
+      paths.forEach(function (path) {
+        if (!path) return;
+        var parent = path.substring(0, path.lastIndexOf("/"));
+        parents[parent] = true;
+      });
+      return paths.filter(function (path) {
+        return !parents[path];
+      });
+    }
+  }
+};
+
+function singleCall(callback) {
+  var done = false;
+  return function () {
+    if (done) return console.warn("Discarding extra callback");
+    done = true;
+    return callback.apply(this, arguments);
+  };
+}
diff --git a/mixins/delay.js b/mixins/delay.js
new file mode 100644
index 0000000..8291224
--- /dev/null
+++ b/mixins/delay.js
@@ -0,0 +1,51 @@
+"use strict";
+
+module.exports = function (repo, ms) {
+  var saveAs = repo.saveAs;
+  var loadAs = repo.loadAs;
+  var readRef = repo.readRef;
+  var updateRef = repo.updateRef;
+  var createTree = repo.createTree;
+
+  repo.saveAs = saveAsDelayed;
+  repo.loadAs = loadAsDelayed;
+  repo.readRef = readRefDelayed;
+  repo.updateRef = updateRefDelayed;
+  if (createTree) repo.createTree = createTreeDelayed;
+
+  function saveAsDelayed(type, value, callback) {
+    if (!callback) return saveAsDelayed.bind(repo, type, value);
+    setTimeout(function () {
+      return saveAs.call(repo, type, value, callback);
+    }, ms);
+  }
+
+  function loadAsDelayed(type, hash, callback) {
+    if (!callback) return loadAsDelayed.bind(repo, type, hash);
+    setTimeout(function () {
+      return loadAs.call(repo, type, hash, callback);
+    }, ms);
+  }
+
+  function readRefDelayed(ref, callback) {
+    if (!callback) return readRefDelayed.bind(repo, ref);
+    setTimeout(function () {
+      return readRef.call(repo, ref, callback);
+    }, ms);
+  }
+
+  function updateRefDelayed(ref, hash, callback) {
+    if (!callback) return updateRefDelayed.bind(repo, ref, hash);
+    setTimeout(function () {
+      return updateRef.call(repo, ref, hash, callback);
+    }, ms);
+  }
+
+  function createTreeDelayed(entries, callback) {
+    if (!callback) return createTreeDelayed.bind(repo, entries);
+    setTimeout(function () {
+      return createTree.call(repo, entries, callback);
+    }, ms);
+  }
+
+};
diff --git a/mixins/fall-through.js b/mixins/fall-through.js
new file mode 100644
index 0000000..3953499
--- /dev/null
+++ b/mixins/fall-through.js
@@ -0,0 +1,26 @@
+var modes = require('../lib/modes');
+
+module.exports = function (local, remote) {
+  var loadAs = local.loadAs;
+  local.loadAs = newLoadAs;
+  function newLoadAs(type, hash, callback) {
+    if (!callback) return newLoadAs.bind(local, type, hash);
+    loadAs.call(local, type, hash, function (err, body) {
+      if (err) return callback(err);
+      if (body === undefined) return remote.loadAs(type, hash, callback);
+      callback(null, body);
+    });
+  }
+
+  var readRef = local.readRef;
+  local.readRef = newReadRef;
+  function newReadRef(ref, callback) {
+    if (!callback) return newReadRef.bind(local, ref);
+    readRef.call(local, ref, function (err, body) {
+      if (err) return callback(err);
+      if (body === undefined) return remote.readRef(ref, callback);
+      callback(null, body);
+    });
+  }
+
+};
diff --git a/mixins/formats.js b/mixins/formats.js
new file mode 100644
index 0000000..88ac21f
--- /dev/null
+++ b/mixins/formats.js
@@ -0,0 +1,133 @@
+"use strict";
+
+var bodec = require('bodec');
+var treeMap = require('../lib/object-codec').treeMap;
+
+module.exports = function (repo) {
+  var loadAs = repo.loadAs;
+  repo.loadAs = newLoadAs;
+  var saveAs = repo.saveAs;
+  repo.saveAs = newSaveAs;
+
+  function newLoadAs(type, hash, callback) {
+    if (!callback) return newLoadAs.bind(repo, type, hash);
+    var realType = type === "text" ? "blob":
+        type === "array" ? "tree" : type;
+    return loadAs.call(repo, realType, hash, onLoad);
+
+    function onLoad(err, body, hash) {
+      if (body === undefined) return callback(err);
+      if (type === "text") body = bodec.toUnicode(body);
+      if (type === "array") body = toArray(body);
+      return callback(err, body, hash);
+    }
+  }
+
+  function newSaveAs(type, body, callback) {
+    if (!callback) return newSaveAs.bind(repo, type, body);
+    type = type === "text" ? "blob":
+        type === "array" ? "tree" : type;
+    if (type === "blob") {
+      if (typeof body === "string") {
+        body = bodec.fromUnicode(body);
+      }
+    }
+    else if (type === "tree") {
+      body = normalizeTree(body);
+    }
+    else if (type === "commit") {
+      body = normalizeCommit(body);
+    }
+    else if (type === "tag") {
+      body = normalizeTag(body);
+    }
+    return saveAs.call(repo, type, body, callback);
+  }
+
+};
+
+function toArray(tree) {
+  return Object.keys(tree).map(treeMap, tree);
+}
+
+function normalizeTree(body) {
+  var type = body && typeof body;
+  if (type !== "object") {
+    throw new TypeError("Tree body must be array or object");
+  }
+  var tree = {}, i, l, entry;
+  // If array form is passed in, convert to object form.
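+  /* e.g. the array form [{ name: "a.txt", mode: modes.blob, hash: hash }]
+     describes the same tree as { "a.txt": { mode: modes.blob, hash: hash } }. */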
+ if (Array.isArray(body)) { + for (i = 0, l = body.length; i < l; i++) { + entry = body[i]; + tree[entry.name] = { + mode: entry.mode, + hash: entry.hash + }; + } + } + else { + var names = Object.keys(body); + for (i = 0, l = names.length; i < l; i++) { + var name = names[i]; + entry = body[name]; + tree[name] = { + mode: entry.mode, + hash: entry.hash + }; + } + } + return tree; +} + +function normalizeCommit(body) { + if (!body || typeof body !== "object") { + throw new TypeError("Commit body must be an object"); + } + if (!(body.tree && body.author && body.message)) { + throw new TypeError("Tree, author, and message are required for commits"); + } + var parents = body.parents || (body.parent ? [ body.parent ] : []); + if (!Array.isArray(parents)) { + throw new TypeError("Parents must be an array"); + } + var author = normalizePerson(body.author); + var committer = body.committer ? normalizePerson(body.committer) : author; + return { + tree: body.tree, + parents: parents, + author: author, + committer: committer, + message: body.message + }; +} + +function normalizeTag(body) { + if (!body || typeof body !== "object") { + throw new TypeError("Tag body must be an object"); + } + if (!(body.object && body.type && body.tag && body.tagger && body.message)) { + throw new TypeError("Object, type, tag, tagger, and message required"); + } + return { + object: body.object, + type: body.type, + tag: body.tag, + tagger: normalizePerson(body.tagger), + message: body.message + }; +} + +function normalizePerson(person) { + if (!person || typeof person !== "object") { + throw new TypeError("Person must be an object"); + } + if (typeof person.name !== "string" || typeof person.email !== "string") { + throw new TypeError("Name and email are required for person fields"); + } + return { + name: person.name, + email: person.email, + date: person.date || new Date() + }; +} diff --git a/mixins/fs-db.js b/mixins/fs-db.js new file mode 100644 index 0000000..12e1cb0 --- /dev/null +++ b/mixins/fs-db.js @@ -0,0 +1,339 @@ +"use strict"; +var bodec = require('bodec'); +var inflate = require('../lib/inflate'); +var deflate = require('../lib/deflate'); +var codec = require('../lib/object-codec'); +var parsePackEntry = require('../lib/pack-codec').parseEntry; +var applyDelta = require('../lib/apply-delta'); +var sha1 = require('git-sha1'); +var pathJoin = require('path').join; + +// The fs object has the following interface: +// - readFile(path) => binary +// Must also call callback() with no arguments if the file does not exist. +// - readChunk(path, start, end) => binary +// Must also call callback() with no arguments if the file does not exist. +// - writeFile(path, binary) => +// Must also make every directory up to parent of path. +// - readDir(path) => array +// Must also call callback() with no arguments if the file does not exist. +// The repo is expected to have a rootPath property that points to +// the .git folder within the filesystem. 
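+/* A rough usage sketch (fsAdapter is hypothetical here; any object
+   implementing the interface above in continuable style will do):
+
+     var repo = { rootPath: "/path/to/repo/.git" };
+     require('js-git/mixins/fs-db')(repo, fsAdapter);
+     repo.init("refs/heads/master", function (err) {
+       if (err) throw err;
+       // repo.loadAs, repo.saveAs, repo.readRef, etc. are now live.
+     });
+*/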
+module.exports = function (repo, fs) { + + var cachedIndexes = {}; + + repo.loadAs = loadAs; + repo.saveAs = saveAs; + repo.loadRaw = loadRaw; + repo.saveRaw = saveRaw; + repo.readRef = readRef; + repo.updateRef = updateRef; + repo.hasHash = hasHash; + repo.init = init; + repo.setShallow = setShallow; + + function init(ref, callback) { + if (!callback) return init.bind(null, ref); + ref = ref || "refs/heads/master"; + var path = pathJoin(repo.rootPath, "HEAD"); + fs.writeFile(path, "ref: " + ref, callback); + } + + function setShallow(ref, callback) { + if (!callback) return setShallow.bind(null, ref); + var path = pathJoin(repo.rootPath, "shallow"); + fs.writeFile(path, ref, callback); + } + + function updateRef(ref, hash, callback) { + if (!callback) return updateRef.bind(repo, ref, hash); + var path = pathJoin(repo.rootPath, ref); + var lock = path + ".lock"; + fs.writeFile(lock, bodec.fromRaw(hash + "\n"), function(err) { + if(err) return callback(err); + fs.rename(lock, path, callback); + }); + } + + function readRef(ref, callback) { + if (!callback) return readRef.bind(repo, ref); + var path = pathJoin(repo.rootPath, ref); + fs.readFile(path, function (err, binary) { + if (err) return callback(err); + if (binary === undefined) { + return readPackedRef(ref, callback); + } + var hash; + try { hash = bodec.toRaw(binary).trim(); } + catch (err) { return callback(err); } + callback(null, hash); + }); + } + + function readPackedRef(ref, callback) { + var path = pathJoin(repo.rootPath, "packed-refs"); + fs.readFile(path, function (err, binary) { + if (binary === undefined) return callback(err); + var hash; + try { + var text = bodec.toRaw(binary); + var index = text.indexOf(ref); + if (index >= 0) { + hash = text.substring(index - 41, index - 1); + } + } + catch (err) { + return callback(err); + } + callback(null, hash); + }); + } + + function saveAs(type, body, callback) { + if (!callback) return saveAs.bind(repo, type, body); + var raw, hash; + try { + raw = codec.frame({ + type: type, + body: codec.encoders[type](body) + }); + hash = sha1(raw); + } + catch (err) { return callback(err); } + saveRaw(hash, raw, function (err) { + if (err) return callback(err); + callback(null, hash); + }); + } + + function saveRaw(hash, raw, callback) { + if (!callback) return saveRaw.bind(repo, hash, raw); + var buffer, path; + try { + if (sha1(raw) !== hash) { + throw new Error("Save data does not match hash"); + } + buffer = deflate(raw); + path = hashToPath(hash); + } + catch (err) { return callback(err); } + // Try to read the object first. 
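+    /* Objects are content-addressed, so if this hash is already on disk
+       the bytes must be identical and the write can be skipped. */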
+ loadRaw(hash, function (err, data) { + // If it already exists, we're done + if (data) return callback(); + // Otherwise write a new file + var tmp = path.replace(/[0-9a-f]+$/, 'tmp_obj_' + Math.random().toString(36).substr(2)) + fs.writeFile(tmp, buffer, function(err) { + if(err) return callback(err); + fs.rename(tmp, path, callback); + }); + }); + } + + function loadAs(type, hash, callback) { + if (!callback) return loadAs.bind(repo, type, hash); + loadRaw(hash, function (err, raw) { + if (raw === undefined) return callback(err); + var body; + try { + raw = codec.deframe(raw); + if (raw.type !== type) throw new TypeError("Type mismatch"); + body = codec.decoders[raw.type](raw.body); + } + catch (err) { return callback(err); } + callback(null, body); + }); + } + + function hasHash(hash, callback) { + if (!callback) return hasHash.bind(repo, hash); + loadRaw(hash, function (err, body) { + if (err) return callback(err); + return callback(null, !!body); + }); + } + + function loadRaw(hash, callback) { + if (!callback) return loadRaw.bind(repo, hash); + var path = hashToPath(hash); + fs.readFile(path, function (err, buffer) { + if (err) return callback(err); + if (buffer) { + var raw; + try { raw = inflate(buffer); } + catch (err) { return callback(err); } + return callback(null, raw); + } + return loadRawPacked(hash, callback); + }); + } + + function loadRawPacked(hash, callback) { + var packDir = pathJoin(repo.rootPath, "objects/pack"); + var packHashes = []; + fs.readDir(packDir, function (err, entries) { + if (!entries) return callback(err); + entries.forEach(function (name) { + var match = name.match(/pack-([0-9a-f]{40}).idx/); + if (match) packHashes.push(match[1]); + }); + start(); + }); + + function start() { + var packHash = packHashes.pop(); + var offsets; + if (!packHash) return callback(); + if (!cachedIndexes[packHash]) loadIndex(packHash); + else onIndex(); + + function loadIndex() { + var indexFile = pathJoin(packDir, "pack-" + packHash + ".idx" ); + fs.readFile(indexFile, function (err, buffer) { + if (!buffer) return callback(err); + try { + cachedIndexes[packHash] = parseIndex(buffer); + } + catch (err) { return callback(err); } + onIndex(); + }); + } + + function onIndex() { + var cached = cachedIndexes[packHash]; + var packFile = pathJoin(packDir, "pack-" + packHash + ".pack" ); + var index = cached.byHash[hash]; + if (!index) return start(); + offsets = cached.offsets; + loadChunk(packFile, index.offset, callback); + } + + function loadChunk(packFile, start, callback) { + var index = offsets.indexOf(start); + if (index < 0) { + var error = new Error("Can't find chunk starting at " + start); + return callback(error); + } + var end = index + 1 < offsets.length ? 
offsets[index + 1] : -20; + fs.readChunk(packFile, start, end, function (err, chunk) { + if (!chunk) return callback(err); + var raw; + try { + var entry = parsePackEntry(chunk); + if (entry.type === "ref-delta") { + return loadRaw.call(repo, entry.ref, onBase); + } + else if (entry.type === "ofs-delta") { + return loadChunk(packFile, start - entry.ref, onBase); + } + raw = codec.frame(entry); + } + catch (err) { return callback(err); } + callback(null, raw); + + function onBase(err, base) { + if (!base) return callback(err); + var object = codec.deframe(base); + var buffer; + try { + object.body = applyDelta(entry.body, object.body); + buffer = codec.frame(object); + } + catch (err) { return callback(err); } + callback(null, buffer); + } + }); + } + + } + } + + function hashToPath(hash) { + return pathJoin(repo.rootPath, "objects", hash.substring(0, 2), hash.substring(2)); + } + +}; + +function parseIndex(buffer) { + if (readUint32(buffer, 0) !== 0xff744f63 || + readUint32(buffer, 4) !== 0x00000002) { + throw new Error("Only v2 pack indexes supported"); + } + + // Get the number of hashes in index + // This is the value of the last fan-out entry + var hashOffset = 8 + 255 * 4; + var length = readUint32(buffer, hashOffset); + hashOffset += 4; + var crcOffset = hashOffset + 20 * length; + var lengthOffset = crcOffset + 4 * length; + var largeOffset = lengthOffset + 4 * length; + var checkOffset = largeOffset; + var indexes = new Array(length); + for (var i = 0; i < length; i++) { + var start = hashOffset + i * 20; + var hash = bodec.toHex(bodec.slice(buffer, start, start + 20)); + var crc = readUint32(buffer, crcOffset + i * 4); + var offset = readUint32(buffer, lengthOffset + i * 4); + if (offset & 0x80000000) { + offset = largeOffset + (offset &0x7fffffff) * 8; + checkOffset = Math.max(checkOffset, offset + 8); + offset = readUint64(buffer, offset); + } + indexes[i] = { + hash: hash, + offset: offset, + crc: crc + }; + } + var packChecksum = bodec.toHex(bodec.slice(buffer, checkOffset, checkOffset + 20)); + var checksum = bodec.toHex(bodec.slice(buffer, checkOffset + 20, checkOffset + 40)); + if (sha1(bodec.slice(buffer, 0, checkOffset + 20)) !== checksum) { + throw new Error("Checksum mistmatch"); + } + + var byHash = {}; + indexes.sort(function (a, b) { + return a.offset - b.offset; + }); + indexes.forEach(function (data) { + byHash[data.hash] = { + offset: data.offset, + crc: data.crc, + }; + }); + var offsets = indexes.map(function (entry) { + return entry.offset; + }).sort(function (a, b) { + return a - b; + }); + + return { + offsets: offsets, + byHash: byHash, + checksum: packChecksum + }; +} + +function readUint32(buffer, offset) { + return (buffer[offset] << 24 | + buffer[offset + 1] << 16 | + buffer[offset + 2] << 8 | + buffer[offset + 3] << 0) >>> 0; +} + +// Yes this will lose precision over 2^53, but that can't be helped when +// returning a single integer. +// We simply won't support packfiles over 8 petabytes. I'm ok with that. 
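+/* The two 32-bit halves are combined as hi * 2^32 + lo, which stays exact
+   up to Number.MAX_SAFE_INTEGER (2^53 - 1). */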
+function readUint64(buffer, offset) { + var hi = (buffer[offset] << 24 | + buffer[offset + 1] << 16 | + buffer[offset + 2] << 8 | + buffer[offset + 3] << 0) >>> 0; + var lo = (buffer[offset + 4] << 24 | + buffer[offset + 5] << 16 | + buffer[offset + 6] << 8 | + buffer[offset + 7] << 0) >>> 0; + return hi * 0x100000000 + lo; +} diff --git a/mixins/indexed-db.js b/mixins/indexed-db.js new file mode 100644 index 0000000..19b43a5 --- /dev/null +++ b/mixins/indexed-db.js @@ -0,0 +1,147 @@ +"use strict"; +/*global indexedDB*/ + +var codec = require('../lib/object-codec.js'); +var sha1 = require('git-sha1'); +var modes = require('../lib/modes.js'); +var db; + +mixin.init = init; + +mixin.loadAs = loadAs; +mixin.saveAs = saveAs; +module.exports = mixin; + +function init(callback) { + + db = null; + var request = indexedDB.open("tedit", 1); + + // We can only create Object stores in a versionchange transaction. + request.onupgradeneeded = function(evt) { + var db = evt.target.result; + + if (evt.dataLoss && evt.dataLoss !== "none") { + return callback(new Error(evt.dataLoss + ": " + evt.dataLossMessage)); + } + + // A versionchange transaction is started automatically. + evt.target.transaction.onerror = onError; + + if(db.objectStoreNames.contains("objects")) { + db.deleteObjectStore("objects"); + } + if(db.objectStoreNames.contains("refs")) { + db.deleteObjectStore("refs"); + } + + db.createObjectStore("objects", {keyPath: "hash"}); + db.createObjectStore("refs", {keyPath: "path"}); + }; + + request.onsuccess = function (evt) { + db = evt.target.result; + callback(); + }; + request.onerror = onError; +} + + +function mixin(repo, prefix) { + if (!prefix) throw new Error("Prefix required"); + repo.refPrefix = prefix; + repo.saveAs = saveAs; + repo.loadAs = loadAs; + repo.readRef = readRef; + repo.updateRef = updateRef; + repo.hasHash = hasHash; +} + +function onError(evt) { + console.error("error", evt.target.error); +} + +function saveAs(type, body, callback, forcedHash) { + if (!callback) return saveAs.bind(this, type, body); + var hash; + try { + var buffer = codec.frame({type:type,body:body}); + hash = forcedHash || sha1(buffer); + } + catch (err) { return callback(err); } + var trans = db.transaction(["objects"], "readwrite"); + var store = trans.objectStore("objects"); + var entry = { hash: hash, type: type, body: body }; + var request = store.put(entry); + request.onsuccess = function() { + // console.warn("SAVE", type, hash); + callback(null, hash, body); + }; + request.onerror = function(evt) { + callback(new Error(evt.value)); + }; +} + +function loadAs(type, hash, callback) { + if (!callback) return loadAs.bind(this, type, hash); + loadRaw(hash, function (err, entry) { + if (!entry) return callback(err); + if (type !== entry.type) { + return callback(new TypeError("Type mismatch")); + } + callback(null, entry.body, hash); + }); +} + +function loadRaw(hash, callback) { + var trans = db.transaction(["objects"], "readwrite"); + var store = trans.objectStore("objects"); + var request = store.get(hash); + request.onsuccess = function(evt) { + var entry = evt.target.result; + if (!entry) return callback(); + return callback(null, entry); + }; + request.onerror = function(evt) { + callback(new Error(evt.value)); + }; +} + +function hasHash(hash, callback) { + if (!callback) return hasHash.bind(this, hash); + loadRaw(hash, function (err, body) { + if (err) return callback(err); + return callback(null, !!body); + }); +} + +function readRef(ref, callback) { + if (!callback) return 
readRef.bind(this, ref);
+  var key = this.refPrefix + "/" + ref;
+  var trans = db.transaction(["refs"], "readwrite");
+  var store = trans.objectStore("refs");
+  var request = store.get(key);
+  request.onsuccess = function(evt) {
+    var entry = evt.target.result;
+    if (!entry) return callback();
+    callback(null, entry.hash);
+  };
+  request.onerror = function(evt) {
+    callback(new Error(evt.value));
+  };
+}
+
+function updateRef(ref, hash, callback) {
+  if (!callback) return updateRef.bind(this, ref, hash);
+  var key = this.refPrefix + "/" + ref;
+  var trans = db.transaction(["refs"], "readwrite");
+  var store = trans.objectStore("refs");
+  var entry = { path: key, hash: hash };
+  var request = store.put(entry);
+  request.onsuccess = function() {
+    callback();
+  };
+  request.onerror = function(evt) {
+    callback(new Error(evt.value));
+  };
}
diff --git a/mixins/mem-cache.js b/mixins/mem-cache.js
new file mode 100644
index 0000000..0434481
--- /dev/null
+++ b/mixins/mem-cache.js
@@ -0,0 +1,53 @@
+"use strict";
+
+var encoders = require('../lib/object-codec').encoders;
+var decoders = require('../lib/object-codec').decoders;
+var Binary = require('bodec').Binary;
+
+var cache = memCache.cache = {};
+module.exports = memCache;
+
+function memCache(repo) {
+  var loadAs = repo.loadAs;
+  repo.loadAs = loadAsCached;
+  function loadAsCached(type, hash, callback) {
+    if (!callback) return loadAsCached.bind(this, type, hash);
+    if (hash in cache) return callback(null, dupe(type, cache[hash]), hash);
+    loadAs.call(repo, type, hash, function (err, value) {
+      if (value === undefined) return callback(err);
+      if (type !== "blob" || value.length < 100) {
+        cache[hash] = dupe(type, value);
+      }
+      return callback.apply(this, arguments);
+    });
+  }
+
+  var saveAs = repo.saveAs;
+  repo.saveAs = saveAsCached;
+  function saveAsCached(type, value, callback) {
+    if (!callback) return saveAsCached.bind(this, type, value);
+    value = dupe(type, value);
+    saveAs.call(repo, type, value, function (err, hash) {
+      if (err) return callback(err);
+      if (type !== "blob" || value.length < 100) {
+        cache[hash] = value;
+      }
+      return callback(null, hash, value);
+    });
+  }
+}
+function dupe(type, value) {
+  if (type === "blob") {
+    if (value.length >= 100) return value;
+    return new Binary(value);
+  }
+  return decoders[type](encoders[type](value));
+}
+
+function deepFreeze(obj) {
+  Object.freeze(obj);
+  Object.keys(obj).forEach(function (key) {
+    var value = obj[key];
+    if (typeof value === "object") deepFreeze(value);
+  });
+}
diff --git a/mixins/mem-db.js b/mixins/mem-db.js
new file mode 100644
index 0000000..fd9a33d
--- /dev/null
+++ b/mixins/mem-db.js
@@ -0,0 +1,95 @@
+"use strict";
+
+var defer = require('../lib/defer.js');
+var codec = require('../lib/object-codec.js');
+var sha1 = require('git-sha1');
+
+module.exports = mixin;
+var isHash = /^[0-9a-f]{40}$/;
+
+function mixin(repo) {
+  var objects = {};
+  var refs = {};
+
+  repo.saveAs = saveAs;
+  repo.loadAs = loadAs;
+  repo.saveRaw = saveRaw;
+  repo.loadRaw = loadRaw;
+  repo.hasHash = hasHash;
+  repo.readRef = readRef;
+  repo.updateRef = updateRef;
+  repo.listRefs = listRefs;
+
+  function readRef(ref, callback) {
+    return makeAsync(function () {
+      return refs[ref];
+    }, callback);
+  }
+
+  function listRefs(prefix, callback) {
+    return makeAsync(function () {
+      var regex = prefix && new RegExp("^" + prefix + "[/$]");
+      var out = {};
+      Object.keys(refs).forEach(function (name) {
+        if (regex && !regex.test(name)) return;
+        out[name] = refs[name];
+      });
+      return out;
+    },
callback); + } + + function updateRef(ref, hash, callback) { + return makeAsync(function () { + return (refs[ref] = hash); + }, callback); + } + + function hasHash(hash, callback) { + return makeAsync(function () { + if (!isHash.test(hash)) hash = refs[hash]; + return hash in objects; + }, callback); + } + + function saveAs(type, body, callback) { + return makeAsync(function () { + var buffer = codec.frame({type:type,body:body}); + var hash = sha1(buffer); + objects[hash] = buffer; + return hash; + }, callback); + } + + function saveRaw(hash, buffer, callback) { + return makeAsync(function () { + objects[hash] = buffer; + }, callback); + } + + function loadAs(type, hash, callback) { + return makeAsync(function () { + if (!isHash.test(hash)) hash = refs[hash]; + var buffer = objects[hash]; + if (!buffer) return []; + var obj = codec.deframe(buffer, true); + if (obj.type !== type) throw new TypeError("Type mismatch"); + return obj.body; + }, callback); + } + + function loadRaw(hash, callback) { + return makeAsync(function () { + return objects[hash]; + }, callback); + } +} + +function makeAsync(fn, callback) { + if (!callback) return makeAsync.bind(null, fn); + defer(function () { + var out; + try { out = fn(); } + catch (err) { return callback(err); } + callback(null, out); + }); +} diff --git a/mixins/pack-ops.js b/mixins/pack-ops.js new file mode 100644 index 0000000..dece5ac --- /dev/null +++ b/mixins/pack-ops.js @@ -0,0 +1,201 @@ +"use strict"; + +var sha1 = require('git-sha1'); +var applyDelta = require('../lib/apply-delta.js'); +var codec = require('../lib/object-codec.js'); +var decodePack = require('../lib/pack-codec.js').decodePack; +var encodePack = require('../lib/pack-codec.js').encodePack; +var makeChannel = require('culvert'); + +module.exports = function (repo) { + // packChannel is a writable culvert channel {put,drain} containing raw packfile binary data + // opts can contain "onProgress" or "onError" hook functions. + // callback will be called with a list of all unpacked hashes on success. + repo.unpack = unpack; // (packChannel, opts) => hashes + + // hashes is an array of hashes to pack + // packChannel will be a readable culvert channel {take} containing raw packfile binary data + repo.pack = pack; // (hashes, opts) => packChannel +}; + +function unpack(packChannel, opts, callback) { + /*jshint validthis:true*/ + if (!callback) return unpack.bind(this, packChannel, opts); + + packChannel = applyParser(packChannel, decodePack, callback); + + var repo = this; + + var version, num, numDeltas = 0, count = 0, countDeltas = 0; + var done, startDeltaProgress = false; + + // hashes keyed by offset for ofs-delta resolving + var hashes = {}; + // key is hash, boolean is cached "has" value of true or false + var has = {}; + // key is hash we're waiting for, value is array of items that are waiting. + var pending = {}; + + return packChannel.take(onStats); + + function onDone(err) { + if (done) return; + done = true; + if (err) return callback(err); + return callback(null, values(hashes)); + } + + function onStats(err, stats) { + if (err) return onDone(err); + version = stats.version; + num = stats.num; + packChannel.take(onRead); + } + + function objectProgress(more) { + if (!more) startDeltaProgress = true; + var percent = Math.round(count / num * 100); + return opts.onProgress("Receiving objects: " + percent + "% (" + (count++) + "/" + num + ") " + (more ? 
"\r" : "\n")); + } + + function deltaProgress(more) { + if (!startDeltaProgress) return; + var percent = Math.round(countDeltas / numDeltas * 100); + return opts.onProgress("Applying deltas: " + percent + "% (" + (countDeltas++) + "/" + numDeltas + ") " + (more ? "\r" : "\n")); + } + + function onRead(err, item) { + if (err) return onDone(err); + if (opts.onProgress) objectProgress(item); + if (item === undefined) return onDone(); + if (item.size !== item.body.length) { + return onDone(new Error("Body size mismatch")); + } + if (item.type === "ofs-delta") { + numDeltas++; + item.ref = hashes[item.offset - item.ref]; + return resolveDelta(item); + } + if (item.type === "ref-delta") { + numDeltas++; + return checkDelta(item); + } + return saveValue(item); + } + + function resolveDelta(item) { + if (opts.onProgress) deltaProgress(); + return repo.loadRaw(item.ref, function (err, buffer) { + if (err) return onDone(err); + if (!buffer) return onDone(new Error("Missing base image at " + item.ref)); + var target = codec.deframe(buffer); + item.type = target.type; + item.body = applyDelta(item.body, target.body); + return saveValue(item); + }); + } + + function checkDelta(item) { + var hasTarget = has[item.ref]; + if (hasTarget === true) return resolveDelta(item); + if (hasTarget === false) return enqueueDelta(item); + return repo.hasHash(item.ref, function (err, value) { + if (err) return onDone(err); + has[item.ref] = value; + if (value) return resolveDelta(item); + return enqueueDelta(item); + }); + } + + function saveValue(item) { + var buffer = codec.frame(item); + var hash = sha1(buffer); + hashes[item.offset] = hash; + has[hash] = true; + if (hash in pending) { + // I have yet to come across a pack stream that actually needs this. + // So I will only implement it when I have concrete data to test against. 
+      // (When this is implemented, each delta parked in `pending` by
+      // enqueueDelta below should be resolved against this newly saved
+      // base via resolveDelta.)
+      console.error({
+        list: pending[hash],
+        item: item
+      });
+      throw new Error("TODO: pending value was found, resolve it");
+    }
+    return repo.saveRaw(hash, buffer, onSave);
+  }
+
+  function onSave(err) {
+    if (err) return callback(err);
+    packChannel.take(onRead);
+  }
+
+  function enqueueDelta(item) {
+    var list = pending[item.ref];
+    if (!list) pending[item.ref] = [item];
+    else list.push(item);
+    packChannel.take(onRead);
+  }
+
+}
+
+// TODO: Implement delta refs to reduce stream size
+function pack(hashes, opts, callback) {
+  /*jshint validthis:true*/
+  if (!callback) return pack.bind(this, hashes, opts);
+  var repo = this;
+  var i = 0, first = true, done = false;
+  return callback(null, applyParser({ take: take }, encodePack));
+
+  function take(callback) {
+    if (done) return callback();
+    if (first) return readFirst(callback);
+    var hash = hashes[i++];
+    if (hash === undefined) {
+      return callback();
+    }
+    repo.loadRaw(hash, function (err, buffer) {
+      if (err) return callback(err);
+      if (!buffer) return callback(new Error("Missing hash: " + hash));
+      // Deframe into {type, body}; encodePack adds the pack framing.
+      callback(null, codec.deframe(buffer));
+    });
+  }
+
+  function readFirst(callback) {
+    first = false;
+    callback(null, {num: hashes.length});
+  }
+}
+
+function values(object) {
+  var keys = Object.keys(object);
+  var length = keys.length;
+  var out = new Array(length);
+  for (var i = 0; i < length; i++) {
+    out[i] = object[keys[i]];
+  }
+  return out;
+}
+
+
+function applyParser(stream, parser, onError) {
+  var extra = makeChannel();
+  extra.put = parser(extra.put);
+  stream.take(onData);
+
+  function onData(err, item) {
+    if (err) return onError(err);
+    var more;
+    try { more = extra.put(item); }
+    catch (err) { return onError(err); }
+    if (more) stream.take(onData);
+    else extra.drain(onDrain);
+  }
+
+  function onDrain(err) {
+    if (err) return onError(err);
+    stream.take(onData);
+  }
+
+  return { take: extra.take };
+}
diff --git a/mixins/path-to-entry.js b/mixins/path-to-entry.js
new file mode 100644
index 0000000..3615b7a
--- /dev/null
+++ b/mixins/path-to-entry.js
@@ -0,0 +1,51 @@
+var cache = require('./mem-cache').cache;
+var modes = require('../lib/modes');
+
+module.exports = function (repo) {
+  repo.pathToEntry = pathToEntry;
+};
+
+function pathToEntry(rootTree, path, callback) {
+  if (!callback) return pathToEntry.bind(this, rootTree, path);
+  var repo = this;
+  var mode = modes.tree;
+  var hash = rootTree;
+  var parts = path.split("/").filter(Boolean);
+  var index = 0;
+  var cached;
+  loop();
+  function loop() {
+    while (index < parts.length) {
+      if (mode === modes.tree) {
+        cached = cache[hash];
+        if (!cached) return repo.loadAs("tree", hash, onLoad);
+        var entry = cached[parts[index]];
+        if (!entry) return callback();
+        mode = entry.mode;
+        hash = entry.hash;
+        index++;
+        continue;
+      }
+      if (modes.isFile(mode)) return callback();
+      return callback(null, {
+        last: {
+          mode: mode,
+          hash: hash,
+          path: parts.slice(0, index).join("/"),
+          rest: parts.slice(index).join("/"),
+        }
+      });
+    }
+    callback(null, {
+      mode: mode,
+      hash: hash
+    });
+  }
+
+  function onLoad(err, value) {
+    if (!value) return callback(err || new Error("Missing object: " + hash));
+    cache[hash] = value;
+    loop();
+  }
+
+}
diff --git a/mixins/read-combiner.js b/mixins/read-combiner.js
new file mode 100644
index 0000000..39f128d
--- /dev/null
+++ b/mixins/read-combiner.js
@@ -0,0 +1,28 @@
+"use strict";
+
+// This replaces loadAs with a version that batches concurrent requests for
+// the same hash.
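As an illustrative aside (not part of the patch): the mixin below composes with any storage mixin. In this sketch the require paths assume an installed js-git package and `hash` stands in for some known commit hash.

```js
var repo = {};
require('js-git/mixins/mem-db')(repo);
require('js-git/mixins/read-combiner')(repo);

// All three loads coalesce into a single storage read; the second and third
// callbacks are queued on the pending list until the first load completes.
repo.loadAs("commit", hash, logMessage);
repo.loadAs("commit", hash, logMessage);
repo.loadAs("commit", hash, logMessage);

function logMessage(err, commit) {
  if (err) throw err;
  console.log(commit.message);
}
```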
+module.exports = function (repo) { + var pendingReqs = {}; + + var loadAs = repo.loadAs; + repo.loadAs = newLoadAs; + + function newLoadAs(type, hash, callback) { + if (!callback) return newLoadAs.bind(null, type, hash); + var list = pendingReqs[hash]; + if (list) { + if (list.type !== type) callback(new Error("Type mismatch")); + else list.push(callback); + return; + } + list = pendingReqs[hash] = [callback]; + list.type = type; + loadAs.call(repo, type, hash, function () { + delete pendingReqs[hash]; + for (var i = 0, l = list.length; i < l; i++) { + list[i].apply(this, arguments); + } + }); + } +}; diff --git a/mixins/sync.js b/mixins/sync.js new file mode 100644 index 0000000..6222c11 --- /dev/null +++ b/mixins/sync.js @@ -0,0 +1,147 @@ +"use strict"; + +var modes = require('../lib/modes'); + +module.exports = function (local, remote) { + local.fetch = fetch; + local.send = send; + local.readRemoteRef = remote.readRef.bind(remote); + local.updateRemoteRef = remote.updateRef.bind(remote); + + function fetch(ref, depth, callback) { + if (!callback) return fetch.bind(local, ref, depth); + sync(local, remote, ref, depth, callback); + } + + function send(ref, callback) { + if (!callback) return send.bind(local, ref); + sync(remote, local, ref, Infinity, callback); + } +}; + +// Download remote ref with depth +// Make sure to use Infinity for depth on github mounts or anything that +// doesn't allow shallow clones. +function sync(local, remote, ref, depth, callback) { + if (typeof ref !== "string") throw new TypeError("ref must be string"); + if (typeof depth !== "number") throw new TypeError("depth must be number"); + + var hasCache = {}; + + remote.readRef(ref, function (err, hash) { + if (!hash) return callback(err); + importCommit(hash, depth, function (err) { + if (err) return callback(err); + callback(null, hash); + }); + }); + + // Caching has check. 
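+  // (Memoizes positive hasHash answers in hasCache so objects already known
+  // to exist locally are only queried once per sync run.)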
+ function check(type, hash, callback) { + if (typeof type !== "string") throw new TypeError("type must be string"); + if (typeof hash !== "string") throw new TypeError("hash must be string"); + if (hasCache[hash]) return callback(null, true); + local.hasHash(hash, function (err, has) { + if (err) return callback(err); + hasCache[hash] = has; + callback(null, has); + }); + } + + function importCommit(hash, depth, callback) { + check("commit", hash, onCheck); + + function onCheck(err, has) { + if (err || has) return callback(err); + remote.loadAs("commit", hash, onLoad); + } + + function onLoad(err, commit) { + if (!commit) return callback(err || new Error("Missing commit " + hash)); + var i = 0; + importTree(commit.tree, onImport); + + function onImport(err) { + if (err) return callback(err); + if (i >= commit.parents.length || depth <= 1) { + return local.saveAs("commit", commit, onSave); + } + importCommit(commit.parents[i++], depth - 1, onImport); + } + } + + function onSave(err, newHash) { + if (err) return callback(err); + if (newHash !== hash) { + return callback(new Error("Commit hash mismatch " + hash + " != " + newHash)); + } + hasCache[hash] = true; + callback(); + } + } + + function importTree(hash, callback) { + check("tree", hash, onCheck); + + function onCheck(err, has) { + if (err || has) return callback(err); + remote.loadAs("tree", hash, onLoad); + } + + function onLoad(err, tree) { + if (!tree) return callback(err || new Error("Missing tree " + hash)); + var i = 0; + var names = Object.keys(tree); + onImport(); + + function onImport(err) { + if (err) return callback(err); + if (i >= names.length) { + return local.saveAs("tree", tree, onSave); + } + var name = names[i++]; + var entry = tree[name]; + if (modes.isBlob(entry.mode)) { + return importBlob(entry.hash, onImport); + } + if (entry.mode === modes.tree) { + return importTree(entry.hash, onImport); + } + // Skip others. 
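+      // (Gitlink entries for submodules point at commits in other
+      // repositories, so there is nothing local to import for them.)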
+ onImport(); + } + } + + function onSave(err, newHash) { + if (err) return callback(err); + if (newHash !== hash) { + return callback(new Error("Tree hash mismatch " + hash + " != " + newHash)); + } + hasCache[hash] = true; + callback(); + } + } + + function importBlob(hash, callback) { + check("blob", hash, onCheck); + + function onCheck(err, has) { + if (err || has) return callback(err); + remote.loadAs("blob", hash, onLoad); + } + + function onLoad(err, blob) { + if (!blob) return callback(err || new Error("Missing blob " + hash)); + local.saveAs("blob", blob, onSave); + } + + function onSave(err, newHash) { + if (err) return callback(err); + if (newHash !== hash) { + return callback(new Error("Blob hash mismatch " + hash + " != " + newHash)); + } + hasCache[hash] = true; + callback(); + } + } +} diff --git a/mixins/walkers.js b/mixins/walkers.js new file mode 100644 index 0000000..59a5475 --- /dev/null +++ b/mixins/walkers.js @@ -0,0 +1,152 @@ +var modes = require('../lib/modes.js'); + +module.exports = function (repo) { + repo.logWalk = logWalk; // (ref) => stream + repo.treeWalk = treeWalk; // (treeHash) => stream +}; +module.exports.walk = walk; + +function logWalk(ref, callback) { + if (!callback) return logWalk.bind(this, ref); + var last, seen = {}; + var repo = this; + if (!repo.readRef) return onShallow(); + return repo.readRef("shallow", onShallow); + + function onShallow(err, shallow) { + last = shallow; + resolveRef(repo, ref, onHash); + } + + function onHash(err, hash) { + if (err) return callback(err); + return repo.loadAs("commit", hash, function (err, commit) { + if (commit === undefined) return callback(err); + commit.hash = hash; + seen[hash] = true; + return callback(null, walk(commit, scan, loadKey, compare)); + }); + } + + function scan(commit) { + if (last === commit) return []; + return commit.parents.filter(function (hash) { + return !seen[hash]; + }); + } + + function loadKey(hash, callback) { + return repo.loadAs("commit", hash, function (err, commit) { + if (!commit) return callback(err || new Error("Missing commit " + hash)); + commit.hash = hash; + if (hash === last) commit.last = true; + return callback(null, commit); + }); + } + +} + +function compare(commit, other) { + return commit.author.date < other.author.date; +} + +function treeWalk(hash, callback) { + if (!callback) return treeWalk.bind(this, hash); + var repo = this; + return repo.loadAs("tree", hash, onTree); + + function onTree(err, body) { + if (!body) return callback(err || new Error("Missing tree " + hash)); + var tree = { + mode: modes.tree, + hash: hash, + body: body, + path: "/" + }; + return callback(null, walk(tree, treeScan, treeLoadKey, treeCompare)); + } + + function treeLoadKey(entry, callback) { + if (entry.mode !== modes.tree) return callback(null, entry); + var type = modes.toType(entry.mode); + return repo.loadAs(type, entry.hash, function (err, body) { + if (err) return callback(err); + entry.body = body; + return callback(null, entry); + }); + } + +} + +function treeScan(object) { + if (object.mode !== modes.tree) return []; + var tree = object.body; + return Object.keys(tree).map(function (name) { + var entry = tree[name]; + var path = object.path + name; + if (entry.mode === modes.tree) path += "/"; + return { + mode: entry.mode, + hash: entry.hash, + path: path + }; + }); +} + +function treeCompare(first, second) { + return first.path < second.path; +} + +function resolveRef(repo, hashish, callback) { + if (/^[0-9a-f]{40}$/.test(hashish)) { + return callback(null, hashish); 
+ } + repo.readRef(hashish, function (err, hash) { + if (!hash) return callback(err || new Error("Bad ref " + hashish)); + callback(null, hash); + }); +} + +function walk(seed, scan, loadKey, compare) { + var queue = [seed]; + var working = 0, error, cb; + return {read: read, abort: abort}; + + function read(callback) { + if (!callback) return read; + if (cb) return callback(new Error("Only one read at a time")); + if (working) { cb = callback; return; } + var item = queue.shift(); + if (!item) return callback(); + try { scan(item).forEach(onKey); } + catch (err) { return callback(err); } + return callback(null, item); + } + + function abort(callback) { return callback(); } + + function onError(err) { + if (cb) { + var callback = cb; cb = null; + return callback(err); + } + error = err; + } + + function onKey(key) { + working++; + loadKey(key, onItem); + } + + function onItem(err, item) { + working--; + if (err) return onError(err); + var index = queue.length; + while (index && compare(item, queue[index - 1])) index--; + queue.splice(index, 0, item); + if (!working && cb) { + var callback = cb; cb = null; + return read(callback); + } + } +} diff --git a/mixins/websql-db.js b/mixins/websql-db.js new file mode 100644 index 0000000..6a25744 --- /dev/null +++ b/mixins/websql-db.js @@ -0,0 +1,167 @@ +"use strict"; + +var codec = require('../lib/object-codec.js'); +var bodec = require('bodec'); +var inflate = require('../lib/inflate'); +var deflate = require('../lib/deflate'); + +var sha1 = require('git-sha1'); +var modes = require('../lib/modes.js'); +var db; + +mixin.init = init; + +mixin.loadAs = loadAs; +mixin.saveAs = saveAs; +mixin.loadRaw = loadRaw; +mixin.saveRaw = saveRaw; +module.exports = mixin; + +function mixin(repo, prefix) { + if (!prefix) throw new Error("Prefix required"); + repo.refPrefix = prefix; + repo.saveAs = saveAs; + repo.saveRaw = saveRaw; + repo.loadAs = loadAs; + repo.loadRaw = loadRaw; + repo.readRef = readRef; + repo.updateRef = updateRef; + repo.hasHash = hasHash; +} + +function init(callback) { + + db = openDatabase('tedit', '1.0', 'tedit local data', 10 * 1024 * 1024); + db.transaction(function (tx) { + tx.executeSql( + 'CREATE TABLE IF NOT EXISTS objects (hash unique, body blob)' + ); + tx.executeSql( + 'CREATE TABLE IF NOT EXISTS refs (path unique, value text)' + ); + }, function () { + console.error(arguments); + callback(new Error("Problem initializing database")); + }, function () { + callback(); + }); +} + +function saveAs(type, body, callback) { + /*jshint: validthis: true */ + if (!callback) return saveAs.bind(this, type, body); + var hash, buffer; + try { + buffer = codec.frame({type:type,body:body}); + hash = sha1(buffer); + } + catch (err) { return callback(err); } + this.saveRaw(hash, buffer, callback); +} + +function saveRaw(hash, buffer, callback) { + /*jshint: validthis: true */ + if (!callback) return saveRaw.bind(this, hash, buffer); + var sql = 'INSERT INTO objects (hash, body) VALUES (?, ?)'; + db.transaction(function (tx) { + var text; + try { + text = bodec.toBase64(deflate(buffer)); + } + catch (err) { + return callback(err); + } + tx.executeSql(sql, [hash, text], function () { + callback(null, hash); + }); + }); +} + +function loadAs(type, hash, callback) { + /*jshint: validthis: true */ + if (!callback) return loadAs.bind(this, type, hash); + loadRaw(hash, function (err, buffer) { + if (!buffer) return callback(err); + var parts, body; + try { + parts = codec.deframe(buffer); + if (parts.type !== type) throw new Error("Type mismatch"); + 
body = codec.decoders[type](parts.body); + } + catch (err) { + return callback(err); + } + callback(null, body); + }); +} + +function loadRaw(hash, callback) { + /*jshint: validthis: true */ + if (!callback) return loadRaw.bind(this, hash); + var sql = 'SELECT * FROM objects WHERE hash=?'; + db.readTransaction(function (tx) { + tx.executeSql(sql, [hash], function (tx, result) { + if (!result.rows.length) return callback(); + var item = result.rows.item(0); + var buffer; + try { + buffer = inflate(bodec.fromBase64(item.body)); + } + catch (err) { + return callback(err); + } + callback(null, buffer); + }, function (tx, error) { + callback(new Error(error.message)); + }); + }); +} + +function hasHash(type, hash, callback) { + /*jshint: validthis: true */ + loadAs(type, hash, function (err, value) { + if (err) return callback(err); + if (value === undefined) return callback(null, false); + if (type !== "tree") return callback(null, true); + var names = Object.keys(value); + next(); + function next() { + if (!names.length) return callback(null, true); + var name = names.pop(); + var entry = value[name]; + hasHash(modes.toType(entry.mode), entry.hash, function (err, has) { + if (err) return callback(err); + if (has) return next(); + callback(null, false); + }); + } + }); +} + +function readRef(ref, callback) { + /*jshint: validthis: true */ + var key = this.refPrefix + "/" + ref; + var sql = 'SELECT * FROM refs WHERE path=?'; + db.transaction(function (tx) { + tx.executeSql(sql, [key], function (tx, result) { + if (!result.rows.length) return callback(); + var item = result.rows.item(0); + callback(null, item.value); + }, function (tx, error) { + callback(new Error(error.message)); + }); + }); +} + +function updateRef(ref, hash, callback) { + /*jshint: validthis: true */ + var key = this.refPrefix + "/" + ref; + var sql = 'INSERT INTO refs (path, value) VALUES (?, ?)'; + db.transaction(function (tx) { + tx.executeSql(sql, [key, hash], function () { + callback(); + }, function (tx, error) { + callback(new Error(error.message)); + }); + }); +} diff --git a/net/git-fetch-pack.js b/net/git-fetch-pack.js new file mode 100644 index 0000000..4e75303 --- /dev/null +++ b/net/git-fetch-pack.js @@ -0,0 +1,196 @@ +"use strict"; + +var makeChannel = require('culvert'); +var wrapHandler = require('../lib/wrap-handler'); +var bodec = require('bodec'); + +module.exports = fetchPack; + +function fetchPack(transport, onError) { + + if (!onError) onError = throwIt; + + // Wrap our handler functions to route errors properly. + onRef = wrapHandler(onRef, onError); + onWant = wrapHandler(onWant, onError); + onNak = wrapHandler(onNak, onError); + onMore = wrapHandler(onMore, onError); + onReady = wrapHandler(onReady, onError); + + var caps = null; + var capsSent = false; + var refs = {}; + var haves = {}; + var havesCount = 0; + + // Create a duplex channel for talking with the agent. + var libraryChannel = makeChannel(); + var agentChannel = makeChannel(); + var api = { + put: libraryChannel.put, + drain: libraryChannel.drain, + take: agentChannel.take + }; + + // Start the connection and listen for the response. + var socket = transport("git-upload-pack", onError); + socket.take(onRef); + + // Return the other half of the duplex API channel. 
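+  // (From the agent's side: the first take() yields the remote refs, a later
+  // take() yields the {pack, progress, error} channels, and put() accepts
+  // {want}, {have}, {deepen}, null to flush, and {done} to end negotiation.)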
+ return { + put: agentChannel.put, + drain: agentChannel.drain, + take: libraryChannel.take + }; + + function onRef(line) { + if (line === undefined) { + throw new Error("Socket disconnected"); + } + if (line === null) { + api.put(refs); + api.take(onWant); + return; + } + else if (!caps) { + caps = {}; + Object.defineProperty(refs, "caps", {value: caps}); + Object.defineProperty(refs, "shallows", {value:[]}); + var index = line.indexOf("\0"); + if (index >= 0) { + line.substring(index + 1).split(" ").forEach(function (cap) { + var i = cap.indexOf("="); + if (i >= 0) { + caps[cap.substring(0, i)] = cap.substring(i + 1); + } + else { + caps[cap] = true; + } + }); + line = line.substring(0, index); + } + } + var match = line.match(/(^[0-9a-f]{40}) (.*)$/); + if (!match) { + if (typeof line === "string" && /^ERR/i.test(line)) { + throw new Error(line); + } + throw new Error("Invalid line: " + JSON.stringify(line)); + } + refs[match[2]] = match[1]; + socket.take(onRef); + } + + var packChannel; + var progressChannel; + var errorChannel; + + function onWant(line) { + if (line === undefined) return socket.put(); + if (line === null) { + socket.put(null); + return api.take(onWant); + } + if (line.deepen) { + socket.put("deepen " + line.deepen + "\n"); + return api.take(onWant); + } + if (line.have) { + haves[line.have] = true; + havesCount++; + socket.put("have " + line.have + "\n"); + return api.take(onWant); + } + if (line.want) { + var extra = ""; + if (!capsSent) { + capsSent = true; + if (caps["ofs-delta"]) extra += " ofs-delta"; + if (caps["thin-pack"]) extra += " thin-pack"; + // if (caps["multi_ack_detailed"]) extra += " multi_ack_detailed"; + // else if (caps["multi_ack"]) extra +=" multi_ack"; + if (caps["side-band-64k"]) extra += " side-band-64k"; + else if (caps["side-band"]) extra += " side-band"; + // if (caps["agent"]) extra += " agent=" + agent; + if (caps.agent) extra += " agent=" + caps.agent; + } + extra += "\n"; + socket.put("want " + line.want + extra); + return api.take(onWant); + } + if (line.done) { + socket.put("done\n"); + return socket.take(onNak); + } + throw new Error("Invalid have/want command"); + } + + function onNak(line) { + if (line === undefined) return api.put(); + if (line === null) return socket.take(onNak); + if (bodec.isBinary(line) || line.progress || line.error) { + packChannel = makeChannel(); + progressChannel = makeChannel(); + errorChannel = makeChannel(); + api.put({ + pack: { take: packChannel.take }, + progress: { take: progressChannel.take }, + error: { take: errorChannel.take }, + }); + return onMore(null, line); + } + var match = line.match(/^shallow ([0-9a-f]{40})$/); + if (match) { + refs.shallows.push(match[1]); + return socket.take(onNak); + } + match = line.match(/^ACK ([0-9a-f]{40})$/); + if (match) { + return socket.take(onNak); + } + if (line === "NAK") { + return socket.take(onNak); + } + throw new Error("Expected NAK, but got " + JSON.stringify(line)); + } + + function onMore(line) { + + if (line === undefined) { + packChannel.put(); + progressChannel.put(); + errorChannel.put(); + return api.put(); + } + if (line === null) { + api.put(line); + } + else { + if (line.progress) { + progressChannel.put(line.progress); + } + else if (line.error) { + errorChannel.put(line.error); + } + else { + if (!packChannel.put(line)) { + return packChannel.drain(onReady); + } + } + } + socket.take(onMore); + } + + function onReady() { + socket.take(onMore); + } + +} + +var defer = require('js-git/lib/defer'); +function throwIt(err) { + defer(function 
() { + throw err; + }); + // throw err; +} diff --git a/net/request-xhr.js b/net/request-xhr.js new file mode 100644 index 0000000..5bf9064 --- /dev/null +++ b/net/request-xhr.js @@ -0,0 +1,36 @@ +"use strict"; + +module.exports = request; + +function request(method, url, headers, body, callback) { + if (typeof body === "function") { + callback = body; + body = undefined; + } + if (!callback) { + return request.bind(null, method, url, headers, body); + } + var xhr = new XMLHttpRequest(); + xhr.open(method, url, true); + xhr.responseType = "arraybuffer"; + + Object.keys(headers).forEach(function (name) { + xhr.setRequestHeader(name, headers[name]); + }); + + xhr.onreadystatechange = function () { + if (xhr.readyState !== 4) return; + var resHeaders = {}; + xhr.getAllResponseHeaders().trim().split("\r\n").forEach(function (line) { + var index = line.indexOf(":"); + resHeaders[line.substring(0, index).toLowerCase()] = line.substring(index + 1).trim(); + }); + + callback(null, { + statusCode: xhr.status, + headers: resHeaders, + body: xhr.response && new Uint8Array(xhr.response) + }); + }; + xhr.send(body); +} diff --git a/net/tcp-chrome-sockets.js b/net/tcp-chrome-sockets.js new file mode 100644 index 0000000..2a14aa6 --- /dev/null +++ b/net/tcp-chrome-sockets.js @@ -0,0 +1,108 @@ +"use strict"; + +var makeChannel = require('culvert'); +var wrapHandler = require('../lib/wrap-handler'); +var tcp = window.chrome.sockets.tcp; +var runtime = window.chrome.runtime; + +module.exports = connect; + +function connect(host, port, onError) { + port = port|0; + host = String(host); + if (!port || !host) throw new TypeError("host and port are required"); + + onCreate = wrap(onCreate, onError); + onConnect = wrap(onConnect, onError); + onInfo = wrap(onInfo, onError); + onReceive = wrap(onReceive, onError); + onReceiveError = wrap(onReceiveError, onError); + onData = wrapHandler(onData, onError); + onWrite = wrap(onWrite, onError); + + var paused = false; + var open = false; + var socketId; + + var serverChannel = makeChannel(); + var clientChannel = makeChannel(); + var socket = { + put: serverChannel.put, + drain: serverChannel.drain, + take: clientChannel.take + }; + + tcp.onReceive.addListener(onReceive); + tcp.onReceiveError.addListener(onReceiveError); + tcp.create(onCreate); + + return { + put: clientChannel.put, + drain: clientChannel.drain, + take: serverChannel.take + }; + + function onCreate(createInfo) { + socketId = createInfo.socketId; + tcp.connect(socketId, host, port, onConnect); + } + + function onConnect(result) { + if (result < 0) throw new Error(runtime.lastError.message + " Connection error"); + tcp.getInfo(socketId, onInfo); + } + + function onInfo(socketInfo) { + if (!socketInfo.connected) { + throw new Error("Connection failed"); + } + open = true; + socket.take(onData); + } + + function onReceive(info) { + if (info.socketId !== socketId) return; + if (socket.put(new Uint8Array(info.data)) || paused) return; + paused = true; + tcp.setPaused(socketId, true); + socket.drain(onDrain); + } + + function onDrain() { + if (!paused) return; + paused = false; + if (open) tcp.setPaused(socketId, false); + } + + function onReceiveError(info) { + if (info.socketId !== socketId) return; + open = false; + tcp.close(socketId); + socket.put(); + // TODO: find a way to tell close and error apart. 
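+    // (A clean remote close and a genuine network failure both arrive here,
+    // so for now either one simply ends the stream.)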
+ // throw new Error("Code " + info.resultCode + " error while receiving."); + } + + function onData(data) { + tcp.send(socketId, data.buffer, onWrite); + } + + function onWrite(info) { + if (info.resultCode < 0) { + throw new Error(runtime.lastError.message + " Error writing."); + } + socket.take(onData); + } +} + + +function wrap(fn, onError) { + return function () { + try { + return fn.apply(this, arguments); + } + catch (err) { + onError(err); + } + }; +} diff --git a/net/tcp-node.js b/net/tcp-node.js new file mode 100644 index 0000000..c7f3967 --- /dev/null +++ b/net/tcp-node.js @@ -0,0 +1,85 @@ +"use strict"; + +var makeChannel = require('culvert'); +var wrapHandler = require('../lib/wrap-handler'); +var net = require('net'); + +module.exports = connect; + +function connect(host, port, onError) { + port = port|0; + host = String(host); + if (!port || !host) throw new TypeError("host and port are required"); + + // Wrap event handlers from node stream + onConnect = wrap(onConnect, onError); + pump = wrap(pump, onError); + onEnd = wrap(onEnd, onError); + onDrain = wrap(onDrain, onError); + + // Wrap event handlers from culvert socket + onTake = wrapHandler(onTake, onError); + + var serverChannel = makeChannel(); + var clientChannel = makeChannel(); + var socket = { + put: serverChannel.put, + drain: serverChannel.drain, + take: clientChannel.take + }; + + var client = net.connect({ host: host, port: port }, onConnect); + if (onError) client.on("error", onError); + + return { + put: clientChannel.put, + drain: clientChannel.drain, + take: serverChannel.take + }; + + function onConnect() { + socket.take(onTake); + client.on("end", onEnd); + client.on("readable", pump); + client.on("drain", onDrain); + client.on("error", onError); + } + + function pump() { + var chunk; + do { + chunk = client.read(); + if (!chunk) return; + } while (socket.put(chunk)); + socket.drain(pump); + } + + function onEnd() { + socket.put(); + } + + function onTake(data) { + if (data === undefined) { + client.end(); + } + else if (client.write(data)) { + socket.take(onTake); + } + } + + function onDrain() { + socket.take(onTake); + } + +} + +function wrap(fn, onError) { + return function () { + try { + return fn.apply(this, arguments); + } + catch (err) { + onError(err); + } + }; +} diff --git a/net/tcp-ws-proxy.js b/net/tcp-ws-proxy.js new file mode 100644 index 0000000..37a5ff7 --- /dev/null +++ b/net/tcp-ws-proxy.js @@ -0,0 +1,79 @@ +"use strict"; + +var makeChannel = require('culvert'); +var wrapHandler = require('../lib/wrap-handler'); + +module.exports = function (proxyUrl) { + if (proxyUrl[proxyUrl.length - 1] !== "/") proxyUrl += "/"; + + return function connect(host, port, onError) { + port = port|0; + host = String(host); + if (!port || !host) throw new TypeError("host and port are required"); + + onData = wrapHandler(onData, onError); + + var serverChannel = makeChannel(); + var clientChannel = makeChannel(); + var socket = { + put: serverChannel.put, + drain: serverChannel.drain, + take: clientChannel.take + }; + + var connected = false; + var ws = new WebSocket(proxyUrl + "tcp/" + host + "/" + port); + ws.binaryType = "arraybuffer"; + + ws.onopen = wrap(onOpen, onError); + ws.onclose = wrap(onClose, onError); + ws.onmessage = wrap(onMessage, onError); + ws.onerror = wrap(onWsError, onError); + + return { + put: clientChannel.put, + drain: clientChannel.drain, + take: serverChannel.take + }; + + function onOpen() { + ws.send("connect"); + } + + function onClose() { + socket.put(); + } + + function 
onMessage(evt) { + if (!connected && evt.data === "connect") { + connected = true; + socket.take(onData); + return; + } + + socket.put(new Uint8Array(evt.data)); + } + + function onWsError() { + console.error(arguments); + throw new Error("Generic websocket error"); + } + + function onData(chunk) { + ws.send(chunk.buffer); + socket.take(onData); + } + + }; +}; + +function wrap(fn, onError) { + return function () { + try { + return fn.apply(this, arguments); + } + catch (err) { + onError(err); + } + }; +} diff --git a/net/transport-http.js b/net/transport-http.js new file mode 100644 index 0000000..fd3b0c3 --- /dev/null +++ b/net/transport-http.js @@ -0,0 +1,103 @@ +"use strict"; + +var makeChannel = require('culvert'); +var bodec = require('bodec'); +var pktLine = require('../lib/pkt-line'); +var wrapHandler = require('../lib/wrap-handler'); + +module.exports = function (request) { + + return function httpTransport(gitUrl, username, password) { + // Send Auth header if username is set + var auth; + if (username) { + auth = "Basic " + btoa(username + ":" + (password || "")); + } + + return function (serviceName, onError) { + + // Wrap our handler functions to route errors properly. + onResponse = wrapHandler(onResponse, onError); + onWrite = wrapHandler(onWrite, onError); + onResult = wrapHandler(onResult, onError); + + // Create a duplex channel with transform for internal use. + var serverChannel = makeChannel();//0, "server"); + var clientChannel = makeChannel();//0, "client"); + var socket = { + put: serverChannel.put, + drain: serverChannel.drain, + take: clientChannel.take + }; + + // Send the initial request to start the connection. + var headers = {}; + if (auth) headers.Authorization = auth; + request("GET", gitUrl + "/info/refs?service=" + serviceName, headers, onResponse); + + // Prep for later requests + var bodyParts = []; + var bodyWrite = pktLine.framer(function (chunk) { + bodyParts.push(chunk); + }); + headers["Content-Type"] = "application/x-" + serviceName + "-request"; + socket.take(onWrite); + + var verified = 0; + var parseResponse = pktLine.deframer(function (line) { + if (verified === 2) { + socket.put(line); + } + else if (verified === 0) { + if (line !== "# service=" + serviceName) { + throw new Error("Illegal service response"); + } + verified = 1; + } + else if (verified === 1) { + if (line !== null) { + throw new Error("Expected null after service name"); + } + verified = 2; + } + }); + + // Return the other half of the duplex channel for the protocol logic to use. 
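+      // (The protocol side reads pkt-line deframed data via take() and
+      // writes frames via put(); writing "done\n" flushes the buffered
+      // body as the POST below.)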
+ return { + put: clientChannel.put, + drain: clientChannel.drain, + take: serverChannel.take + }; + + function onResponse(res) { + if (res.statusCode !== 200) { + throw new Error("Invalid response: " + res.statusCode); + } + if (res.headers["content-type"] !== "application/x-" + serviceName + "-advertisement") { + throw new Error("Not a smart http git server"); + } + parseResponse(res.body); + } + + function onWrite(item) { + if (item === undefined) return socket.put(); + bodyWrite(item); + socket.take(onWrite); + if (item !== "done\n" || !bodyParts.length) return; + var body = bodec.join(bodyParts); + bodyParts.length = 0; + request("POST", gitUrl + "/" + serviceName, headers, body, onResult); + } + + function onResult(res) { + if (res.statusCode !== 200) { + throw new Error("Invalid result: " + res.statusCode); + } + if (res.headers["content-type"] !== "application/x-" + serviceName + "-result") { + throw new Error("Not a smart http git server"); + } + parseResponse(res.body); + } + }; + }; +}; diff --git a/net/transport-tcp.js b/net/transport-tcp.js new file mode 100644 index 0000000..d32728e --- /dev/null +++ b/net/transport-tcp.js @@ -0,0 +1,48 @@ +"use strict"; + +var makeChannel = require('culvert'); +var bodec = require('bodec'); +var pktLine = require('../lib/pkt-line'); +var wrapHandler = require('../lib/wrap-handler'); + +module.exports = function (connect) { + + return function tcpTransport(path, host, port) { + port = (port|0) || 9418; + if (!path || !host) throw new Error("path and host are required"); + + return function (serviceName, onError) { + + onData = wrapHandler(onData, onError); + onDrain = wrapHandler(onDrain, onError); + + var socket = connect(host, port, onError); + var inter = makeChannel(); + inter.put = pktLine.deframer(inter.put); + + socket.put = pktLine.framer(socket.put); + var greeting = bodec.fromRaw(serviceName + " " + path + "\0host=" + host + "\0"); + socket.put(greeting); + + // Pipe socket to inter with backpressure + socket.take(onData); + function onData(chunk) { + if (inter.put(chunk)) { + socket.take(onData); + } + else { + inter.drain(onDrain); + } + } + function onDrain() { + socket.take(onData); + } + + return { + put: socket.put, + drain: socket.drain, + take: inter.take + }; + }; + }; +}; diff --git a/package.json b/package.json index c207e09..9f70c0d 100644 --- a/package.json +++ b/package.json @@ -1,29 +1,27 @@ { "name": "js-git", - "version": "0.4.0", + "version": "0.7.8", "description": "Git Implemented in JavaScript", - "main": "js-git.js", - "repository": { - "type": "git", - "url": "git://github.com/creationix/js-git.git" - }, - "devDependencies": { - "git-fs-db": "~0.1.0", - "git-node-platform": "~0.1.2", - "git-net": "~0.0.1", - "gen-run": "~0.1.1" - }, "keywords": [ "git", "js-git" ], + "repository": { + "type": "git", + "url": "git://github.com/creationix/js-git.git" + }, "author": "Tim Caswell ", "license": "MIT", "bugs": { "url": "https://github.com/creationix/js-git/issues" }, + "scripts": { + "test": "ls test/test-* | xargs -n1 node" + }, "dependencies": { - "push-to-pull": "~0.1.0", - "git-pack-codec": "~0.0.1" + "bodec": "^0.1.0", + "culvert": "^0.1.2", + "git-sha1": "^0.1.2", + "pako": "^0.2.5" } } diff --git a/specs/high/db.md b/specs/high/db.md deleted file mode 100644 index e69de29..0000000 diff --git a/specs/high/fs.md b/specs/high/fs.md deleted file mode 100644 index e69de29..0000000 diff --git a/specs/high/index.md b/specs/high/index.md deleted file mode 100644 index e69de29..0000000 diff --git a/specs/high/proto.md 
b/specs/high/proto.md deleted file mode 100644 index e69de29..0000000 diff --git a/specs/high/trace.md b/specs/high/trace.md deleted file mode 100644 index e69de29..0000000 diff --git a/specs/low/bops.md b/specs/low/bops.md deleted file mode 100644 index e69de29..0000000 diff --git a/specs/low/continuable.md b/specs/low/continuable.md deleted file mode 100644 index e69de29..0000000 diff --git a/specs/low/deflate.md b/specs/low/deflate.md deleted file mode 100644 index e69de29..0000000 diff --git a/specs/low/http.md b/specs/low/http.md deleted file mode 100644 index e69de29..0000000 diff --git a/specs/low/inflate.md b/specs/low/inflate.md deleted file mode 100644 index e69de29..0000000 diff --git a/specs/low/sha1.md b/specs/low/sha1.md deleted file mode 100644 index e69de29..0000000 diff --git a/specs/low/simple-stream.md b/specs/low/simple-stream.md deleted file mode 100644 index e69de29..0000000 diff --git a/specs/low/ssh.md b/specs/low/ssh.md deleted file mode 100644 index e69de29..0000000 diff --git a/specs/low/tcp.md b/specs/low/tcp.md deleted file mode 100644 index e69de29..0000000 diff --git a/test/run.js b/test/run.js new file mode 100644 index 0000000..e0655bf --- /dev/null +++ b/test/run.js @@ -0,0 +1,48 @@ +// Ultra simple test runner with TAP output. + +var inspect = require('util').inspect; +var defer = require('../lib/defer.js'); +var log = console.log; +console.log = function () { + var args = [].slice.call(arguments).map(function (arg) { + return inspect(arg, {colors:true}); + }); + log(args.join(" ").split("\n").map(function (line) { + return "# " + line; + }).join("\n")); +}; + +module.exports = function (tests) { + var timeout; + var test; + var index = 0; + log("1.." + (tests.length)); + next(); + function next(err) { + if (timeout) clearTimeout(timeout); + if (index) { + if (err) { + log(err.stack.split("\n").map(function (line) { + return "# " + line; + }).join("\n")); + log("not ok " + index + " - " + test.name); + } + else { + log("ok " + index + " - " + test.name); + } + } + test = tests[index++]; + if (!test) return; + timeout = setTimeout(onTimeout, 1000); + try { + if (test.length) test(next); + else test(); + } + catch (err) { return next(err); } + if (!test.length) defer(next); + } + + function onTimeout() { + next(new Error("Test timeout")); + } +}; \ No newline at end of file diff --git a/test/sample-pack.js b/test/sample-pack.js new file mode 100644 index 0000000..d90a99c --- /dev/null +++ b/test/sample-pack.js @@ -0,0 +1,5 @@ +var bodec = require('bodec'); + +// This is a small sample packfile with couple offset deltas +// pack-5851ce932ec42973b51d631afe25da247c3dc49a.pack +module.exports = 
bodec.fromBase64('UEFDSwAAAAIAAAAQnQ54nJ3MWwoCMQxA0f+uIhtQ0nYeKYgobsENZNoEC/OQMTK6e2cN/l4411YRYCo5kseITVLpSmAfOVLSnFJB6kJqSukDuSevMhu0moed9CmrKjKFwpIxtT7TINh2vSqReHX8tseywr1OcOPXJuMIJ6vTJa/CVpe5fo55mc7gY2p86LFBOGCH6PY6VTP5x7prKfAVA54Xe+yLWTbQOor7AZUCSPmRDnicnctRCgIhEADQf08xFyjGUVeFiKIrdAEdZ0lYd8OM7fh1hn4fvNFFQEi8JCcuCoWSmakwY8xoHGMxkdgimZjVM3VZB8wUPMUJLWrRPml0IdspuJl1JHJBSijGRlLpPR5bh3ttcEuvXZYFTqO2C3dJo25r/Rx5a2fQJlpNHgnhgBOi+mmrY8g/V11LgVV2mOsi6guDiEL9mA94nJ3PTWrDMBBA4b1OMRdosDT6hRIKvkIuIMkjd6htGXVCkts3Z+j2wbd4MohA+5Cai874uiQXQmuIjagsAWMp3rWS0WCM6syDDgGbDCXEmhz5Zl00iayv2mpyHk2xVLVZlhJUvst3H3DjHeb8+6Btg0/h/asOysL94Oel9v0KGpPVxjtE+Jj8NKl33VmE/mPV3M8XrO8x4WOFkusPSIc+eOUjb9B4I/UHHmNMM5QOeJydy1sKwjAQRuH3rGI2oGQmlzYgIrgDcQNp8hcDTSsxostXt+B5/OD0BpAzMJmzJJs4J5Fh5OiCsB3nMFvoOAakkaHusWHtpJm1y9YYb4KXSawgR/GY9MQ+8OB/TZhVfPbb1uhaKp3j44VloUMv9ZQaYi/bWt77tNUjsQmWxTttaae91uqrtfSOf151wRorqN9Ac1mgPgYNRBeSDnicncvdCcIwEADg90xxCyiXn6YGRBRXcIG75IKBppX2xI6vM/j6waerCGAozFyCiA2Jx+QJh5Rd8l5cHUiSdcVTzeZFq8wKY5TkamYsIWO1Xkau8VRdHNhF5BLJsUWqht76XFZ4tA532j4yTXDW1q95FdK2zG0/5qVfwPoUrIshWThgRDQ/7U1V/rnmVgpsSxdQ2dV8AbwRRT6TC3icnczNCQIxEEDhe6qYBpT8JwuyCHvybgOTmOBAsoE4ouUrluD1wfd4lgLexpqjL9G5kG6YUtY56ohqccbVaEzQoaLXAp98HxOu1GHDx6u0Biemfs6zINPY6X3Mo6+gzGKV9jYEOEgvpfjWTszlHysuOzFhg+03ER9fQDcKqQl4nDM0MDAzMVFIL0pNLcnMS9crqShhEHwQ5TRdT6bE+tY/8blzjRyr9lYcMoSoy60kVmVeajlYifjVm28/SzW0d12ZKCB++trFC8ZKOxBKjMBqauylWlkm6kbyCrH0Gp01vHQ9NnMNAFftOrq1AXic80jNyclXCM8vyknhckxJUSjOz03lAgBQjAcOPXicS8zLL8lILVJIy8xJ5QIAI9cEvLEBeJyrTC1RSMzLL8lILVJIy8xJ5QIAOsAGLmWAPnicm8lYOqEUAAX6AhVkEHicKw2aEAQABEABqqoCeJwzNDAwMzFRyK1ML0pNLcnMS9crqShhEHwQ5TRdT6bE+tY/8blzjRyr9lYcAgAxUhBDqAJ4nDM0MDAzMVFIL0pNLcnMS9crqShhEHwQ5TRdT6bE+tY/8blzjRyr9lYcAgAPuQ9dqAJ4nDM0MDAzMVFIL0pNLcnMS9crqShhCK3dYPty+oksL6Y+ub1WMq+Voh9ZAAAZvA8xPHic80jNyclXCM8vyknhAgAcMgQnuZAj3ZpSLQckQi9VfpQYWt+hefM='); diff --git a/test/test-config-codec.js b/test/test-config-codec.js new file mode 100644 index 0000000..22640ba --- /dev/null +++ b/test/test-config-codec.js @@ -0,0 +1,69 @@ +var run = require('./run.js'); + +// The thing we mean to test. 
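As a quick orientation before the test body (a sketch, not part of the patch; the require path assumes an installed js-git package):

```js
var codec = require('js-git/lib/config-codec');

// decode() parses git-config text into nested objects; encode() writes it
// back out with [section "subsection"] headers and tab-indented keys.
var config = codec.decode('[user]\n\tname = Tim Caswell\n');
config.user.email = "tim@creationix.com";
console.log(codec.encode(config));
```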
+var codec = require('../lib/config-codec.js');
+
+var sample = '\
+[user]\n\
+\tname = Tim Caswell\n\
+\temail = tim@creationix.com\n\
+[core]\n\
+\teditor = vim\n\
+\twhitespace = fix,-indent-with-non-tab,trailing-space,cr-at-eol\n\
+[web]\n\
+\tbrowser = google-chrome\n\
+[color]\n\
+\tui = true\n\
+[color "branch"]\n\
+\tcurrent = yellow bold\n\
+\tlocal = green bold\n\
+\tremote = cyan bold\n\
+[color "diff"]\n\
+\tmeta = yellow bold\n\
+\tfrag = magenta bold\n\
+\told = red bold\n\
+\tnew = green bold\n\
+\twhitespace = red reverse\n\
+[github]\n\
+\tuser = creationix\n\
+\ttoken = token';
+
+var config;
+
+run([
+  function testDecode() {
+    config = codec.decode(sample);
+    if (config.user.name !== "Tim Caswell") {
+      throw new Error("Failed to parse user.name");
+    }
+    if (config.color.ui != "true") {
+      throw new Error("Failed to parse color.ui");
+    }
+    if (config.color.diff.meta !== "yellow bold") {
+      throw new Error("Failed to parse color.diff.meta");
+    }
+  },
+  function testEncode() {
+    var encoded = codec.encode(config);
+    var config2 = codec.decode(encoded);
+    if (JSON.stringify(config) !== JSON.stringify(config2)) {
+      console.log(config);
+      console.log(encoded);
+      console.log(config2);
+      throw new Error("Encode failed");
+    }
+  },
+  function testEncode2() {
+    var encoded = codec.encode({
+      foo: {
+        bar: {
+          baz: true
+        }
+      }
+    });
+    if (encoded !== '[foo "bar"]\n\tbaz = true\n') {
+      console.log(encoded);
+      throw new Error("Invalid encoding of single deep config");
+    }
+  }
+]);
diff --git a/test/test-mem-db.js b/test/test-mem-db.js
new file mode 100644
index 0000000..f7db59c
--- /dev/null
+++ b/test/test-mem-db.js
@@ -0,0 +1,57 @@
+var run = require('./run.js');
+var bodec = require('bodec');
+var sha1 = require('git-sha1');
+var codec = require('../lib/object-codec.js');
+
+var repo = {};
+require('../mixins/mem-db.js')(repo);
+
+var blob = bodec.fromUnicode("Hello World\n");
+var blobHash = "557db03de997c86a4a028e1ebd3a1ceb225be238";
+run([
+  function testSaveAs(end) {
+    repo.saveAs("blob", blob, function (err, hash) {
+      if (err) return end(err);
+      if (hash !== blobHash) {
+        console.log([hash, blobHash]);
+        return end(new Error("Hash mismatch"));
+      }
+      end();
+    });
+  },
+  function testLoadRaw(end) {
+    repo.loadRaw(blobHash, function (err, bin) {
+      if (err) return end(err);
+      var obj = codec.deframe(bin, true);
+      if (obj.type !== "blob") return end(new Error("Wrong type"));
+      if (bodec.toUnicode(obj.body) !== bodec.toUnicode(blob)) {
+        return end(new Error("Wrong body"));
+      }
+      end();
+    });
+  },
+  function testLoadAs(end) {
+    repo.loadAs("blob", blobHash, function (err, body) {
+      if (err) return end(err);
+      if (bodec.toUnicode(body) !== bodec.toUnicode(blob)) {
+        return end(new Error("Wrong body"));
+      }
+      end();
+    });
+  },
+  function testSaveRaw(end) {
+    var newBody = bodec.fromUnicode("A new body\n");
+    var bin = codec.frame({type:"blob",body:newBody});
+    var hash = sha1(bin);
+    repo.saveRaw(hash, bin, function (err) {
+      if (err) return end(err);
+      repo.loadAs("blob", hash, function (err, body) {
+        if (err) return end(err);
+        if (bodec.toUnicode(body) !== bodec.toUnicode(newBody)) {
+          return end(new Error("Body mismatch"));
+        }
+        end();
+      });
+    });
+  }
+]);
diff --git a/test/test-object-codec.js b/test/test-object-codec.js
new file mode 100644
index 0000000..49fb75e
--- /dev/null
+++ b/test/test-object-codec.js
@@ -0,0 +1,212 @@
+var modes = require('../lib/modes.js');
+var bodec = require('bodec');
+var sha1 = require('git-sha1');
+var run = require('./run.js');
+
+// The thing we mean to test.
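For orientation (a sketch, not part of the patch; the require path assumes an installed js-git package): frame() serializes a typed object the way git stores it and deframe() reverses it.

```js
var bodec = require('bodec');
var sha1 = require('git-sha1');
var codec = require('js-git/lib/object-codec');

// Frame a blob as git would store it ("blob 12\0Hello World\n"),
// hash the framed bytes, then decode it back.
var bin = codec.frame({ type: "blob", body: bodec.fromUnicode("Hello World\n") });
console.log(sha1(bin)); // 557db03de997c86a4a028e1ebd3a1ceb225be238
var obj = codec.deframe(bin, true);
console.log(obj.type, bodec.toUnicode(obj.body));
```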
+var codec = require('../lib/object-codec.js');
+
+var blobHash, treeHash, commitHash, tagHash;
+var blob, tree, commit, tag;
+var blobBin, treeBin, commitBin, tagBin;
+
+run([
+  function testEncodeBlob() {
+    blob = bodec.fromUnicode("Hello World\n");
+    blobBin = codec.frame({type: "blob", body: blob});
+    blobHash = sha1(blobBin);
+    if (blobHash !== '557db03de997c86a4a028e1ebd3a1ceb225be238') {
+      throw new Error("Invalid blob hash");
+    }
+  },
+  function testEncodeBlobInvalidType() {
+    try {
+      codec.frame({type: "blob", body: "Not a binary value"});
+    }
+    catch (err) {
+      return;
+    }
+    throw new Error("Expected an error when passing in a non-binary blob");
+  },
+  function testEncodeTree() {
+    tree = {
+      "greeting.txt": {
+        mode: modes.file,
+        hash: blobHash
+      }
+    };
+    treeBin = codec.frame({type: "tree", body: tree});
+    treeHash = sha1(treeBin);
+    if (treeHash !== "648fc86e8557bdabbc2c828a19535f833727fa62") {
+      throw new Error("Invalid tree hash");
+    }
+  },
+  function testTreeSort() {
+    var tree = {
+      "README.md": {"mode":modes.blob,"hash":"42bd87a816800cb87646e95b71273983a71a26dc"},
+      "a.js": {"mode":modes.blob,"hash":"e69de29bb2d1d6434b8b29ae775ad8c2e48c5391"},
+      "a-js": {"mode":modes.blob,"hash":"e69de29bb2d1d6434b8b29ae775ad8c2e48c5391"},
+      "b": {"mode":modes.blob,"hash":"e69de29bb2d1d6434b8b29ae775ad8c2e48c5391"},
+      "b-js": {"mode":modes.blob,"hash":"e69de29bb2d1d6434b8b29ae775ad8c2e48c5391"},
+      "c": {"mode":modes.blob,"hash":"e69de29bb2d1d6434b8b29ae775ad8c2e48c5391"},
+      "c.js": {"mode":modes.blob,"hash":"e69de29bb2d1d6434b8b29ae775ad8c2e48c5391"},
+      "a": {"mode":modes.tree,"hash":"496d6428b9cf92981dc9495211e6e1120fb6f2ba"},
+      "b.js": {"mode":modes.tree,"hash":"496d6428b9cf92981dc9495211e6e1120fb6f2ba"},
+      "c-js": {"mode":modes.tree,"hash":"496d6428b9cf92981dc9495211e6e1120fb6f2ba"},
+    };
+    var treeBin = codec.frame({type: "tree", body: tree});
+    var treeHash = sha1(treeBin);
+    if (treeHash !== "f78893bf52bc695f343372d4210c8c0803c7c4db") {
+      throw new Error("Invalid tree hash");
+    }
+  },
+  function testEncodeCommit() {
+    var person = {
+      name: "Tim Caswell",
+      email: "tim@creationix.com",
+      date: {
+        seconds: 1391790884,
+        offset: 7 * 60
+      }
+    };
+    commit = {
+      tree: treeHash,
+      author: person,
+      committer: person,
+      message: "Test Commit\n",
+      parents: []
+    };
+    commitBin = codec.frame({type: "commit", body: commit});
+    commitHash = sha1(commitBin);
+    if (commitHash !== "500c37fc17988b90c82d812a2d6fc25b15354bf2") {
+      throw new Error("Invalid commit hash");
+    }
+  },
+  function testEncodeTag() {
+    tag = {
+      object: commitHash,
+      type: "commit",
+      tag: "mytag",
+      tagger: {
+        name: "Tim Caswell",
+        email: "tim@creationix.com",
+        date: {
+          seconds: 1391790910,
+          offset: 7 * 60
+        }
+      },
+      message: "Tag it!\n"
+    };
+    tagBin = codec.frame({type: "tag", body: tag});
+    tagHash = sha1(tagBin);
+    if (tagHash !== "49522787662a0183652dc9cafa5c008b5a0e0c2a") {
+      throw new Error("Invalid tag hash");
+    }
+  },
+  function testDecodeTag() {
+    var obj = codec.deframe(tagBin, true);
+    if (obj.type !== "tag") throw new Error("Invalid type");
+    if (!(obj.body.object === tag.object && obj.body.message === tag.message)) {
+      throw new Error("Problem decoding");
+    }
+  },
+  function testDecodeCommit() {
+    var obj = codec.deframe(commitBin, true);
+    if (obj.type !== "commit") throw new Error("Invalid type");
+    if (!(obj.body.tree === commit.tree &&
+          obj.body.message === commit.message &&
+          obj.body.author.date.seconds === commit.author.date.seconds)) {
+      throw new Error("Problem decoding");
+    }
+  },
+  function testDecodeTree() {
+    var obj = codec.deframe(treeBin, true);
+    if (obj.type !== "tree") throw new Error("Invalid type");
+    if (obj.body["greeting.txt"].hash !== tree["greeting.txt"].hash) {
+      throw new Error("Problem decoding");
+    }
+  },
+  function testDecodeBlob() {
+    var obj = codec.deframe(blobBin, true);
+    if (obj.type !== "blob") throw new Error("Invalid type");
+    if (bodec.toUnicode(obj.body) !== bodec.toUnicode(blob)) {
+      throw new Error("Problem decoding");
+    }
+  },
+  function testUnicodeFilePath() {
+    var name = "æðelen";
+    var tree = {};
+    tree[name] = {
+      mode: modes.file,
+      hash: blobHash
+    };
+    var bin = codec.frame({type:"tree", body: tree});
+    var obj = codec.deframe(bin, true);
+    var newName = Object.keys(obj.body)[0];
+    if (newName !== name) {
+      console.log(newName + " != " + name);
+      throw new Error("Problem storing and retrieving utf8 paths");
+    }
+    if (obj.body[name].hash !== tree[name].hash) {
+      throw new Error("Problem decoding hash hex");
+    }
+  },
+  function testUnicodeCommit() {
+    var person = {
+      name: "Laȝamon",
+      email: "laȝamon@chronicles-of-england.org",
+      date: {
+        seconds: 1391790910,
+        offset: 7 * 60
+      }
+    };
+    var commit = {
+      tree: treeHash,
+      author: person,
+      committer: person,
+      message: "An preost wes on leoden, Laȝamon was ihoten\nHe wes Leovenaðes sone -- liðe him be Drihten\n",
+      parents: []
+    };
+    var bin = codec.frame({type:"commit", body:commit});
+    var obj = codec.deframe(bin, true);
+    if (commit.author.name !== obj.body.author.name ||
+        commit.author.email !== obj.body.author.email ||
+        commit.message !== obj.body.message) {
+      console.log([obj.body.author, obj.body.message]);
+      throw new Error("Problem decoding utf8 parts in commit");
+    }
+  },
+  function testUnicodeTag() {
+    var tag = {
+      object: commitHash,
+      type: "commit",
+      tag: "Laȝamon",
+      tagger: {
+        name: "Laȝamon",
+        email: "laȝamon@chronicles-of-england.org",
+        date: {
+          seconds: 1391790910,
+          offset: 7 * 60
+        }
+      },
+      message: "He wonede at Ernleȝe at æðelen are chirechen,\nUppen Sevarne staþe, sel þar him þuhte,\nOnfest Radestone, þer he bock radde.\n"
+    };
+    var bin = codec.frame({type:"tag", body:tag});
+    var obj = codec.deframe(bin, true);
+    if (tag.tagger.name !== obj.body.tagger.name ||
+        tag.tagger.email !== obj.body.tagger.email ||
+        tag.message !== obj.body.message) {
+      console.log([obj.body.tagger, obj.body.message]);
+      throw new Error("Problem decoding utf8 parts in tag");
+    }
+  },
+  function testBinaryBlob() {
+    var blob = bodec.create(256);
+    for (var i = 0; i < 256; i++) { blob[i] = i; }
+    var bin = codec.frame({type:"blob",body:blob});
+    var obj = codec.deframe(bin, true);
+    if (bodec.toRaw(blob) !== bodec.toRaw(obj.body)) {
+      throw new Error("Problem decoding binary blob");
+    }
+  }
+]);
diff --git a/test/test-pack-codec.js b/test/test-pack-codec.js
new file mode 100644
index 0000000..4f6c1d2
--- /dev/null
+++ b/test/test-pack-codec.js
@@ -0,0 +1,100 @@
+var bodec = require('bodec');
+var run = require('./run.js');
+var decoders = require('../lib/object-codec.js').decoders;
+var encoders = require('../lib/object-codec.js').encoders;
+
+// The thing we mean to test.
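For orientation (a sketch, not part of the patch): both halves of the pack codec are push-style writers. `packChunk` stands in for consecutive slices of raw packfile bytes such as the sample pack above.

```js
var codec = require('js-git/lib/pack-codec');

var write = codec.decodePack(function (item) {
  if (item === undefined) return console.log("end of pack");
  if (item.num !== undefined) return console.log("header:", item.num, "objects");
  console.log(item.type, "object,", item.body.length, "bytes");
});

write(packChunk); // feed as many consecutive chunks as needed
write();          // then flush with an empty call
```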
+var codec = require('../lib/pack-codec.js'); + +var pack = require('./sample-pack.js'); +var items = []; +var newPack; + +function unpackStream(stream) { + var meta, out = [], finished = false; + var write = codec.decodePack(onItem); + for (var i = 0, l = stream.length; i < l; i += 128) { + var slice = bodec.slice(stream, i, i + 128); + try { + // console.log("SLICE", slice); + write(slice); + } + catch (err) { + throw err; + } + } + write(); + + function onItem(item) { + // console.log("UNPACK", item); + if (item === undefined) { + finished = true; + } + else if (!meta) { + meta = item; + } + else { + out.push(item); + } + } + if (!finished) throw new Error("unpack stream didn't finish"); + if (out.length !== meta.num) throw new Error("Item num mismatch"); + return out; +} + + +run([ + function testDecodePack() { + var counts = {}; + items = unpackStream(pack).map(function (item) { + counts[item.type] = counts[item.type] || 0; + counts[item.type]++; + if (item.type === "tree" || item.type === "tag" || item.type === "commit") { + item.body = decoders[item.type](item.body); + } + return item; + }); + if (counts.commit !== 6) throw new Error("Wrong number of commits parsed"); + if (counts.tree !== 4) throw new Error("Wrong number of trees parsed"); + if (counts.blob !== 4) throw new Error("Wrong number of blobs parsed"); + if (counts['ofs-delta'] !== 2) throw new Error("Wrong number of offset deltas parsed"); + }, + function testEncodePack() { + var done = false; + var outs = []; + + var write = codec.encodePack(function (item) { + if (item === undefined) { + done = true; + return; + } + if (!bodec.isBinary(item)) throw new Error("encode output must be buffers"); + outs.push(item); + }); + write({num:items.length}); + items.forEach(function (item) { + if (!bodec.isBinary(item.body)) { + item.body = encoders[item.type](item.body); + } + write(item); + }); + write(); + + if (!done) throw new Error("Output stream never ended"); + + newPack = bodec.join(outs); + }, + function verifyEncodePack() { + try { + unpackStream(newPack); + if (bodec.toHex(pack) !== bodec.toHex(newPack)) { + throw new Error("Final pack doesn't match original."); + } + } + catch (err) { + console.log(bodec.toHex(pack)); + console.log(bodec.toHex(newPack)); + throw err; + } + } +]); diff --git a/test/test-pack-ops.js b/test/test-pack-ops.js new file mode 100644 index 0000000..001d958 --- /dev/null +++ b/test/test-pack-ops.js @@ -0,0 +1,55 @@ +var run = require('./run.js'); + +var repo = {}; +require('../mixins/mem-db.js')(repo); + +var pack = require('./sample-pack.js'); +var hashes; + +run([ + function setup() { + require('../mixins/pack-ops.js')(repo); + }, + function testUnpack(end) { + repo.unpack(singleStream(pack), { + onProgress: onProgress + }, function (err, result) { + if (err) return end(err); + hashes = result; + if (hashes.length !== 16) { + return end(new Error("Wrong number of objects unpacked")); + } + end(); + }); + function onProgress(progress) { + // console.log(progress); + } + }, + function testPack(end) { + var stream; + var parts = []; + repo.pack(hashes, {}, function (err, result) { + if (err) return end(err); + stream = result; + stream.take(onRead); + }); + function onRead(err, chunk) { + if (err) return end(err); + // console.log(chunk); + if (chunk) { + parts.push(chunk); + return stream.take(onRead); + } + end(); + } + } +]); + +function singleStream(item) { + var done = false; + return { take: function (callback) { + if (done) return callback(); + done = true; + callback(null, item); + }}; +} 
\ No newline at end of file diff --git a/test/test-zlib.js b/test/test-zlib.js new file mode 100644 index 0000000..78f8090 --- /dev/null +++ b/test/test-zlib.js @@ -0,0 +1,44 @@ +var run = require('./run.js'); +var bodec = require('bodec'); + +// The thing we mean to test. +var inflate = require('../lib/inflate.js'); +var deflate = require('../lib/deflate.js'); +var inflateStream = require('../lib/inflate-stream.js'); + +var bin = bodec.create(1024); +for (var i = 0; i < 1024; i++) { + bin[i] = i >> 2 | i % 4 & 0x7f; +} + +run([ + function testRoundTrip() { + var deflated = deflate(bin); + if (!bodec.isBinary(deflated)) { + throw new Error("deflate output should be native binary"); + } + var inflated = inflate(deflated); + if (!bodec.isBinary(inflated)) { + throw new Error("inflate output should be native binary"); + } + if (bodec.toRaw(bin) !== bodec.toRaw(inflated)) { + console.log([bin, inflated]); + throw new Error("Problem with roundtrip"); + } + }, + function testStream() { + var done = false; + var chunks = []; + var deflated = deflate(bin); + var inf = inflateStream(); + + for (var i = 0, l = deflated.length; i < l; ++i) { + inf.write(deflated[i]); + } + var inflated = inf.flush(); + if (bodec.toRaw(bin) !== bodec.toRaw(inflated)) { + console.log([bin.length, inflated.length]); + throw new Error("Problem with roundtrip"); + } + } +]);
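To close the patch out, here is an illustrative sketch (not part of the diff) of how these mixins compose end to end. It assumes gen-run plus an installed js-git package for the require paths, and reuses object values from the tests above:

```js
var run = require('gen-run');
var bodec = require('bodec');
var modes = require('js-git/lib/modes');

var remote = {}, local = {};
require('js-git/mixins/mem-db')(remote);
require('js-git/mixins/mem-db')(local);
require('js-git/mixins/sync')(local, remote);

run(function* () {
  // Author a one-commit history in the "remote" repo.
  var blobHash = yield remote.saveAs("blob", bodec.fromUnicode("Hello World\n"));
  var treeHash = yield remote.saveAs("tree", {
    "greeting.txt": { mode: modes.file, hash: blobHash }
  });
  var person = {
    name: "Tim Caswell",
    email: "tim@creationix.com",
    date: { seconds: 1391790884, offset: 7 * 60 }
  };
  var commitHash = yield remote.saveAs("commit", {
    tree: treeHash,
    author: person,
    committer: person,
    message: "Test Commit\n",
    parents: []
  });
  yield remote.updateRef("refs/heads/master", commitHash);

  // Replicate everything reachable from the ref into `local`.
  var head = yield local.fetch("refs/heads/master", Infinity);
  console.log("fetched", head); // same hash as commitHash
});
```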