diff --git a/.gitignore b/.gitignore deleted file mode 100644 index 6161444..0000000 --- a/.gitignore +++ /dev/null @@ -1,4 +0,0 @@ -*.git -node_modules -.zedstate -tags diff --git a/.jshintrc b/.jshintrc deleted file mode 100644 index 953ae25..0000000 --- a/.jshintrc +++ /dev/null @@ -1,12 +0,0 @@ -{ - // Enforcing options, without name - set to false to ignore violations - "-W030": false, // 'Expected an assignment or function call and instead saw an expression.' - "-W058": false, // 'Missing '()' invoking a constructor.' - - "loopfunc": true, - - // Environments - set to true to allow environment variables - "browser": true, - "node": true, - "esnext": true -} diff --git a/BACKERS.md b/BACKERS.md index 5983003..f84931b 100644 --- a/BACKERS.md +++ b/BACKERS.md @@ -294,7 +294,7 @@ Originally JS-Git started at a [kickstarter project][]. This was to enable me t - LSD25 - Nima Gardideh (nemo) - Patrick Collins (pat@burned.com) - - Michael J. Ryan (tracker1) + - Michael J. Ryan ([@tracker1](https://github.com/tracker1)) - technoweenie - David Hayes - Meyer SciTech Solutions, LLC diff --git a/LICENSE b/LICENSE index c968f88..bfa2ab5 100644 --- a/LICENSE +++ b/LICENSE @@ -1,6 +1,6 @@ The MIT License (MIT) -Copyright (c) 2013 Tim Caswell +Copyright (c) 2013-2014 Tim Caswell Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal @@ -19,4 +19,3 @@ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - diff --git a/README.md b/README.md index 3215ae7..257315d 100644 --- a/README.md +++ b/README.md @@ -1,219 +1,265 @@ -js-git -====== +# JS-Git +[![Gitter](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/creationix/js-git?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) -Git Implemented in JavaScript. +This project is a collection of modules that helps in implementing git powered +applications in JavaScript. The original purpose for this is to enable better +developer tools for authoring code in restricted environments like ChromeBooks +and tablets. It also enables using git as a database to replace SQL and no-SQL +data stores in many applications. -This project is very modular and configurable by gluing different components together. +This project was initially funded by two crowd-sourced fundraisers. See details +in [BACKERS.md](BACKERS.md) and [BACKERS-2.md](BACKERS-2.md). Thanks to all of +you who made this possible! -This repo, `js-git`, is the core implementation of git and consumes various instances of interfaces. This means that your network and persistance stack is completely pluggable. +## Usage -If you're looking for a more pre-packaged system, consider packages like `creationix/git-node` that implement all the abstract interfaces using node.js native APIs. The `creationix/jsgit` package is an example of a CLI tool that consumes this. +Detailed API docs are contained in the [doc](doc) subfolder of this repository. -The main end-user API as exported by this module for working with local repositories is: +In general the way you use js-git is you create a JS object and then mixin the +functionality you need. Here is an example of creating an in-memory database, +creating some objects, and then walking that tree using the high-level walker +APIs. 
-## Initialize the library - -First you create an instance of the library by injecting the platform dependencies. +## Creating a repo object. ```js -var platform = require('git-node-platform'); -var jsGit = require('js-git')(platform); +// This provides symbolic names for the octal modes used by git trees. +var modes = require('js-git/lib/modes'); + +// Create a repo by creating a plain object. +var repo = {}; + +// This provides an in-memory storage backend that provides the following APIs: +// - saveAs(type, value) => hash +// - loadAs(type, hash) => hash +// - saveRaw(hash, binary) => +// - loadRaw(hash) => binary +require('js-git/mixins/mem-db')(repo); + +// This adds a high-level API for creating multiple git objects by path. +// - createTree(entries) => hash +require('js-git/mixins/create-tree')(repo); + +// This provides extra methods for dealing with packfile streams. +// It depends on +// - unpack(packStream, opts) => hashes +// - pack(hashes, opts) => packStream +require('js-git/mixins/pack-ops')(repo); + +// This adds in walker algorithms for quickly walking history or a tree. +// - logWalk(ref|hash) => stream +// - treeWalk(hash) => stream +require('js-git/mixins/walkers')(repo); + +// This combines parallel requests for the same resource for efficiency under load. +require('js-git/mixins/read-combiner')(repo); + +// This makes the object interface less strict. See its docs for details +require('js-git/mixins/formats')(repo); ``` -## Wrap a Database - -Then you implement the database interface (or more likely use a library to create it for you). +## Generators vs Callbacks -```js -var fsDb = require('git-fs-db')(platform); -var db = fsDb("/path/to/repo.git"); -``` +There are two control-flow styles that you can use to consume js-git APIs. All +the examples here use `yield` style and assume the code is contained within a +generator function that's yielding to a tool like [gen-run](https://github.com/creationix/gen-run). -The database interface is documented later on. +This style requires ES6 generators. This feature is currently in stable Firefox, +in stable Chrome behind a user-configurable flag, in node.js 0.11.x or greater +with a command-line flag. -## Continuables +Also you can use generators on any ES5 platform if you use a source transform +like Facebook's [regenerator](http://facebook.github.io/regenerator/) tool. -In all public async functions you can either pass in a node-style callback last or omit the callback and it will return you a continuable. - -This means you can consume the js-git library using normal ES3 code or if you prefer use [gen-run][] and consume the continuables. - -If the callback is omitted, a continuable is returned. You must pass a callback into this continuable to actually start the action. +You read more about how generators work at [Generators vs Fibers](http://howtonode.org/generators-vs-fibers). ```js -// Callback mode -jsgit.someAction(arg1, arg2, function (err, result) { - ... -}); - -// Continuable mode -var cont = jsgit.someAction(arg1, arg2); -cont(function (err, result) { - ... +var run = require('gen-run'); + +run(function*() { + // Blocking logic goes here. You can use yield + var result = yield someAction(withArgs); + // The generator pauses at yield and resumes when the data is available. + // The rest of your process is not blocked, just this generator body. + // If there was an error, it will throw into this generator. 
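+  // Wrap the yield in a try/catch if you want to handle that error here.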
}); - -// Continuable mode with gen-run -var result = yield jsgit.someAction(arg1, arg2); ``` -### db.get(key, [callback]) -> value - -Load a ref or object from the database. - -The database should assume that keys that are 40-character long hex strings are sha1 hashes. The value for these will always be binary (`Buffer` in node, `Uint8Array` in browser) -All other keys are paths like `refs/heads/master` or `HEAD` and the value is a string. +If you can't use this new feature or just plain prefer node-style callbacks, all +js-git APIs also support that. The way this works is actually quite simple. +If you don't pass in the callback, the function will return a partially applied +version of your call expecting just the callback. +```js +someAction(withArgs, function (err, value) { + if (err) return handleMyError(err); + // do something with value +}); -### db.set(key, value, [callback]) - -Save a value to the database. Same rules apply about hash keys being binary values and other keys being string values. - -### db.has(key, [callback]) -> hasKey? +// The function would be implemented to support both style like this. +function someAction(arg, callback) { + if (!callback) return someAction.bind(this, arg); + // We now have callback and arg +} +``` -Check if a key is in the database +## Basic Object Creation -### db.del(key, [callback]) +Now we have an in-memory git repo useful for testing the network operations or +just getting to know the available APIs. -Remove an object or ref from the database. +In this example, we'll create a blob, create a tree containing that blob, create +a commit containing that tree. This shows how to create git objects manually. -### db.keys(prefix, [callback]) -> keys +```js + // First we create a blob from a string. The `formats` mixin allows us to + // use a string directly instead of having to pass in a binary buffer. + var blobHash = yield repo.saveAs("blob", "Hello World\n"); + + // Now we create a tree that is a folder containing the blob as `greeting.txt` + var treeHash = yield repo.saveAs("tree", { + "greeting.txt": { mode: modes.file, hash: blobHash } + }); + + // With that tree, we can create a commit. + // Again the `formats` mixin allows us to omit details like committer, date, + // and parents. It assumes sane defaults for these. + var commitHash = yield repo.saveAs("commit", { + author: { + name: "Tim Caswell", + email: "tim@creationix.com" + }, + tree: treeHash, + message: "Test commit\n" + }); -Given a path prefix, give all the keys. This is like a readdir if you treat the keys as paths. +``` -For example, given the keys `refs/heads/master`, `refs/heads/experimental`, `refs/tags/0.1.3` and the prefix `refs/heads/`, the output would be `master` and `experimental`. +## Basic Object Loading -A null prefix returns all non hash keys. +We can read objects back one at a time using `loadAs`. -### db.init([callback]) +```js +// Reading the file "greeting.txt" from a commit. -Initialize a database. This is where you db implementation can setup stuff. +// We first read the commit. +var commit = yield repo.loadAs("commit", commitHash); +// We then read the tree using `commit.tree`. +var tree = yield repo.loadAs("tree", commit.tree); +// We then read the file using the entry hash in the tree. +var file = yield repo.loadAs("blob", tree["greeting.txt"].hash); +// file is now a binary buffer. +``` -### db.clear([callback]) +When using the `formats` mixin there are two new types for `loadAs`, they are +`"text"` and `"array"`. 
-This is for when the user wants to delete or otherwise reclaim your database's resources. +```js +// When you're sure the file contains unicode text, you can load it as text directly. +var fileAsText = yield repo.loadAs("text", blobHash); +// Also if you prefer array format, you can load a directory as an array. +var entries = yield repo.loadAs("array", treeHash); +entries.forEach(function (entry) { + // entry contains {name, mode, hash} +}); +``` -### Wrapping the DataBase +## Using Walkers -Now that you have a database instance, you can use the jsGit library created above. +Now that we have a repo with some minimal data in it, we can query it. Since we +included the `walkers` mixin, we can walk the history as a linear stream or walk +the file tree as a depth-first linear stream. ```js -var repo = jsGit(db); -``` +// Create a log stream starting at the commit we just made. +// You could also use symbolic refs like `refs/heads/master` for repos that +// support them. +var logStream = yield repo.logWalk(commitHash); -### repo.load(hash(ish), [callback]) -> git object +// Looping through the stream is easy by repeatedly calling waiting on `read`. +var commit, object; +while (commit = yield logStream.read(), commit !== undefined) { -Load a git object from the database. You can pass in either a hash or a symbolic name like `HEAD` or `refs/tags/v3.1.4`. + console.log(commit); -The object will be of the form: + // We can also loop through all the files of each commit version. + var treeStream = yield repo.treeWalk(commit.tree); + while (object = yield treeStream.read(), object !== undefined) { + console.log(object); + } -```js -{ - type: "commit", // Or "tag", "tree", or "blob" - body: { ... } // Or an array for tree and a binary value for blob. } ``` -### repo.save(object, [callback]) -> hash +## Filesystem Style Interface -Save an object to the database. This will give you back the hash of the cotent by which you can retrieve the value back. - -### repo.loadAs(type, hash, [callback]) -> body - -This convenience wrapper will call `repo.load` for you and then check if the type is what you expected. If it is, it will return the body directly. If it's not, it will error. +If you feel that creating a blob, then creating a tree, then creating the parent +tree, etc is a lot of work to save just one file, I agree. While writing the +tedit app, I discovered a nice high-level abstraction that you can mixin to make +this much easier. This is the `create-tree` mixin referenced in the above +config. ```js -var commit = yield repo.loadAs("commit", "HEAD"); -var tree = yield repo.loadAs("tree", commit.tree); +// We wish to create a tree that contains `www/index.html` and `README.me` files. +// This will create these two blobs, create a tree for `www` and then create a +// tree for the root containing `README.md` and the newly created `www` tree. +var treeHash = yield repo.createTree({ + "www/index.html": { + mode: modes.file, + content: "
<h1>Hello</h1>\n<p>This is an HTML page?</p>
\n" + }, + "README.md": { + mode: modes.file, + content: "# Sample repo\n\nThis is a sample\n" + } +}); ``` -I'm using yield syntax because it's simpler, you can use callbacks instead if you prefer. - -### repo.saveAs(type, body, [callback]) -> hash - -Another convenience wrapper, this time to save objects as a specefic type. The body must be in the right format. +This is great for creating several files at once, but it can also be used to +edit existing trees by adding new files, changing existing files, or deleting +existing entries. ```js -var blobHash = yield repo.saveAs("blob", binaryData); -var treeHash = yield repo.saveAs("tree", [ - { mode: 0100644, name: "file.dat", hash: blobHash } -]); -var commitHash = yield repo.saveAs("commit", { - tree: treeHash, - author: { name: "Tim Caswell", email: "tim@creationix.com", date: new Date }, - message: "Save the blob" -}); +var changes = [ + { + path: "www/index.html" // Leaving out mode means to delete the entry. + }, + { + path: "www/app.js", // Create a new file in the existing directory. + mode: modes.file, + content: "// this is a js file\n" + } +]; + +// We need to use array form and specify the base tree hash as `base`. +changes.base = treeHash; + +treeHash = yield repo.createTree(changes); ``` -### repo.remove(hash, [callback]) - -Remove an object. - -### repo.unpack(packFileStream, opts, [callback]) - -Import a packfile stream (simple-stream format) into the current database. This is used mostly for clone and fetch operations where the stream comes from a remote repo. - -`opts` is a hash of optional configs. - - - `opts.onProgress(progress)` - listen to the git progress channel by passing in a event listener. - - `opts.onError(error)` - same thing, but for the error channel. - - `opts.deline` - If this is truthy, the progress and error messages will be rechunked to be whole lines. They usually come jumbled in the internal sidechannel. - -### repo.logWalk(hash(ish), [callback]) -> log stream - -This convenience wrapper creates a readable stream of the history sorted by author date. - -If you want full history, pass in `HEAD` for the hash. - -### repo.treeWalk(hash(ish), [callback]) -> file stream - -This helper will return a stream of files suitable for traversing a file tree as a linear stream. The hash can be a ref to a commit, a commit hash or a tree hash directly. - -### repo.walk(seed, scan, loadKey, compare) -> stream - -This is the generic helper that `logWalk` and `treeWalk` use. See `js-git.js` source for usage. - -### repo.resolveHashish(hashish, [callback]) -> hash - -Resolve a ref, branch, or tag to a real hash. - -### repo.updateHead(hash, [callback]) - -Update whatever branch `HEAD` is pointing to so that it points to `hash`. - -You'll usually want to do this after creating a new commint in the HEAD branch. - -### repo.getHead([callback]) -> ref name - -Read the current active branch. +## Creating Composite Filesystems -### repo.setHead(ref, [callback]) +The real fun begins when you create composite filesystems using git submodules. -Set the current active branch. +The code that handles this is not packaged as a repo mixin since it spans several +independent repos. Instead look to the [git-tree](https://github.com/creationix/git-tree) +repo for the code. It's interface is still slightly unstable and undocumented +but is used in production by tedit and my node hosting service that complements tedit. 
-### repo.fetch(remote, opts, [callback]) +Basically this module allows you to perform high-level filesystem style commands +on a virtual filesystem that consists of many js-git repos. Until there are +proper docs, you can see how tedit uses it at . -Convenience wrapper that fetches from a remote instance and calls `repo.unpack` with the resulting packfile stream for you. +## Mounting Github Repos -## Related Packages +I've been asking Github to enable CORS headers to their HTTPS git servers, but +they've refused to do it. This means that a browser can never clone from github +because the browser will disallow XHR requests to the domain. -Being that js-git is so modular, here is a list of the most relevent modules that work with js-git: +They do, however, offer a REST interface to the raw [git data](https://developer.github.com/v3/git/). - - - A generic remote protocol implementation that wraps the platform interfaces and consumes urls. - - Example Applications - - - A multi-platform GUI program that clones and browses git repos. - - - An example of using js-git in node. This is a CLI tool. - - - A packaged version of js-git made for node.js - - Platform Helpers - - - A git-http platform interface adapter that wraps git-tcp platform instances. - - - Just the platform interface for using js-git on node.js. - - - A pure-js implementation of the sha1 part of the platform interface. - - - An implementation of js-git platform for browsers. - - - An implementation of the git-tcp interface that consumes a websocket to tcp proxy server. - - - A pure-js implementation of the zlib parts of the platform interface. - - Storage Backends - - - A database interface adapter that wraps a fs interface. - - - A git-db implementation based on `localStorage`. - - - A git-db implementation that stores data in ram for quick testing. - - - A git-db implementation cased on `indexedDB`. +Using this I wrote a mixin for js-git that uses github *as* the backend store. -[gen-run]: https://github.com/creationix/gen-run +Code at . Usage in tedit can be seen at +. diff --git a/SPONSORS.md b/SPONSORS.md new file mode 100644 index 0000000..1c23536 --- /dev/null +++ b/SPONSORS.md @@ -0,0 +1,12 @@ +# Sponsored Development + +As a company, you can sponsor development of specific features to the js-git ecosystem. + +## In Progress Sponsored Features + + - JS-Git - Encrypted Filesystem - Anonymous + - Tedit - Web Runtime - Anonymous + +## Completed Sponsored Features + + - Tedit - Live Export to VFS - Anonymous diff --git a/doc/lib/config-codec.md b/doc/lib/config-codec.md new file mode 100644 index 0000000..4a95217 --- /dev/null +++ b/doc/lib/config-codec.md @@ -0,0 +1,46 @@ +# Config Codec + +This module implements a codec for reading and writing git config files (this +includes the .gitmodules file). As far as I can tell, this is a variant of +the INI format. + +## codec.decode(ini) -> config + +Given the text of the config file, return the data as an object. + +The following config: + +```ini +[user] + name = Tim Caswell + email = tim@creationix.com +[color] + ui = true +[color "branch"] + current = yellow bold + local = green bold + remote = cyan bold +``` + +Will parse to this js object + +```js +{ + user: { + name: "Tim Caswell", + email: "tim@creationix.com" + }, + color: { + ui: "true", + branch: { + current: "yellow bold", + local: "green bold", + remote: "cyan bold" + } + } +} +``` + +## codec.encode(config) -> ini + +This reverses the conversion and writes a string from a config object. 
\ No newline at end of file diff --git a/doc/lib/deflate.md b/doc/lib/deflate.md new file mode 100644 index 0000000..b81df87 --- /dev/null +++ b/doc/lib/deflate.md @@ -0,0 +1,11 @@ +# Deflate + +This module implements a simple interface that when normal given data, returns the deflated version in a callback. This wraps the pako dependency. + +## deflate(inflated) => deflated + +```js +var deflate = require('js-git/lib/deflate'); + +var deflated = deflate(original); +``` diff --git a/doc/lib/inflate-stream.md b/doc/lib/inflate-stream.md new file mode 100644 index 0000000..4faa15f --- /dev/null +++ b/doc/lib/inflate-stream.md @@ -0,0 +1,23 @@ +# Inflate Stream + +This module implements zlib inflate by hand with a special streaming interface. +This is used in js-git to inflate git object fragments in a pack-stream. + +## inflateStream(onEmit, onUnused) -> onInput + +```js +var onInput = inflateStream(onEmit, onUnused); + +someStream.on("data", function (chunk) { + onInput(null, chunk); +}); + +function onEmit(err, out) { + if (err) throw err; + // out is a chunk of inflated data +} + +function onUnused(chunks) { + // chunks is an array of extra buffers or buffer slices. +} +``` diff --git a/doc/lib/inflate.md b/doc/lib/inflate.md new file mode 100644 index 0000000..d96d36f --- /dev/null +++ b/doc/lib/inflate.md @@ -0,0 +1,11 @@ +# Inflate + +This module implements a simple interface that when given deflated data returns the inflated version. + +## inflate(deflated) -> inflated + +```js +var inflate = require('js-git/lib/inflate'); + +var inflated = inflate(deflated); +``` diff --git a/doc/lib/object-codec.md b/doc/lib/object-codec.md new file mode 100644 index 0000000..de74a32 --- /dev/null +++ b/doc/lib/object-codec.md @@ -0,0 +1,127 @@ +# Object Codec + +This module implements a codec for the binary git object format for blobs, trees, tags, and commits. + +This library is useful for writing new storage backends. Normal users will probably +just use one of the existing mixins for object storage. + +## codec.frame({type,body}) -> buffer + +This function accepts an object with `type` and `body` properties. The `type` +property must be one of "blob", "tree", "commit" or "tag". The body can be a +pre-encoded raw-buffer or a plain javascript value. See encoder docs below for +the formats of the different body types. + +The returned binary value is the fully framed git object. The sha1 of this is +the git hash of the object. + +```js +var codec = require('js-git/lib/object-codec'); +var sha1 = require('git-sha1'); + +var bin = codec.frame({ type: "blob", body: "Hello World\n"}); +var hash = sha1(bin); +``` + +## codec.deframe(buffer, decode) -> {type,body} + +This function accepts a binary git buffer and returns the `{type,body}` object. + +If `decode` is true, then the body will also be decoded into a normal javascript +value. If `decode` is false or missing, then the raw-buffer will be in body. + +## codec.encoders + +This is an object containing 4 encoder function Each function has the signature: + + encode(body) -> raw-buffer + +Where body is the JS representation of the type and raw-buffer is the git encoded +version of that value, but without the type and length framing. + +```js +var encoders = require('js-git/lib/object-codec').encoders; +var modes = require('js-git/lib/modes'); +``` + +Blobs must be native binary values (Buffer in node, Uint8Array in browser). 
+It's recommended to either use the `bodec` library to create binary values from +strings directly or configure your system with the `formats` mixin that allows +for unicode strings when working with blobs. + +```js +rawBin = encoders.blob(new Uint8Array([1,2,3,4,5,6])); +rawBin = encoders.blob(bodec.fromUnicode("Hello World")); +``` + +Trees are objects with filename as key and object with {mode,hash} as value. +The modes are integers. It's best to use the modes module to help. + +```js +rawBin = encoders.tree({ "greeting.txt": { + mode: modes.file, + hash: blobHash +}}); +``` + +Commits are objects with required fields {tree,author,message} +Also if there is a single parent, you specify it with `parent`. + +Since a commit can have zero or more parent commits, you specify the parent +hashes via the `parents` property as an array of hashes. + +The `author` field is required and contains {name,email,date}. + +Commits also require a `committer` field with the same structure as `author`. + +The `date` property of `author` and `committer` is in the format {seconds,offset} +Where seconds is a unix timestamp in seconds and offset is the number of minutes +offset for the timezone. (Your local offset can be found with `(new Date).getTimezoneOffset()`) + +The `message` field is mandatory and a simple string. + +```js +rawBin = encoders.commit({ + tree: treeHash, + author: { + name: "Tim Caswell", + email: "tim@creationix.com", + date: { + seconds: 1391790910, + offset: 7 * 60 + } + }, + parents: [ parentCommitHash ], + message: "This is a test commit\n" +}); +``` + +Annotated tags are like commits, except they have different fields. + +```js +rawBin = encoders.tag({ + object: commitHash, + type: "commit", + tag: "mytag", + tagger: { + name: "Tim Caswell", + email: "tim@creationix.com", + date: { + seconds: 1391790910, + offset: 7 * 60 + } + }, + message: "Tag it!\n" +}); +``` + +## codec.decoders + +This is just like `codec.encoders` except these functions do the opposite. +They have the format: + + decode(raw-buffer) -> body + +```js +var commit = decoders.commit(rawCommitBin); +``` diff --git a/doc/lib/pack-codec.md b/doc/lib/pack-codec.md new file mode 100644 index 0000000..b08a1f9 --- /dev/null +++ b/doc/lib/pack-codec.md @@ -0,0 +1,98 @@ +# Pack Codec + +This module implements a codec for packfile streams used in the git network +protocols as well as the on-disk packfile format. + +These are a sync stream transforms. It accepts an emit function and returns a +write function. Both of these have the same interface. You signal `end` to the +input side by writing undefined (or nothing) and when emit gets called with +undefined that is `end` on the output. + +Since this is sync, errors are simply thrown. If you want to use this in the +context of an async stream with back-pressure, it's up to the consumer to handle +exceptions and write to the input at the correct rate. Basically to implement +back-pressure, you only need to keep writing values to the input till enough +data comes out the output. It's sync so by the time `write()` returns, `emit()` +will have been called as many times as it ever will (without more writes). + +Here is an example of using the decodePack in a node push stream that ignores +backpressure. 
+ +```js +var decodePack = require('js-git/lib/pack-codec').decodePack; + +var write = decodePack(onItem); +stream.on("data", write); +stream.on("end", write); +var meta; +function onItem(item) { + if (item === undefined) { + // END of Stream + } + else if (meta === undefined) { + meta = item; + } + else { + console.log(item); + } +} +``` + +The first output is the meta object: + +```js +{ + version: 2 + num: num-of-objects, +} +``` + +## codec.decodePack(emit) -> write + +Input in this is the raw buffer chunks in the packstream. The chunks can be +broken up at any point so this is ideal for streaming from a disk or network. + + +Version is the git pack protocol version, and num is the number of objects that +will be in this stream. + +All output objects after this will be raw git objects. + +```js +{ + type: type, + size: buffer-size, + body: raw-buffer, + offset: offset-in-stream, + [ref: number-or-hash] +} +``` + +There are two extra types here that aren't seen elsewhere. They are `ofs-delta` +and `ref-delta`. In both cases, these are a diff that applies on top of another +object in the stream. The different is `ofs-delta` stores a number in `ref` +that is the number of bytes to go back in the stream to find the base object. +But `ref-delta` includes the full hash of it's base object. + + +## codec.encodePack(emit) -> write + +This is the reverse. In fact, if you fed this the output from `decodePack`, +it's output should match exactly the original stream. + +The objects don't need as much data as the parser outputs. In specefic, the meta +object only need contain: + +```js +{ num: num-of-objects } +``` + +And the items only need contain: + +```js +{ + type: type, + body: raw-buffer, + [ref: number-or-hash] +} +``` diff --git a/doc/lib/readme.md b/doc/lib/readme.md new file mode 100644 index 0000000..62156f8 --- /dev/null +++ b/doc/lib/readme.md @@ -0,0 +1,3 @@ +# Library + + diff --git a/doc/mixins/fs-db.md b/doc/mixins/fs-db.md new file mode 100644 index 0000000..c178435 --- /dev/null +++ b/doc/mixins/fs-db.md @@ -0,0 +1,53 @@ + +# Filesystem Git Database + +JSGit repositories need `loadAs`, `saveAs`, `loadRaw`, `saveRaw`, `readRef`, and +`updateRef` methods. +Depending on the backing storage, there are various ways to implement these +methods. +The implementation for in-memory storage is `js-git/mixins/mem-db`, and there +are variants for using Github or IndexDB for storage. + +The `js-git/mixins/fs-db` implementation provides these methods as well, but +depends on a file system interface providing `readFile`, `readChunk`, +`writeFile`, and `readDir`. +These file system methods are implemented by the `git-fs-db` and +`git-chrome-db` packages. + +For the purpose of this document, `=>` implies that the function does not block +and accepts a Node.js-style callback. +The arrow points to the type of the result. +None of these methods need to return a continuable if the nodeback is missing. + +The type `binary` stands for whatever binary representation is appropriate for +the underlying platform. +For browsers, binary is a `Uint8Array`. +For Node.js, binary is a `Buffer`. + +## readFile(path) => binary | undefined + +Reads the entirety of the file at the given path and produces the binary. +If the file does not exist, readFile provides `undefined` instead. + +## readChunk(path, start, end) => binary | undefined + +Reads a byte range of the file at the given path. 
+The byte range is a half open interval, including the byte at the initial index, +and excluding the byte at the terminal index, such that the end minus the start +is the length of the resulting binary data. +The end offset may be negative, in which case it should count back from the end +of the size of the file at the path, such that the size plus the negative end is +the positive end. +If the file does not exist, readChunk provides `undefined` instead. + +## writeFile(path, binary) => undefined + +Writes the given bytes to the file at the given path. +The method creates any directories leading up to the path if they do not already +exist. + +## readDir(path) => array of names | undefined + +Reads the names of the entries in the directory at the given path. +The names are not fully qualified paths, just the name of the entry within the +given directory. diff --git a/doc/mixins/mem-db.md b/doc/mixins/mem-db.md new file mode 100644 index 0000000..6b41aba --- /dev/null +++ b/doc/mixins/mem-db.md @@ -0,0 +1,27 @@ +# mem-db mixin + +This mixin implements object store (normal and raw) and stores the data in memory. + +```js +var memDb = require('js-git/mixins/mem-db'); +var repo = {}; +memDb(repo); +repo.saveAs("blob", "Hello World", function (err, hash) { + if (err) throw err; + console.log("Blob saved with hash " + hash); +}); +``` + +This attaches the following interfaces onto the repo object passed in: + + - `saveAs(type, body) => hash` + - `loadAs(type, hash) => body` + - `loadRaw(hash) => raw-binary` + - `saveRaw(hash, raw-binary) =>` + +All these functions are async and accept either a callback last or return a continuable. + +```js +// Example using continuable interface from gen-run generator body. +var commit = yield repo.loadAs("commit", commitHash); +``` \ No newline at end of file diff --git a/doc/mixins/pack-ops.md b/doc/mixins/pack-ops.md new file mode 100644 index 0000000..71df421 --- /dev/null +++ b/doc/mixins/pack-ops.md @@ -0,0 +1,37 @@ +# pack-ops mixin + +This mixin adds the ability to consume or create packfile streams. + +This depends on the repo already having: + + - `loadRaw(hash) => raw-binary` + - `saveRaw(hash, raw-binary) =>` + +And then adds: + + - `unpack(stream, opts) => hashes` + - `pack(hashes, opts) => stream` + +The streams are simple-stream format. This means they have a `.take(callback)` +method for pulling items out of the stream. 
+ +Example: + +```js +var packOps = require('js-git/mixins/pack-ops'); +packOps(repo); + +repo.unpack(stream, opts, function (err, hashes) { + // hashes is imported objects +}); + +repo.pack(hashes, opts, function (err, stream) { + if (err) throw err; + stream.take(onRead); + function onRead(err, chunk) { + if (err) throw err; + console.log(chunk); + if (item) stream.take(onRead); + } +}); +``` diff --git a/doc/mixins/readme.md b/doc/mixins/readme.md new file mode 100644 index 0000000..5fb958f --- /dev/null +++ b/doc/mixins/readme.md @@ -0,0 +1,9 @@ +# Mixins + +There's three types of mixins thats documented: + +- [fs-db](fs-db.md) + +- [mem-db](mem-db.md) + +- [pack-ops](pack-ops.md) diff --git a/doc/readme.md b/doc/readme.md new file mode 100644 index 0000000..8d68f04 --- /dev/null +++ b/doc/readme.md @@ -0,0 +1,8 @@ +# js-git documentation + +Go to: + +- [Library](lib) + +- [Mixins](mixins) + diff --git a/examples/clone.js b/examples/clone.js deleted file mode 100644 index 1402d3e..0000000 --- a/examples/clone.js +++ /dev/null @@ -1,31 +0,0 @@ -var platform = require('git-node-platform'); -var jsGit = require('../.'); -var gitRemote = require('git-net')(platform); -var fsDb = require('git-fs-db')(platform); -var fs = platform.fs; -var basename = require('path').basename; - -// Create a remote repo -var url = process.argv[2] || "git://github.com/creationix/conquest.git"; -var remote = gitRemote(url); - -// Create a local repo -var path = process.argv[3] || basename(remote.pathname); -var repo = jsGit(fsDb(fs(path))); - -console.log("Cloning %s to %s", url, path); - -var opts = { - onProgress: function (progress) { - process.stderr.write(progress); - } -}; -if (process.env.DEPTH) { - opts.depth = parseInt(process.env.DEPTH, 10); -} - -repo.fetchPack(remote, opts, function (err) { - if (err) throw err; - console.log("Done"); -}); - diff --git a/examples/create-harmony.js b/examples/create-harmony.js deleted file mode 100644 index 29d821d..0000000 --- a/examples/create-harmony.js +++ /dev/null @@ -1,45 +0,0 @@ -"use strict"; -let platform = require('git-node-platform'); -let jsGit = require('../.'); -let fsDb = require('git-fs-db')(platform); -let fs = platform.fs; -let run = require('gen-run'); - -// Create a filesystem backed bare repo -let repo = jsGit(fsDb(fs("test.git"))); - -let mock = require('./mock.js'); - -run(function *() { - yield repo.setHead("master"); - console.log("Git database Initialized"); - - let head; - console.log(yield* map(mock.commits, function* (files, message) { - return head = yield repo.saveAs("commit", { - tree: yield repo.saveAs("tree", yield* map(files, function* (contents) { - return { - mode: 33188, // 0o100644, - hash: yield repo.saveAs("blob", contents) - }; - })), - parent: head, - author: mock.author, - committer: mock.committer, - message: message - }); - })); - - yield repo.updateHead(head); - console.log("Done"); - -}); - -function* map(object, onItem) { - let obj = {}; - for (let key in object) { - let value = object[key]; - obj[key] = yield* onItem(value, key); - } - return obj; -} diff --git a/examples/create.js b/examples/create.js deleted file mode 100644 index 81b6a7d..0000000 --- a/examples/create.js +++ /dev/null @@ -1,103 +0,0 @@ -var platform = require('git-node-platform'); -var jsGit = require('../.'); -var fsDb = require('git-fs-db')(platform); -var fs = platform.fs; - -if (!module.parent) { - // Create a filesystem backed bare repo - var repo = jsGit(fsDb(fs("test.git"))); - create(repo, function (err) { - if (err) throw err; - }); -} 
-else { - module.exports = create; -} - - -function create(repo, callback) { - - var mock = require('./mock.js'); - - repo.setHead("master", function (err) { - if (err) return callback(err); - console.log("Git database Initialized"); - - var parent; - serialEach(mock.commits, function (message, files, next) { - // Start building a tree object. - var tree = {}; - parallelEach(files, function (name, contents, next) { - repo.saveAs("blob", contents, function (err, hash) { - if (err) return next(err); - tree[name] = { - mode: 0100644, - hash: hash - }; - next(); - }); - }, function (err) { - if (err) return next(err); - repo.saveAs("tree", tree, function (err, hash) { - if (err) return next(err); - var commit = { - tree: hash, - parent: parent, - author: mock.author, - committer: mock.committer, - message: message - }; - if (!parent) delete commit.parent; - repo.saveAs("commit", commit, function (err, hash) { - if (err) return next(err); - parent = hash; - repo.updateHead(hash, next); - }); - }); - }); - }, function (err) { - if (err) return callback(err); - repo.saveAs("tag", { - object: parent, - type: "commit", - tag: "v0.1.0", - tagger: mock.author, - message: "Details about the v0.1.0 release go here" - }, function (err, hash) { - if (err) return callback(err); - repo.createRef("refs/tags/v0.1.0", hash, callback); - }); - }); - }); -} - -// Mini control-flow library -function serialEach(object, fn, callback) { - var keys = Object.keys(object); - next(); - function next(err) { - if (err) return callback(err); - var key = keys.shift(); - if (!key) return callback(); - fn(key, object[key], next); - } -} -function parallelEach(object, fn, callback) { - var keys = Object.keys(object); - var left = keys.length + 1; - var done = false; - keys.forEach(function (key) { - fn(key, object[key], check); - }); - check(); - function check(err) { - if (done) return; - if (err) { - done = true; - return callback(err); - } - if (--left) return; - done = true; - callback(); - } -} diff --git a/examples/ls-remote.js b/examples/ls-remote.js deleted file mode 100644 index f249dc5..0000000 --- a/examples/ls-remote.js +++ /dev/null @@ -1,13 +0,0 @@ -var platform = require('git-node-platform'); -var jsGit = require('../.'); -var gitRemote = require('git-net')(platform); - -var repo = jsGit(); - -var url = process.argv[2] || "git://github.com/creationix/conquest.git"; -repo.lsRemote(gitRemote(url), function (err, refs) { - if (err) throw err; - Object.keys(refs).forEach(function (ref) { - console.log(refs[ref] + "\t" + ref); - }); -}); diff --git a/examples/mock.js b/examples/mock.js deleted file mode 100644 index c3c9d00..0000000 --- a/examples/mock.js +++ /dev/null @@ -1,18 +0,0 @@ -// Mock data for generating some history -exports.author = { name: "Tim Caswell", email: "tim@creationix.com" }; -exports.committer = { name: "JS-Git", email: "js-git@creationix.com" }; -exports.commits = { - "Initial Commit\n": { - "README.md": "# This is a test Repo\n\nIt's generated entirely by JavaScript\n" - }, - "Add package.json and blank module\n": { - "README.md": "# This is a test Repo\n\nIt's generated entirely by JavaScript\n", - "package.json": '{\n "name": "awesome-lib",\n "version": "3.1.3",\n "main": "awesome.js"\n}\n', - "awesome.js": 'module.exports = function () {\n throw new Error("TODO: Implement Awesome");\n};\n' - }, - "Implement awesome and bump version to 3.1.4\n": { - "README.md": "# This is a test Repo\n\nIt's generated entirely by JavaScript\n", - "package.json": '{\n "name": "awesome-lib",\n "version": 
"3.1.4",\n "main": "awesome.js"\n}\n', - "awesome.js": 'module.exports = function () {\n return 42;\n};\n' - } -}; diff --git a/examples/read-harmony.js b/examples/read-harmony.js deleted file mode 100644 index 16677bf..0000000 --- a/examples/read-harmony.js +++ /dev/null @@ -1,31 +0,0 @@ -"use strict"; -let platform = require('git-node-platform'); -let jsGit = require('../.'); -let fsDb = require('git-fs-db')(platform); -let fs = platform.fs; -let run = require('gen-run'); - -// Create a filesystem backed bare repo -let repo = jsGit(fsDb(fs("test.git"))); - -run(start("HEAD")); - -function* start(hashish) { - let hash = yield repo.resolveHashish(hashish); - console.log(hashish, hash); - yield* loadCommit(hash); -} - -function* loadCommit(hash) { - let commit = yield repo.loadAs("commit", hash); - console.log("COMMIT", hash, commit); - let tree = yield repo.loadAs("tree", commit.tree); - console.log("TREE", commit.tree, tree); - for (let entry of tree.values()) { - let blob = yield repo.loadAs("blob", entry.hash); - console.log("BLOB", entry.hash, blob); - } - for (let parent of commit.parents.values()) { - yield* loadCommit(parent); - } -} diff --git a/examples/read-pack.js b/examples/read-pack.js deleted file mode 100644 index c8d61e5..0000000 --- a/examples/read-pack.js +++ /dev/null @@ -1,130 +0,0 @@ -var frame = require('../lib/frame.js'); -var sha1 = require('../lib/sha1.js'); -var decoders = require('../lib/decoders.js'); -var decodePack = require('../lib/pack-codec.js').decodePack; -var applyDelta = require('../lib/apply-delta.js'); -var inspect = require('util').inspect; - -var nodes = {}; -var links = []; -var items = {}; -var hashes = {}; -var left, num; - -var onItem = decodePack(function (item) { - if (left === undefined) { - left = num = item.num; - } - else if (item) { - left--; - console.error("%s/%s left", left, num); - } - else { - if (left) throw new Error(left + " items missing!"); - } - if (item && item.body) { - var hash = item.hash = sha1(frame(item.type, item.body)); - hashes[item.offset] = hash; - items[hash] = item; - - if (item.type === "ofs-delta") { - item.ref = hashes[item.offset - item.ref]; - item.type = "ref-delta"; - } - if (item.type === "ref-delta") { - var target = items[item.ref]; - item.type = target.type; - item.body = applyDelta(item.body, target.body); - delete items[hash]; - hash = item.hash = sha1(frame(item.type, item.body)); - hashes[item.offset] = hash; - items[hash] = item; - } - - var obj = item.obj = decoders[item.type](item.body); - - if (item.type === "commit") { - var label = []; - nodes[hash] = { - color: "deepskyblue4", - shape: "record", - label: label - }; - label.push(" " + shorten(hash)); - label.push(" " + obj.message.split("\n")[0].replace(/"/g, '')); - links.push([ - '"' + hash + '":commit', - '"' + obj.tree + '":hash', - ]); - obj.parents.forEach(function (parent) { - links.push([ - '"' + hash + '":hash', - '"' + parent + '":hash', - ]); - }); - } - else if (item.type === "tree") { - var label = [ - " " + shorten(hash), - ]; - Object.keys(obj).forEach(function (name, i) { - var key = "f" + i; - label.push("<" + key + "> " + name); - links.push([ - '"' + hash + '":' + key, - '"' + obj[name].hash + '":hash', - ]); - }); - nodes[hash] = { - color: "forestgreen", - shape: "record", - label: label - }; - } - else if (item.type === "blob") { - nodes[hash] = { - color: "firebrick4", - shape: "record", - label: [ - " " + shorten(hash), - item.body.length + " bytes data" - ] - }; - } - } - - if (item === undefined) printDot(); - 
console.error(inspect(item, {colors:true})); -}); -process.stdin.on('data', onItem); -process.stdin.on('end', onItem); -process.stdin.resume(); - -function printDot() { - var dot = []; - Object.keys(nodes).forEach(function (hash) { - var props = nodes[hash]; - dot.push('"' + hash + '" [\n' + Object.keys(props).map(function (name) { - var value = props[name]; - if (Array.isArray(value)) value = value.join("|"); - return ' ' + name + ' = "' + value + '"'; - }).join("\n") + '\n];'); - }); - links.forEach(function (pair) { - if (pair[2]) { - dot.push(pair[0] + ' -> ' + pair[1] + ' [label="' + pair[2] + '"];'); - } - else { - dot.push(pair[0] + ' -> ' + pair[1] + ';'); - } - }); - - dot.unshift('graph [rankdir = "LR" aspect=1];'); - dot.unshift('digraph packfile {'); - dot.push('}'); - console.log(dot.join("\n\n")); -} - -function shorten(hash) { - return hash.substr(0, 6) + "..." + hash.substr(hash.length - 6); -} \ No newline at end of file diff --git a/examples/read.js b/examples/read.js deleted file mode 100644 index 2eb5082..0000000 --- a/examples/read.js +++ /dev/null @@ -1,39 +0,0 @@ -var platform = require('git-node-platform'); -var jsGit = require('../.'); -var fsDb = require('git-fs-db')(platform); -var fs = platform.fs; - -// Create a filesystem backed bare repo -var repo = jsGit(fsDb(fs("test.git"))); - -loadCommit("HEAD"); - -function loadCommit(hashish) { - repo.loadAs("commit", hashish, onCommit); -} - -function onCommit(err, commit, hash) { - if (err) throw err; - console.log("COMMIT", hash, commit); - loadTree(commit.tree); - if (commit.parents) { - commit.parents.forEach(loadCommit); - } -} - -function loadTree(hash) { - repo.loadAs("tree", hash, onTree); -} - -function onTree(err, tree, hash) { - if (err) throw err; - console.log("TREE", hash, tree); - tree.forEach(onEntry); -} - -function onEntry(entry) { - repo.loadAs("blob", entry.hash, function (err, blob) { - if (err) throw err; - console.log("BLOB", entry.hash, blob); - }); -} diff --git a/examples/serve.js b/examples/serve.js deleted file mode 100644 index 0eca92d..0000000 --- a/examples/serve.js +++ /dev/null @@ -1,246 +0,0 @@ -var jsGit = require('../.'); -var net = require('net'); -var inspect = require('util').inspect; - -var db = memDb(); -var repo = jsGit(db); -db.init(function (err) { - if (err) throw err; - require('./create.js')(repo, function (err) { - if (err) throw err; - console.log("Repo Initialized with sample data"); - }); -}); - -var server = net.createServer(connectionHandler(function (req, callback) { - if (req.path !== "/test.git") return callback(new Error("Unknown repo: " + req.path)); - callback(null, repo); -})); -server.listen(9418, "127.0.0.1", function () { - console.log("GIT server listening at", server.address()); -}); - -////////////////////// TCP transport for git:// uris /////////////////////////// - -function connectionHandler(onReq, opts) { - opts = opts || {}; - return function (socket) { - var remote = wrap(socket), command, path, host; - socket.on("error", onDone); - remote.read(function (err, line) { - if (err) return onDone(err); - var match = line.match(/^(git-upload-pack|git-receive-pack) (.+?)\0(?:host=(.+?)\0)$/); - if (!match) return onDone(new Error("Invalid connection message: " + line)); - command = match[1]; - path = match[2]; - host = match[3]; - onReq({ path: path, host: host }, onRepo); - }); - - function onRepo(err, repo) { - if (err) return onDone(err); - if (command === "git-upload-pack") { - return repo.uploadPack(remote, opts, onDone); - } - if (command === 
"git-receive-pack") { - return repo.receivePack(remote, opts, onDone); - } - } - - function onDone(err, changes) { - if (err) console.error(err.stack); - else console.log("DONE", { - command: command, - path: path, - host: host, - changes: changes - }); - socket.destroy(); - } - }; -} - -var pktLine = require('./pkt-line.js'); -function wrap(socket) { - var queue = []; - var rerr = null; - var rcb = null, wcb = null; - var onChunk = pktLine.deframer(onFrame); - var writeFrame = pktLine.framer(writeChunk); - socket.on("data", onEvent); - socket.on("end", onEvent); - socket.on("drain", onDrain); - return { read: read, write: write }; - - function onEvent(chunk) { - try { - onChunk(chunk); - } - catch (err) { - console.error(err.stack); - rerr = err; - check(); - } - } - - function onFrame(frame) { - console.log("<-", inspect(frame, {colors:true})); - queue.push(frame); - check(); - } - - function read(callback) { - if (!callback) return read; - if (rcb) return callback(new Error("Only one read at a time")); - rcb = callback; - check(); - } - - function check() { - if (rcb && (rerr || queue.length)) { - var callback = rcb; - rcb = null; - if (rerr) { - var err = rerr; - rerr = null; - callback(err); - } - else { - callback(null, queue.shift()); - } - } - if (queue.length) socket.pause(); - else if (rcb) socket.resume(); - } - - function write(frame, callback) { - if (callback === undefined) return write.bind(this, frame); - if (callback) { - if (wcb) return callback(new Error("Only one write at a time")); - wcb = callback; - } - try { - console.log("->", inspect(frame, {colors:true})); - writeFrame(frame); - } - catch (err) { - if (wcb) { - wcb = null; - callback(err); - } - else { - throw err; - } - } - } - - function writeChunk(chunk) { - // console.log(">>", inspect("" + chunk, {colors:true})); - if (chunk === undefined) { - socket.end(); - onDrain(); - } - else if (socket.write(chunk)) { - onDrain(); - } - } - - function onDrain() { - if (wcb) { - var callback = wcb; - wcb = null; - callback(); - } - } - -} - -/////////////////// inMemory database for easy testing ///////////////////////// - -function makeAsync(fn, callback) { - if (!callback) return makeAsync.bind(this, fn); - process.nextTick(function () { - var result; - try { result = fn(); } - catch (err) { return callback(err); } - if (result === undefined) return callback(); - return callback(null, result); - }); -} - -function memDb() { - - // Store everything in ram! 
- var objects; - var others; - var isHash = /^[a-z0-9]{40}$/; - - return { - get: get, - set: set, - has: has, - del: del, - keys: keys, - init: init, - clear: init, - }; - - function get(key, callback) { - console.log("GET", key); - return makeAsync(function () { - if (isHash.test(key)) { - return objects[key]; - } - return others[key]; - }, callback); - } - - function set(key, value, callback) { - console.log("SET", key); - return makeAsync(function () { - if (isHash.test(key)) { - objects[key] = value; - } - else { - others[key] = value.toString(); - } - }, callback); - } - - function has(key, callback) { - return makeAsync(function () { - if (isHash.test(key)) { - return key in objects; - } - return key in others; - }, callback); - } - - function del(key, callback) { - return makeAsync(function () { - if (isHash.test(key)) { - delete objects[key]; - } - else { - delete others[key]; - } - }, callback); - } - - function keys(prefix, callback) { - return makeAsync(function () { - var length = prefix.length; - return Object.keys(others).filter(function (key) { - return key.substr(0, length) === prefix; - }); - }, callback); - } - - function init(callback) { - return makeAsync(function () { - objects = {}; - others = {}; - }, callback); - } - -} diff --git a/examples/walk.js b/examples/walk.js deleted file mode 100644 index 63f4103..0000000 --- a/examples/walk.js +++ /dev/null @@ -1,50 +0,0 @@ -var platform = require('git-node-platform'); -var jsGit = require('../.'); -var fsDb = require('git-fs-db')(platform); -var fs = platform.fs; - -// Create a filesystem backed bare repo -var repo = jsGit(fsDb(fs(process.argv[2] || "test.git"))); -repo.logWalk("HEAD", function (err, log) { - if (err) throw err; - var shallow; - return log.read(onRead); - - function onRead(err, commit) { - if (err) throw err; - if (!commit) return logEnd(shallow); - if (commit.last) shallow = true; - logCommit(commit); - repo.treeWalk(commit.tree, function (err, tree) { - if (err) throw err; - tree.read(onEntry); - function onEntry(err, entry) { - if (err) throw err; - if (!entry) { - return log.read(onRead); - } - logEntry(entry); - return tree.read(onEntry); - } - }); - } -}); - -function logCommit(commit) { - var author = commit.author; - var message = commit.message; - console.log("\n\x1B[33mcommit %s\x1B[0m", commit.hash); - console.log("Author: %s <%s>", author.name, author.email); - console.log("Date: %s", author.date); - console.log("\n \x1B[32;1m" + message.trim().split("\n").join("\x1B[0m\n \x1B[32m") + "\x1B[0m\n"); -} - -function logEntry(entry) { - var path = entry.path.replace(/\//g, "\x1B[1;34m/\x1B[0;34m") + "\x1B[0m"; - console.log(" %s %s", entry.hash, path); -} - -function logEnd(shallow) { - var message = shallow ? "End of shallow record." : "Beginning of history"; - console.log("\n\x1B[30;1m%s\x1B[0m\n", message); -} \ No newline at end of file diff --git a/js-git.js b/js-git.js deleted file mode 100644 index dc48018..0000000 --- a/js-git.js +++ /dev/null @@ -1,34 +0,0 @@ -module.exports = newRepo; - -function newRepo(db) { - if (!db) throw new TypeError("A db interface instance is required"); - - // Create a new repo object. - var repo = {}; - - // Auto trace the db if tracing is turned on. 
- if (require('./lib/trace.js')) db = require('./lib/tracedb.js')(db); - - // Add the db interface (used by objects, refs, and packops mixins) - repo.db = db; - - // Mix in object store interface - require('./mixins/objects.js')(repo); - - // Mix in the references interface - require('./mixins/refs.js')(repo); - - // Mix in the walker helpers - require('./mixins/walkers.js')(repo); - - // Mix in packfile import and export ability - require('./mixins/packops.js')(repo); - - // Mix in git network client ability - require('./mixins/client.js')(repo); - - // Mix in git network client ability - require('./mixins/server.js')(repo); - - return repo; -} diff --git a/lib/agent.js b/lib/agent.js deleted file mode 100644 index 565482d..0000000 --- a/lib/agent.js +++ /dev/null @@ -1,2 +0,0 @@ -var meta = require('../package.json'); -module.exports = meta.name + "/" + meta.version; diff --git a/lib/apply-delta.js b/lib/apply-delta.js index 20a7f00..5357ac3 100644 --- a/lib/apply-delta.js +++ b/lib/apply-delta.js @@ -1,94 +1,61 @@ -// This is Chris Dickinson's code +var bodec = require('bodec'); -var binary = require('bops') - , Decoder = require('varint/decode.js') - , vi = new Decoder +module.exports = applyDelta; -// we use writeUint[8|32][LE|BE] instead of indexing -// into buffers so that we get buffer-browserify compat. -var OFFSET_BUFFER = binary.create(4) - , LENGTH_BUFFER = binary.create(4) +function applyDelta(delta, base) { + var deltaOffset = 0; -module.exports = apply_delta; -function apply_delta(delta, target) { - var base_size_info = {size: null, buffer: null} - , resized_size_info = {size: null, buffer: null} - , output_buffer - , out_idx - , command - , len - , idx - - delta_header(delta, base_size_info) - delta_header(base_size_info.buffer, resized_size_info) - - delta = resized_size_info.buffer - - idx = - out_idx = 0 - output_buffer = binary.create(resized_size_info.size) - - len = delta.length - - while(idx < len) { - command = delta[idx++] - command & 0x80 ? copy() : insert() + if (base.length !== readLength()) { + throw new Error("Base length mismatch"); } - return output_buffer - - function copy() { - binary.writeUInt32LE(OFFSET_BUFFER, 0, 0) - binary.writeUInt32LE(LENGTH_BUFFER, 0, 0) - - var check = 1 - , length - , offset - - for(var x = 0; x < 4; ++x) { - if(command & check) { - OFFSET_BUFFER[3 - x] = delta[idx++] - } - check <<= 1 + // Create a new output buffer with length from header. + var outOffset = 0; + var out = bodec.create(readLength()); + + while (deltaOffset < delta.length) { + var byte = delta[deltaOffset++]; + // Copy command. Tells us offset in base and length to copy. 
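+    // The opcode's low bits are flags: 0x01-0x08 pick which offset bytes follow, 0x10-0x40 which length bytes.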
+ if (byte & 0x80) { + var offset = 0; + var length = 0; + if (byte & 0x01) offset |= delta[deltaOffset++] << 0; + if (byte & 0x02) offset |= delta[deltaOffset++] << 8; + if (byte & 0x04) offset |= delta[deltaOffset++] << 16; + if (byte & 0x08) offset |= delta[deltaOffset++] << 24; + if (byte & 0x10) length |= delta[deltaOffset++] << 0; + if (byte & 0x20) length |= delta[deltaOffset++] << 8; + if (byte & 0x40) length |= delta[deltaOffset++] << 16; + if (length === 0) length = 0x10000; + // copy the data + bodec.copy(bodec.slice(base, offset, offset + length), out, outOffset); + outOffset += length; } - - for(var x = 0; x < 3; ++x) { - if(command & check) { - LENGTH_BUFFER[3 - x] = delta[idx++] - } - check <<= 1 + // Insert command, opcode byte is length itself + else if (byte) { + bodec.copy(bodec.slice(delta, deltaOffset, deltaOffset + byte), out, outOffset); + deltaOffset += byte; + outOffset += byte; } - LENGTH_BUFFER[0] = 0 - - length = binary.readUInt32BE(LENGTH_BUFFER, 0) || 0x10000 - offset = binary.readUInt32BE(OFFSET_BUFFER, 0) - - binary.copy(target, output_buffer, out_idx, offset, offset + length) - out_idx += length + else throw new Error('Invalid delta opcode'); } - function insert() { - binary.copy(delta, output_buffer, out_idx, idx, command + idx) - idx += command - out_idx += command + if (outOffset !== out.length) { + throw new Error("Size mismatch in check"); } -} -function delta_header(buf, output) { - var done = false - , idx = 0 - , size = 0 - - vi.ondata = function(s) { - size = s - done = true + return out; + + // Read a variable length number our of delta and move the offset. + function readLength() { + var byte = delta[deltaOffset++]; + var length = byte & 0x7f; + var shift = 7; + while (byte & 0x80) { + byte = delta[deltaOffset++]; + length |= (byte & 0x7f) << shift; + shift += 7; + } + return length; } - - do { - vi.write(buf[idx++]) - } while(!done) - - output.size = size - output.buffer = binary.subarray(buf, idx) - -} \ No newline at end of file +} diff --git a/lib/assert-type.js b/lib/assert-type.js deleted file mode 100644 index c1808db..0000000 --- a/lib/assert-type.js +++ /dev/null @@ -1,5 +0,0 @@ -module.exports = function assertType(object, type) { - if (object.type !== type) { - throw new Error(type + " expected, but found " + object.type); - } -}; diff --git a/lib/config-codec.js b/lib/config-codec.js new file mode 100644 index 0000000..e69264c --- /dev/null +++ b/lib/config-codec.js @@ -0,0 +1,67 @@ +"use strict"; + +// This is for working with git config files like .git/config and .gitmodules. +// I believe this is just INI format. 
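+// Sections may have quoted subsections like [branch "master"], which map to nested objects here.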
+module.exports = { + encode: encode, + decode: decode +}; + +function encode(config) { + var lines = []; + Object.keys(config).forEach(function (name) { + var obj = config[name]; + var deep = {}; + var values = {}; + var hasValues = false; + Object.keys(obj).forEach(function (key) { + var value = obj[key]; + if (typeof value === 'object') { + deep[key] = value; + } + else { + hasValues = true; + values[key] = value; + } + }); + if (hasValues) { + encodeBody('[' + name + ']', values); + } + + Object.keys(deep).forEach(function (sub) { + var child = deep[sub]; + encodeBody('[' + name + ' "' + sub + '"]', child); + }); + }); + + return lines.join("\n") + "\n"; + + function encodeBody(header, obj) { + lines.push(header); + Object.keys(obj).forEach(function (name) { + lines.push( "\t" + name + " = " + obj[name]); + }); + } + +} + + +function decode(text) { + var config = {}; + var section; + text.split(/[\r\n]+/).forEach(function (line) { + var match = line.match(/\[([^ \t"\]]+) *(?:"([^"]+)")?\]/); + if (match) { + section = config[match[1]] || (config[match[1]] = {}); + if (match[2]) { + section = section[match[2]] = {}; + } + return; + } + match = line.match(/([^ \t=]+)[ \t]*=[ \t]*(.+)/); + if (match) { + section[match[1]] = match[2]; + } + }); + return config; +} diff --git a/lib/decoders.js b/lib/decoders.js deleted file mode 100644 index 1ea962d..0000000 --- a/lib/decoders.js +++ /dev/null @@ -1,104 +0,0 @@ -var indexOf = require('./indexof.js'); -var parseOct = require('./parseoct.js'); -var parseAscii = require('./parseascii.js'); -var parseToHex = require('./parsetohex.js'); - -exports.commit = function decodeCommit(body) { - var i = 0; - var start; - var key; - var parents = []; - var commit = { - tree: "", - parents: parents, - author: "", - committer: "", - message: "" - }; - while (body[i] !== 0x0a) { - start = i; - i = indexOf(body, 0x20, start); - if (i < 0) throw new SyntaxError("Missing space"); - key = parseAscii(body, start, i++); - start = i; - i = indexOf(body, 0x0a, start); - if (i < 0) throw new SyntaxError("Missing linefeed"); - var value = parseAscii(body, start, i++); - if (key === "parent") { - parents.push(value); - } - else { - if (key === "author" || key === "committer") { - value = decodePerson(value); - } - commit[key] = value; - } - } - i++; - commit.message = parseAscii(body, i, body.length); - return commit; -}; - -exports.tag = function decodeTag(body) { - var i = 0; - var start; - var key; - var tag = {}; - while (body[i] !== 0x0a) { - start = i; - i = indexOf(body, 0x20, start); - if (i < 0) throw new SyntaxError("Missing space"); - key = parseAscii(body, start, i++); - start = i; - i = indexOf(body, 0x0a, start); - if (i < 0) throw new SyntaxError("Missing linefeed"); - var value = parseAscii(body, start, i++); - if (key === "tagger") value = decodePerson(value); - tag[key] = value; - } - i++; - tag.message = parseAscii(body, i, body.length); - return tag; -}; - -exports.tree = function decodeTree(body) { - var i = 0; - var length = body.length; - var start; - var mode; - var name; - var hash; - var tree = {}; - while (i < length) { - start = i; - i = indexOf(body, 0x20, start); - if (i < 0) throw new SyntaxError("Missing space"); - mode = parseOct(body, start, i++); - start = i; - i = indexOf(body, 0x00, start); - name = parseAscii(body, start, i++); - hash = parseToHex(body, i, i += 20); - tree[name] = { - mode: mode, - hash: hash - }; - } - return tree; -}; - -exports.blob = function decodeBlob(body) { - return body; -}; - -function 
decodePerson(string) { - var match = string.match(/^([^<]*) <([^>]*)> ([^ ]*) (.*)$/); - if (!match) throw new Error("Improperly formatted person string"); - var sec = parseInt(match[3], 10); - var date = new Date(sec * 1000); - date.timeZoneoffset = parseInt(match[4], 10) / 100 * -60; - return { - name: match[1], - email: match[2], - date: date - }; -} diff --git a/lib/defer.js b/lib/defer.js new file mode 100644 index 0000000..be50543 --- /dev/null +++ b/lib/defer.js @@ -0,0 +1,33 @@ +"use strict"; + +var timeouts, messageName; + +// node.js +if (typeof process === "object" && typeof process.nextTick === "function") { + module.exports = process.nextTick; +} +// some browsers +else if (typeof setImmediate === "function") { + module.exports = setImmediate; +} +// most other browsers +else { + timeouts = []; + messageName = "zero-timeout-message"; + window.addEventListener("message", handleMessage, true); + + module.exports = function (fn) { + timeouts.push(fn); + window.postMessage(messageName, "*"); + }; +} + +function handleMessage(event) { + if (event.source == window && event.data == messageName) { + event.stopPropagation(); + if (timeouts.length > 0) { + var fn = timeouts.shift(); + fn(); + } + } +} diff --git a/lib/deflate.js b/lib/deflate.js index a7b797a..512bcc5 100644 --- a/lib/deflate.js +++ b/lib/deflate.js @@ -1,5 +1,10 @@ -var zlib = require('zlib'); -module.exports = function deflate(buffer, callback) { - return zlib.deflate(buffer, callback); -}; -// TODO: make this work in the browser too. \ No newline at end of file +var pako = require('pako'); +var Binary = require('bodec').Binary; +if (Binary === Uint8Array) { + module.exports = pako.deflate; +} +else { + module.exports = function deflate(value) { + return new Binary(pako.deflate(new Uint8Array(value))); + }; +} diff --git a/lib/deframe.js b/lib/deframe.js deleted file mode 100644 index 30d02e6..0000000 --- a/lib/deframe.js +++ /dev/null @@ -1,18 +0,0 @@ -var bops = require('bops'); -var indexOf = require('./indexof.js'); -var parseDec = require('./parsedec.js'); -var parseAscii = require('./parseascii.js'); - -module.exports = function deframe(buffer) { - var space = indexOf(buffer, 0x20); - if (space < 0) throw new Error("Invalid git object buffer"); - var nil = indexOf(buffer, 0x00, space); - if (nil < 0) throw new Error("Invalid git object buffer"); - var body = bops.subarray(buffer, nil + 1); - var size = parseDec(buffer, space + 1, nil); - if (size !== body.length) throw new Error("Invalid body length."); - return [ - parseAscii(buffer, 0, space), - body - ]; -}; diff --git a/lib/each.js b/lib/each.js deleted file mode 100644 index 94a3db3..0000000 --- a/lib/each.js +++ /dev/null @@ -1,11 +0,0 @@ -module.exports = each; - -// A functional forEach that works on both arrays and objects -function each(obj, fn) { - if (Array.isArray(obj)) return obj.forEach(fn); - var keys = Object.keys(obj); - for (var i = 0, l = keys.length; i < l; i++) { - var key = keys[i]; - fn(obj[key], key, obj); - } -} diff --git a/lib/encoders.js b/lib/encoders.js deleted file mode 100644 index 1462efc..0000000 --- a/lib/encoders.js +++ /dev/null @@ -1,76 +0,0 @@ -var bops = require('bops'); -var pathCmp = require('./pathcmp.js'); - -exports.commit = function encodeCommit(commit) { - if (!commit.tree || !commit.author || !commit.message) { - throw new TypeError("Tree, author, and message are require for commits"); - } - var parents = commit.parents || (commit.parent ? 
[ commit.parent ] : []); - if (!Array.isArray(parents)) { - throw new TypeError("Parents must be an array"); - } - var str = "tree " + commit.tree; - for (var i = 0, l = parents.length; i < l; ++i) { - str += "\nparent " + parents[i]; - } - str += "\nauthor " + encodePerson(commit.author) + - "\ncommitter " + encodePerson(commit.committer || commit.author) + - "\n\n" + commit.message; - return bops.from(str); -}; - -exports.tag = function encodeTag(tag) { - if (!tag.object || !tag.type || !tag.tag || !tag.tagger || !tag.message) { - throw new TypeError("Object, type, tag, tagger, and message required"); - } - var str = "object " + tag.object + - "\ntype " + tag.type + - "\ntag " + tag.tag + - "\ntagger " + encodePerson(tag.tagger) + - "\n\n" + tag.message; - return bops.from(str + "\n" + tag.message); -}; - -exports.tree = function encodeTree(tree) { - var chunks = []; - if (!Array.isArray(tree)) { - tree = Object.keys(tree).map(function (name) { - var entry = tree[name]; - entry.name = name; - return entry; - }); - } - tree.sort(pathCmp).forEach(onEntry); - return bops.join(chunks); - - function onEntry(entry) { - chunks.push( - bops.from(entry.mode.toString(8) + " " + entry.name + "\0"), - bops.from(entry.hash, "hex") - ); - } -}; - -exports.blob = function encodeBlob(blob) { - if (bops.is(blob)) return blob; - return bops.from(blob); -}; - -function encodePerson(person) { - if (!person.name || !person.email) { - throw new TypeError("Name and email are required for person fields"); - } - return safe(person.name) + - " <" + safe(person.email) + "> " + - formatDate(person.date || new Date()); -} - -function safe(string) { - return string.replace(/(?:^[\.,:;<>"']+|[\0\n<>]+|[\.,:;<>"']+$)/gm, ""); -} - -function formatDate(date) { - var timezone = (date.timeZoneoffset || date.getTimezoneOffset()) / 60; - var seconds = Math.floor(date.getTime() / 1000); - return seconds + " " + (timezone > 0 ? 
"-0" : "0") + timezone + "00"; -} diff --git a/lib/find-common.js b/lib/find-common.js new file mode 100644 index 0000000..245a5e2 --- /dev/null +++ b/lib/find-common.js @@ -0,0 +1,58 @@ +function oneCall(fn) { + var done = false; + return function () { + if (done) return; + done = true; + return fn.apply(this, arguments); + }; +} + +module.exports = findCommon; + +function findCommon(repo, a, b, callback) { + callback = oneCall(callback); + var ahead = 0, behind = 0; + var aStream, bStream; + var aCommit, bCommit; + + if (a === b) return callback(null, ahead, behind); + repo.logWalk(a, onAStream); + repo.logWalk(b, onBStream); + + function onAStream(err, stream) { + if (err) return callback(err); + aStream = stream; + aStream.read(onA); + } + + function onBStream(err, stream) { + if (err) return callback(err); + bStream = stream; + bStream.read(onB); + } + + function onA(err, commit) { + if (!commit) return callback(err || new Error("No common commit")); + aCommit = commit; + if (bCommit) compare(); + } + + function onB(err, commit) { + if (!commit) return callback(err || new Error("No common commit")); + bCommit = commit; + if (aCommit) compare(); + } + + function compare() { + if (aCommit.hash === bCommit.hash) return callback(null, ahead, behind); + if (aCommit.author.date.seconds > bCommit.author.date.seconds) { + ahead++; + aStream.read(onA); + } + else { + behind++; + bStream.read(onB); + } + } + +} diff --git a/lib/frame.js b/lib/frame.js deleted file mode 100644 index 3717b83..0000000 --- a/lib/frame.js +++ /dev/null @@ -1,8 +0,0 @@ -var bops = require('bops'); - -module.exports = function frame(type, body) { - return bops.join([ - bops.from(type + " " + body.length + "\0"), - body - ]); -}; diff --git a/lib/git-fs.js b/lib/git-fs.js new file mode 100644 index 0000000..c8d34d3 --- /dev/null +++ b/lib/git-fs.js @@ -0,0 +1,125 @@ +"use strict"; + +var modes = require('./modes'); +var defer = require('./defer'); + +// options.encrypt(plain) -> encrypted +// options.decrypt(encrypted) -> plain +// options.shouldEncrypt(path) -> boolean +// options.getRootTree() => hash +// options.setRootTree(hash) => +module.exports = function (repo, options) { + var toWrite = {}; + var callbacks = []; + var writing = false; + + return { + readFile: readFile, + writeFile: writeFile, + readDir: readDir + }; + + function readFile(path, callback) { + if (!callback) return readFile.bind(null, path); + + // If there is a pending write for this path, pull from the cache. 
+ if (toWrite[path]) return callback(null, toWrite[path]); + + // Otherwise read from the persistent storage + options.getRootTree(onRootTree); + + function onRootTree(err, hash) { + if (!hash) return callback(err); + repo.pathToEntry(hash, path, onEntry); + } + + function onEntry(err, entry) { + if (!entry || !modes.isBlob(entry.mode)) return callback(err); + + repo.loadAs("blob", entry.hash, function (err, content) { + if (!content) return callback(err); + if (entry.mode === modes.sym) { + content = options.decrypt(content); + } + callback(null, content); + }); + } + } + + function writeFile(path, binary, callback) { + if (!callback) return writeFile.bind(null, path, binary); + toWrite[path] = binary; + callbacks.push(callback); + defer(check); + } + + function readDir(path, callback) { + if (!callback) return readDir.bind(null, path); + + options.getRootTree(onRootTree); + + function onRootTree(err, hash) { + if (!hash) return callback(err); + repo.pathToEntry(hash, path, onEntry); + } + + function onEntry(err, entry) { + if (!entry || entry.mode !== modes.tree) return callback(err); + repo.loadAs("tree", entry.hash, onTree); + } + + function onTree(err, tree) { + if (!tree) return callback(err); + callback(null, Object.keys(tree)); + } + } + + function check() { + if (writing || !callbacks.length) return; + writing = true; + options.getRootTree(onRootTree); + + function onRootTree(err, hash) { + if (err) return callall(err); + var files = pullFiles(); + if (hash) files.base = hash; + repo.createTree(files, onNewTree); + } + + function onNewTree(err, hash) { + if (err) return callall(err); + options.setRootTree(hash, onSaveRoot); + } + + function onSaveRoot(err) { + if (err) return callall(err); + writing = false; + callall(); + defer(check); + } + } + + function pullFiles() { + var files = Object.keys(toWrite).map(function (path) { + var content = toWrite[path]; + delete toWrite[path]; + var mode = modes.blob; + if (options.shouldEncrypt && options.shouldEncrypt(path)) { + mode = modes.sym; + content = options.encrypt(content); + } + return { + path: path, + mode: mode, + content: content + }; + }); + return files; + } + + function callall(err) { + callbacks.splice(0, callbacks.length).forEach(function (callback) { + callback(err); + }); + } +}; diff --git a/lib/indexof.js b/lib/indexof.js deleted file mode 100644 index 18c61a5..0000000 --- a/lib/indexof.js +++ /dev/null @@ -1,8 +0,0 @@ -module.exports = function indexOf(buffer, byte, i) { - i |= 0; - var length = buffer.length; - for (;;i++) { - if (i >= length) return -1; - if (buffer[i] === byte) return i; - } -}; diff --git a/lib/inflate-stream.js b/lib/inflate-stream.js new file mode 100644 index 0000000..ce8d318 --- /dev/null +++ b/lib/inflate-stream.js @@ -0,0 +1,36 @@ +var Inflate = require('pako').Inflate; +var Binary = require('bodec').Binary; + +// Byte oriented inflate stream. Wrapper for pako's Inflate. +// +// var inf = inflate(); +// inf.write(byte) -> more - Write a byte to inflate's state-machine. +// Returns true if more data is expected. +// inf.recycle() - Reset the internal state machine. +// inf.flush() -> data - Flush the output as a binary buffer. +// +module.exports = function inflateStream() { + var inf = new Inflate(); + var b = new Uint8Array(1); + var empty = new Binary(0); + + return { + write: write, + recycle: recycle, + flush: Binary === Uint8Array ? 
flush : flushConvert + }; + + function write(byte) { + b[0] = byte; + inf.push(b); + return !inf.ended; + } + + function recycle() { inf = new Inflate(); } + + function flush() { return inf.result || empty; } + + function flushConvert() { + return inf.result ? new Binary(inf.result) : empty; + } +}; diff --git a/lib/inflate.js b/lib/inflate.js index 48dc528..038f8a4 100644 --- a/lib/inflate.js +++ b/lib/inflate.js @@ -1,853 +1,10 @@ -var bops = require('bops'); - -// Wrapper for proposed new API to inflate: -// -// var inf = inflate(); -// inf.write(byte) -> more - Write a byte to inflate's state-machine. -// Returns true if more data is expected. -// inf.recycle() - Reset the internal state machine. -// inf.flush() -> data - Flush the output as a binary buffer. -// -// This is quite slow, but could be made fast if baked into inflate itself. -module.exports = function () { - var push = inflate(onEmit, onUnused); - var more = true; - var chunks = []; - var b = bops.create(1); - - return { write: write, recycle: recycle, flush: flush }; - - function write(byte) { - b[0] = byte; - push(null, b); - return more; - } - - function recycle() { - push.recycle(); - more = true; - } - - function flush() { - var buffer = bops.join(chunks); - chunks.length = 0; - return buffer; - } - - function onEmit(err, item) { - if (err) throw err; - if (item === undefined) { - // console.log("onEnd"); - more = false; - return; - } - chunks.push(item); - } - - function onUnused(chunks) { - // console.log("onUnused", chunks); - more = false; - } -}; - -var MAXBITS = 15 - , MAXLCODES = 286 - , MAXDCODES = 30 - , MAXCODES = (MAXLCODES+MAXDCODES) - , FIXLCODES = 288 - -var lens = [ - 3, 4, 5, 6, 7, 8, 9, 10, 11, 13, 15, 17, 19, 23, 27, 31, - 35, 43, 51, 59, 67, 83, 99, 115, 131, 163, 195, 227, 258 -] - -var lext = [ - 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, - 3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 0 -] - -var dists = [ - 1, 2, 3, 4, 5, 7, 9, 13, 17, 25, 33, 49, 65, 97, 129, 193, - 257, 385, 513, 769, 1025, 1537, 2049, 3073, 4097, 6145, - 8193, 12289, 16385, 24577 -] - -var dext = [ - 0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6, 6, - 7, 7, 8, 8, 9, 9, 10, 10, 11, 11, - 12, 12, 13, 13 -] - -var order = [ - 16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15 -] - -var WINDOW = 32768 - , WINDOW_MINUS_ONE = WINDOW - 1 - -function inflate(emit, on_unused) { - var output = new Uint8Array(WINDOW) - , need_input = false - , buffer_offset = 0 - , bytes_read = 0 - , output_idx = 0 - , ended = false - , state = null - , states = [] - , buffer = [] - , got = 0 - - // buffer up to 128k "output one" bytes - var OUTPUT_ONE_LENGTH = 131070 - , output_one_offs = OUTPUT_ONE_LENGTH - , output_one_buf - - var bitbuf = 0 - , bitcnt = 0 - , is_final = false - , fixed_codes - - var adler_s1 = 1 - , adler_s2 = 0 - - onread.recycle = function recycle() { - var out - buffer.length = 0 - buffer_offset = 0 - output_idx = 0 - bitbuf = 0 - bitcnt = 0 - states.length = 0 - is_final = false - need_input = false - bytes_read = 0 - output_idx = 0 - ended = false - got = 0 - adler_s1 = 1 - adler_s2 = 0 - output_one_offs = 0 - become(noop, {}, noop) - start_stream_header() - // return stream - } - - var bytes_need = 0 - , bytes_value = [] - - var bits_need = 0 - , bits_value = [] - - var codes_distcode = null - , codes_lencode = null - , codes_len = 0 - , codes_dist = 0 - , codes_symbol = 0 - - var dynamic_distcode = {symbol: [], count: []} - , dynamic_lencode = {symbol: [], count: []} - , dynamic_lengths = [] - , dynamic_nlen = 0 - , 
dynamic_ndist = 0 - , dynamic_ncode = 0 - , dynamic_index = 0 - , dynamic_symbol = 0 - , dynamic_len = 0 - - var decode_huffman = null - , decode_len = 0 - , decode_code = 0 - , decode_first = 0 - , decode_count = 0 - , decode_index = 0 - - var last = null - - become(noop, {}, noop) - start_stream_header() - - return onread - - function onread(err, buf) { - if(buf === undefined) { - return emit(err) - } - - return write(buf) - } - - function noop() { - - } - - function call_header() { - } - - function call_bytes(need) { - bytes_value.length = 0 - bytes_need = need - } - - function call_bits(need) { - bits_value = 0 - bits_need = need - } - - function call_codes(distcode, lencode) { - codes_len = - codes_dist = - codes_symbol = 0 - codes_distcode = distcode - codes_lencode = lencode - } - - function call_dynamic() { - dynamic_distcode.symbol.length = - dynamic_distcode.count.length = - dynamic_lencode.symbol.length = - dynamic_lencode.count.length = - dynamic_lengths.length = 0 - dynamic_nlen = 0 - dynamic_ndist = 0 - dynamic_ncode = 0 - dynamic_index = 0 - dynamic_symbol = 0 - dynamic_len = 0 - } - - function call_decode(h) { - decode_huffman = h - decode_len = 1 - decode_first = - decode_index = - decode_code = 0 - } - - function write(buf) { - buffer.push(buf) - got += buf.length - if(!ended) { - execute() - } - } - - function execute() { - do { - states[0].current() - } while(!need_input && !ended) - - var needed = need_input - need_input = false - } - - function start_stream_header() { - become(bytes, call_bytes(2), got_stream_header) - } - - function got_stream_header() { - var cmf = last[0] - , flg = last[1] - - - if((cmf << 8 | flg) % 31 !== 0) { - emit(new Error( - 'failed header check' - )) - return - } - - - - - if(flg & 32) { - return become(bytes, call_bytes(4), on_got_fdict) - } - return become(bits, call_bits(1), on_got_is_final) - } - - - - - function on_got_fdict() { - return become(bits, call_bits(1), on_got_is_final) - } - - - - - - - - - function on_got_is_final() { - is_final = last - become(bits, call_bits(2), on_got_type) - } - - - - - - - - - - - - - function on_got_type() { - if(last === 0) { - become(bytes, call_bytes(4), on_got_len_nlen) - return - } - - if(last === 1) { - // `fixed` and `dynamic` blocks both eventually delegate - // to the "codes" state -- which reads bits of input, throws - // them into a huffman tree, and produces "symbols" of output. 
- fixed_codes = fixed_codes || build_fixed() - become(start_codes, call_codes( - fixed_codes.distcode - , fixed_codes.lencode - ), done_with_codes) - return - } - - become(start_dynamic, call_dynamic(), done_with_codes) - return - } - - - - - function on_got_len_nlen() { - var want = last[0] | (last[1] << 8) - , nlen = last[2] | (last[3] << 8) - - if((~nlen & 0xFFFF) !== want) { - emit(new Error( - 'failed len / nlen check' - )) - } - - if(!want) { - become(bits, call_bits(1), on_got_is_final) - return - } - become(bytes, call_bytes(want), on_got_stored) - } - - - - - function on_got_stored() { - output_many(last) - if(is_final) { - become(bytes, call_bytes(4), on_got_adler) - return - } - become(bits, call_bits(1), on_got_is_final) - } - - - - - - - function start_dynamic() { - become(bits, call_bits(5), on_got_nlen) - } - - function on_got_nlen() { - dynamic_nlen = last + 257 - become(bits, call_bits(5), on_got_ndist) - } - - function on_got_ndist() { - dynamic_ndist = last + 1 - become(bits, call_bits(4), on_got_ncode) - } - - function on_got_ncode() { - dynamic_ncode = last + 4 - if(dynamic_nlen > MAXLCODES || dynamic_ndist > MAXDCODES) { - emit(new Error('bad counts')) - return - } - - become(bits, call_bits(3), on_got_lengths_part) - } - - function on_got_lengths_part() { - dynamic_lengths[order[dynamic_index]] = last - - ++dynamic_index - if(dynamic_index === dynamic_ncode) { - for(; dynamic_index < 19; ++dynamic_index) { - dynamic_lengths[order[dynamic_index]] = 0 - } - - // temporarily construct the `lencode` using the - // lengths we've read. we'll actually be using the - // symbols produced by throwing bits into the huffman - // tree to constuct the `lencode` and `distcode` huffman - // trees. - construct(dynamic_lencode, dynamic_lengths, 19) - dynamic_index = 0 - - become(decode, call_decode(dynamic_lencode), on_got_dynamic_symbol_iter) - return - } - become(bits, call_bits(3), on_got_lengths_part) - } - - function on_got_dynamic_symbol_iter() { - dynamic_symbol = last - - if(dynamic_symbol < 16) { - dynamic_lengths[dynamic_index++] = dynamic_symbol - do_check() - return - } - - dynamic_len = 0 - if(dynamic_symbol === 16) { - become(bits, call_bits(2), on_got_dynamic_symbol_16) - return - } - - if(dynamic_symbol === 17) { - become(bits, call_bits(3), on_got_dynamic_symbol_17) - return - } - - become(bits, call_bits(7), on_got_dynamic_symbol) - } - - function on_got_dynamic_symbol_16() { - dynamic_len = dynamic_lengths[dynamic_index - 1] - on_got_dynamic_symbol_17() - } - - function on_got_dynamic_symbol_17() { - dynamic_symbol = 3 + last - do_dynamic_end_loop() - } - - function on_got_dynamic_symbol() { - dynamic_symbol = 11 + last - do_dynamic_end_loop() - } - - function do_dynamic_end_loop() { - if(dynamic_index + dynamic_symbol > dynamic_nlen + dynamic_ndist) { - emit(new Error('too many lengths')) - return - } - - while(dynamic_symbol--) { - dynamic_lengths[dynamic_index++] = dynamic_len - } - - do_check() - } - - function do_check() { - if(dynamic_index >= dynamic_nlen + dynamic_ndist) { - end_read_dynamic() - return - } - become(decode, call_decode(dynamic_lencode), on_got_dynamic_symbol_iter) - } - - function end_read_dynamic() { - // okay, we can finally start reading data out of the stream. 
- construct(dynamic_lencode, dynamic_lengths, dynamic_nlen) - construct(dynamic_distcode, dynamic_lengths.slice(dynamic_nlen), dynamic_ndist) - become(start_codes, call_codes( - dynamic_distcode - , dynamic_lencode - ), done_with_codes) - } - - function start_codes() { - become(decode, call_decode(codes_lencode), on_got_codes_symbol) - } - - function on_got_codes_symbol() { - var symbol = codes_symbol = last - if(symbol < 0) { - emit(new Error('invalid symbol')) - return - } - - if(symbol < 256) { - output_one(symbol) - become(decode, call_decode(codes_lencode), on_got_codes_symbol) - return - } - - if(symbol > 256) { - symbol = codes_symbol -= 257 - if(symbol >= 29) { - emit(new Error('invalid fixed code')) - return - } - - become(bits, call_bits(lext[symbol]), on_got_codes_len) - return - } - - if(symbol === 256) { - unbecome() - return - } - } - - - - - - - function on_got_codes_len() { - codes_len = lens[codes_symbol] + last - become(decode, call_decode(codes_distcode), on_got_codes_dist_symbol) - } - - - function on_got_codes_dist_symbol() { - codes_symbol = last - if(codes_symbol < 0) { - emit(new Error('invalid distance symbol')) - return - } - - become(bits, call_bits(dext[codes_symbol]), on_got_codes_dist_dist) - } - - function on_got_codes_dist_dist() { - var dist = dists[codes_symbol] + last - - // Once we have a "distance" and a "length", we start to output bytes. - // We reach "dist" back from our current output position to get the byte - // we should repeat and output it (thus moving the output window cursor forward). - // Two notes: - // - // 1. Theoretically we could overlap our output and input. - // 2. `X % (2^N) == X & (2^N - 1)` with the distinction that - // the result of the bitwise AND won't be negative for the - // range of values we're feeding it. Spare a modulo, spoil the child. 
- while(codes_len--) { - output_one(output[(output_idx - dist) & WINDOW_MINUS_ONE]) - } - - become(decode, call_decode(codes_lencode), on_got_codes_symbol) - } - - function done_with_codes() { - if(is_final) { - become(bytes, call_bytes(4), on_got_adler) - return - } - become(bits, call_bits(1), on_got_is_final) - } - - - - - function on_got_adler() { - var check_s1 = last[3] | (last[2] << 8) - , check_s2 = last[1] | (last[0] << 8) - - if(check_s2 !== adler_s2 || check_s1 !== adler_s1) { - emit(new Error( - 'bad adler checksum: '+[check_s2, adler_s2, check_s1, adler_s1] - )) - return - } - - ended = true - - output_one_recycle() - - if(on_unused) { - on_unused( - [bops.subarray(buffer[0], buffer_offset)].concat(buffer.slice(1)) - , bytes_read - ) - } - - output_idx = 0 - ended = true - emit() - } - - function decode() { - _decode() - } - - function _decode() { - if(decode_len > MAXBITS) { - emit(new Error('ran out of codes')) - return - } - - become(bits, call_bits(1), got_decode_bit) - } - - function got_decode_bit() { - decode_code = (decode_code | last) >>> 0 - decode_count = decode_huffman.count[decode_len] - if(decode_code < decode_first + decode_count) { - unbecome(decode_huffman.symbol[decode_index + (decode_code - decode_first)]) - return - } - decode_index += decode_count - decode_first += decode_count - decode_first <<= 1 - decode_code = (decode_code << 1) >>> 0 - ++decode_len - _decode() - } - - - function become(fn, s, then) { - if(typeof then !== 'function') { - throw new Error - } - states.unshift({ - current: fn - , next: then - }) - } - - function unbecome(result) { - if(states.length > 1) { - states[1].current = states[0].next - } - states.shift() - if(!states.length) { - ended = true - - output_one_recycle() - if(on_unused) { - on_unused( - [bops.subarray(buffer[0], buffer_offset)].concat(buffer.slice(1)) - , bytes_read - ) - } - output_idx = 0 - ended = true - emit() - // return - } - else { - last = result - } - } - - function bits() { - var byt - , idx - - idx = 0 - bits_value = bitbuf - while(bitcnt < bits_need) { - // we do this to preserve `bits_value` when - // "need_input" is tripped. - // - // fun fact: if we moved that into the `if` statement - // below, it would trigger a deoptimization of this (very - // hot) function. JITs! 
- bitbuf = bits_value - byt = take() - if(need_input) { - break - } - ++idx - bits_value = (bits_value | (byt << bitcnt)) >>> 0 - bitcnt += 8 - } - - if(!need_input) { - bitbuf = bits_value >>> bits_need - bitcnt -= bits_need - unbecome((bits_value & ((1 << bits_need) - 1)) >>> 0) - } - } - - - - function bytes() { - var byte_accum = bytes_value - , value - - while(bytes_need--) { - value = take() - - - if(need_input) { - bitbuf = bitcnt = 0 - bytes_need += 1 - break - } - byte_accum[byte_accum.length] = value - } - if(!need_input) { - bitcnt = bitbuf = 0 - unbecome(byte_accum) - } - } - - - - function take() { - if(!buffer.length) { - need_input = true - return - } - - if(buffer_offset === buffer[0].length) { - buffer.shift() - buffer_offset = 0 - return take() - } - - ++bytes_read - - return bitbuf = takebyte() - } - - function takebyte() { - return buffer[0][buffer_offset++] - } - - - - function output_one(val) { - adler_s1 = (adler_s1 + val) % 65521 - adler_s2 = (adler_s2 + adler_s1) % 65521 - output[output_idx++] = val - output_idx &= WINDOW_MINUS_ONE - output_one_pool(val) - } - - function output_one_pool(val) { - if(output_one_offs === OUTPUT_ONE_LENGTH) { - output_one_recycle() - } - - output_one_buf[output_one_offs++] = val - } - - function output_one_recycle() { - if(output_one_offs > 0) { - if(output_one_buf) { - emit(null, bops.subarray(output_one_buf, 0, output_one_offs)) - } else { - } - output_one_buf = bops.create(OUTPUT_ONE_LENGTH) - output_one_offs = 0 - } - } - - function output_many(vals) { - var len - , byt - , olen - - output_one_recycle() - for(var i = 0, len = vals.length; i < len; ++i) { - byt = vals[i] - adler_s1 = (adler_s1 + byt) % 65521 - adler_s2 = (adler_s2 + adler_s1) % 65521 - output[output_idx++] = byt - output_idx &= WINDOW_MINUS_ONE - } - - emit(null, bops.from(vals)) - } +var pako = require('pako'); +var Binary = require('bodec').Binary; +if (Binary === Uint8Array) { + module.exports = pako.inflate; } - -function build_fixed() { - var lencnt = [] - , lensym = [] - , distcnt = [] - , distsym = [] - - var lencode = { - count: lencnt - , symbol: lensym - } - - var distcode = { - count: distcnt - , symbol: distsym - } - - var lengths = [] - , symbol - - for(symbol = 0; symbol < 144; ++symbol) { - lengths[symbol] = 8 - } - for(; symbol < 256; ++symbol) { - lengths[symbol] = 9 - } - for(; symbol < 280; ++symbol) { - lengths[symbol] = 7 - } - for(; symbol < FIXLCODES; ++symbol) { - lengths[symbol] = 8 - } - construct(lencode, lengths, FIXLCODES) - - for(symbol = 0; symbol < MAXDCODES; ++symbol) { - lengths[symbol] = 5 - } - construct(distcode, lengths, MAXDCODES) - return {lencode: lencode, distcode: distcode} -} - -function construct(huffman, lengths, num) { - var symbol - , left - , offs - , len - - offs = [] - - for(len = 0; len <= MAXBITS; ++len) { - huffman.count[len] = 0 - } - - for(symbol = 0; symbol < num; ++symbol) { - huffman.count[lengths[symbol]] += 1 - } - - if(huffman.count[0] === num) { - return - } - - left = 1 - for(len = 1; len <= MAXBITS; ++len) { - left <<= 1 - left -= huffman.count[len] - if(left < 0) { - return left - } - } - - offs[1] = 0 - for(len = 1; len < MAXBITS; ++len) { - offs[len + 1] = offs[len] + huffman.count[len] - } - - for(symbol = 0; symbol < num; ++symbol) { - if(lengths[symbol] !== 0) { - huffman.symbol[offs[lengths[symbol]]++] = symbol - } - } - - return left +else { + module.exports = function inflate(value) { + return new Binary(pako.inflate(new Uint8Array(value))); + }; } diff --git a/lib/ishash.js b/lib/ishash.js 
deleted file mode 100644 index 6e46845..0000000 --- a/lib/ishash.js +++ /dev/null @@ -1,3 +0,0 @@ -module.exports = function isHash(hash) { - return (/^[0-9a-f]{40}$/).test(hash); -}; diff --git a/lib/map.js b/lib/map.js deleted file mode 100644 index 0a1c903..0000000 --- a/lib/map.js +++ /dev/null @@ -1,14 +0,0 @@ -module.exports = map; - -// A functional map that works on both arrays and objects -// The returned object has the same shape as the original, but values mapped. -function map(obj, fn) { - if (Array.isArray(obj)) return obj.map(fn); - var result = {}; - var keys = Object.keys(obj); - for (var i = 0, l = keys.length; i < l; i++) { - var key = keys[i]; - result[key] = fn(obj[key], key, obj); - } - return result; -} diff --git a/lib/modes.js b/lib/modes.js new file mode 100644 index 0000000..9162c62 --- /dev/null +++ b/lib/modes.js @@ -0,0 +1,28 @@ +"use strict"; + +var masks = { + mask: parseInt('100000', 8), + blob: parseInt('140000', 8), + file: parseInt('160000', 8) +}; + +var modes = module.exports = { + isBlob: function (mode) { + return (mode & masks.blob) === masks.mask; + }, + isFile: function (mode) { + return (mode & masks.file) === masks.mask; + }, + toType: function (mode) { + if (mode === modes.commit) return "commit"; + if (mode === modes.tree) return "tree"; + if ((mode & masks.blob) === masks.mask) return "blob"; + return "unknown"; + }, + tree: parseInt( '40000', 8), + blob: parseInt('100644', 8), + file: parseInt('100644', 8), + exec: parseInt('100755', 8), + sym: parseInt('120000', 8), + commit: parseInt('160000', 8) +}; diff --git a/lib/object-codec.js b/lib/object-codec.js new file mode 100644 index 0000000..a1609c3 --- /dev/null +++ b/lib/object-codec.js @@ -0,0 +1,265 @@ +"use strict"; +var bodec = require('bodec'); +var modes = require('./modes'); + +// (body) -> raw-buffer +var encoders = exports.encoders = { + blob: encodeBlob, + tree: encodeTree, + commit: encodeCommit, + tag: encodeTag +}; + + // ({type:type, body:raw-buffer}) -> buffer +exports.frame = frame; + +// (raw-buffer) -> body +var decoders = exports.decoders ={ + blob: decodeBlob, + tree: decodeTree, + commit: decodeCommit, + tag: decodeTag +}; + +// (buffer) -> {type:type, body:raw-buffer} +exports.deframe = deframe; + +// Export git style path sort in case it's wanted. +exports.treeMap = treeMap; +exports.treeSort = treeSort; + +function encodeBlob(body) { + if (!bodec.isBinary(body)) throw new TypeError("Blobs must be binary values"); + return body; +} + +function treeMap(key) { + /*jshint validthis:true*/ + var entry = this[key]; + return { + name: key, + mode: entry.mode, + hash: entry.hash + }; +} + +function treeSort(a, b) { + var aa = (a.mode === modes.tree) ? a.name + "/" : a.name; + var bb = (b.mode === modes.tree) ? b.name + "/" : b.name; + return aa > bb ? 1 : aa < bb ? 
-1 : 0; +} + +function encodeTree(body) { + var tree = ""; + if (Array.isArray(body)) throw new TypeError("Tree must be in object form"); + var list = Object.keys(body).map(treeMap, body).sort(treeSort); + for (var i = 0, l = list.length; i < l; i++) { + var entry = list[i]; + tree += entry.mode.toString(8) + " " + bodec.encodeUtf8(entry.name) + + "\0" + bodec.decodeHex(entry.hash); + } + return bodec.fromRaw(tree); +} + +function encodeTag(body) { + var str = "object " + body.object + + "\ntype " + body.type + + "\ntag " + body.tag + + "\ntagger " + formatPerson(body.tagger) + + "\n\n" + body.message; + return bodec.fromUnicode(str); +} + +function encodeCommit(body) { + var str = "tree " + body.tree; + for (var i = 0, l = body.parents.length; i < l; ++i) { + str += "\nparent " + body.parents[i]; + } + str += "\nauthor " + formatPerson(body.author) + + "\ncommitter " + formatPerson(body.committer) + + "\n\n" + body.message; + return bodec.fromUnicode(str); +} + + +function formatPerson(person) { + return safe(person.name) + + " <" + safe(person.email) + "> " + + formatDate(person.date); +} + +function safe(string) { + return string.replace(/(?:^[\.,:;<>"']+|[\0\n<>]+|[\.,:;<>"']+$)/gm, ""); +} + +function two(num) { + return (num < 10 ? "0" : "") + num; +} + +function formatDate(date) { + var seconds, offset; + if (date.seconds) { + seconds = date.seconds; + offset = date.offset; + } + // Also accept Date instances + else { + seconds = Math.floor(date.getTime() / 1000); + offset = date.getTimezoneOffset(); + } + var neg = "+"; + if (offset <= 0) offset = -offset; + else neg = "-"; + offset = neg + two(Math.floor(offset / 60)) + two(offset % 60); + return seconds + " " + offset; +} + +function frame(obj) { + var type = obj.type; + var body = obj.body; + if (!bodec.isBinary(body)) body = encoders[type](body); + return bodec.join([ + bodec.fromRaw(type + " " + body.length + "\0"), + body + ]); +} + +function decodeBlob(body) { + return body; +} + +function decodeTree(body) { + var i = 0; + var length = body.length; + var start; + var mode; + var name; + var hash; + var tree = {}; + while (i < length) { + start = i; + i = indexOf(body, 0x20, start); + if (i < 0) throw new SyntaxError("Missing space"); + mode = parseOct(body, start, i++); + start = i; + i = indexOf(body, 0x00, start); + name = bodec.toUnicode(body, start, i++); + hash = bodec.toHex(body, i, i += 20); + tree[name] = { + mode: mode, + hash: hash + }; + } + return tree; +} + +function decodeCommit(body) { + var i = 0; + var start; + var key; + var parents = []; + var commit = { + tree: "", + parents: parents, + author: "", + committer: "", + message: "" + }; + while (body[i] !== 0x0a) { + start = i; + i = indexOf(body, 0x20, start); + if (i < 0) throw new SyntaxError("Missing space"); + key = bodec.toRaw(body, start, i++); + start = i; + i = indexOf(body, 0x0a, start); + if (i < 0) throw new SyntaxError("Missing linefeed"); + var value = bodec.toUnicode(body, start, i++); + if (key === "parent") { + parents.push(value); + } + else { + if (key === "author" || key === "committer") { + value = decodePerson(value); + } + commit[key] = value; + } + } + i++; + commit.message = bodec.toUnicode(body, i, body.length); + return commit; +} + +function decodeTag(body) { + var i = 0; + var start; + var key; + var tag = {}; + while (body[i] !== 0x0a) { + start = i; + i = indexOf(body, 0x20, start); + if (i < 0) throw new SyntaxError("Missing space"); + key = bodec.toRaw(body, start, i++); + start = i; + i = indexOf(body, 0x0a, start); + if (i 
< 0) throw new SyntaxError("Missing linefeed"); + var value = bodec.toUnicode(body, start, i++); + if (key === "tagger") value = decodePerson(value); + tag[key] = value; + } + i++; + tag.message = bodec.toUnicode(body, i, body.length); + return tag; +} + +function decodePerson(string) { + var match = string.match(/^([^<]*) <([^>]*)> ([^ ]*) (.*)$/); + if (!match) throw new Error("Improperly formatted person string"); + return { + name: match[1], + email: match[2], + date: { + seconds: parseInt(match[3], 10), + offset: parseInt(match[4], 10) / 100 * -60 + } + }; +} + +function deframe(buffer, decode) { + var space = indexOf(buffer, 0x20); + if (space < 0) throw new Error("Invalid git object buffer"); + var nil = indexOf(buffer, 0x00, space); + if (nil < 0) throw new Error("Invalid git object buffer"); + var body = bodec.slice(buffer, nil + 1); + var size = parseDec(buffer, space + 1, nil); + if (size !== body.length) throw new Error("Invalid body length."); + var type = bodec.toRaw(buffer, 0, space); + return { + type: type, + body: decode ? decoders[type](body) : body + }; +} + +function indexOf(buffer, byte, i) { + i |= 0; + var length = buffer.length; + for (;;i++) { + if (i >= length) return -1; + if (buffer[i] === byte) return i; + } +} + +function parseOct(buffer, start, end) { + var val = 0; + while (start < end) { + val = (val << 3) + buffer[start++] - 0x30; + } + return val; +} + +function parseDec(buffer, start, end) { + var val = 0; + while (start < end) { + val = val * 10 + buffer[start++] - 0x30; + } + return val; +} diff --git a/lib/pack-codec.js b/lib/pack-codec.js index 8dad19a..93ac7c1 100644 --- a/lib/pack-codec.js +++ b/lib/pack-codec.js @@ -1,11 +1,8 @@ +var inflateStream = require('./inflate-stream.js'); var inflate = require('./inflate.js'); var deflate = require('./deflate.js'); -var sha1 = require('./sha1.js'); -var bops = { - subarray: require('bops/subarray.js'), - join: require('bops/join.js'), - from: require('bops/from.js'), -}; +var sha1 = require('git-sha1'); +var bodec = require('bodec'); var typeToNum = { commit: 1, @@ -20,30 +17,53 @@ for (var type in typeToNum) { var num = typeToNum[type]; numToType[num] = type; } +exports.parseEntry = parseEntry; +function parseEntry(chunk) { + var offset = 0; + var byte = chunk[offset++]; + var type = numToType[(byte >> 4) & 0x7]; + var size = byte & 0xf; + var left = 4; + while (byte & 0x80) { + byte = chunk[offset++]; + size |= (byte & 0x7f) << left; + left += 7; + } + size = size >>> 0; + var ref; + if (type === "ref-delta") { + ref = bodec.toHex(bodec.slice(chunk, offset, offset += 20)); + } + else if (type === "ofs-delta") { + byte = chunk[offset++]; + ref = byte & 0x7f; + while (byte & 0x80) { + byte = chunk[offset++]; + ref = ((ref + 1) << 7) | (byte & 0x7f); + } + } -exports.packFrame = packFrame; -function packFrame(type, body, callback) { - var length = body.length; - var head = [(typeToNum[type] << 4) | (length & 0xf)]; - var i = 0; - length >>= 4; - while (length) { - head[i++] |= 0x80; - head[i] = length & 0x7f; - length >>= 7; + var body = inflate(bodec.slice(chunk, offset)); + if (body.length !== size) { + throw new Error("Size mismatch"); + } + var result = { + type: type, + body: body + }; + if (typeof ref !== "undefined") { + result.ref = ref; } - deflate(body, function (err, body) { - if (err) return callback(err); - callback(null, bops.join([bops.from(head), body])); - }); + return result; } + exports.decodePack = decodePack; function decodePack(emit) { var state = $pack; var sha1sum = sha1(); - var 
inf = inflate(); + var inf = inflateStream(); var offset = 0; var position = 0; @@ -65,7 +85,7 @@ function decodePack(emit) { for (var i = 0, l = chunk.length; i < l; i++) { // console.log([state, i, chunk[i].toString(16)]); - if (!state) throw new Error("Unexpected extra bytes: " + bops.subarray(chunk, i)); + if (!state) throw new Error("Unexpected extra bytes: " + bodec.slice(chunk, i)); state = state(chunk[i], i, chunk); position++; } @@ -143,6 +163,7 @@ function decodePack(emit) { ref = ""; return $refDelta; } + // console.log({type: type,length: length}) return $body; } @@ -173,10 +194,14 @@ function decodePack(emit) { // Common helper for emitting all three object shapes function emitObject() { + var body = bodec.join(parts); + if (body.length !== length) { + throw new Error("Body length mismatch"); + } var item = { type: numToType[type], size: length, - body: bops.join(parts), + body: body, offset: start }; if (ref) item.ref = ref; @@ -201,7 +226,7 @@ function decodePack(emit) { emitObject(); // If this was all the objects, start calculating the sha1sum if (--num) return $header; - sha1sum.update(bops.subarray(chunk, 0, i + 1)); + sha1sum.update(bodec.slice(chunk, 0, i + 1)); return $checksum; } @@ -214,3 +239,88 @@ function decodePack(emit) { } } + + +exports.encodePack = encodePack; +function encodePack(emit) { + var sha1sum = sha1(); + var left; + return function (item) { + if (item === undefined) { + if (left !== 0) throw new Error("Some items were missing"); + return emit(); + } + if (typeof item.num === "number") { + if (left !== undefined) throw new Error("Header already sent"); + left = item.num; + write(packHeader(item.num)); + } + else if (typeof item.type === "string" && bodec.isBinary(item.body)) { + // The header must be sent before items. 
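+      // (A pack stream is a single {num: n} header item followed by exactly
+      // n {type, body} items; the trailing SHA-1 checksum is written
+      // automatically after the last item.)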
+ if (typeof left !== "number") throw new Error("Headers not sent yet"); + + // Make sure we haven't sent all the items already + if (!left) throw new Error("All items already sent"); + + // Send the item in packstream format + write(packFrame(item)); + + // Send the checksum after the last item + if (!--left) { + emit(bodec.fromHex(sha1sum.digest())); + } + } + else { + throw new Error("Invalid item"); + } + }; + function write(chunk) { + sha1sum.update(chunk); + emit(chunk); + } +} + +function packHeader(length) { + return bodec.fromArray([ + 0x50, 0x41, 0x43, 0x4b, // PACK + 0, 0, 0, 2, // version 2 + length >> 24, // Num of objects + (length >> 16) & 0xff, + (length >> 8) & 0xff, + length & 0xff + ]); +} + +function packFrame(item) { + var length = item.body.length; + + // write TYPE_AND_BASE128_SIZE + var head = [(typeToNum[item.type] << 4) | (length & 0xf)]; + var i = 0; + length >>= 4; + while (length) { + head[i++] |= 0x80; + head[i] = length & 0x7f; + length >>= 7; + } + + if (typeof item.ref === "number") { + // write BIG_ENDIAN_MODIFIED_BASE_128_NUMBER + var offset = item.ref; + // Calculate how many digits we need in base 128 and move the pointer + i += Math.floor(Math.log(offset) / Math.log(0x80)) + 1; + // Write the last digit + head[i] = offset & 0x7f; + // Then write the rest + while (offset >>= 7) { + head[--i] = 0x80 | (--offset & 0x7f); + } + } + + var parts = [bodec.fromArray(head)]; + if (typeof item.ref === "string") { + parts.push(bodec.fromHex(item.ref)); + } + parts.push(deflate(item.body)); + return bodec.join(parts); +} diff --git a/lib/parallel.js b/lib/parallel.js deleted file mode 100644 index adeb739..0000000 --- a/lib/parallel.js +++ /dev/null @@ -1,45 +0,0 @@ -module.exports = parallel; - -// Run several continuables in parallel. The results are stored in the same -// shape as the input continuables (array or object). -// Returns a new continuable or accepts a callback. -// This will bail on the first error and ignore all others after it. -function parallel(commands, callback) { - if (!callback) return parallel.bind(this, commands); - var results, length, left, i, done; - - // Handle array shapes - if (Array.isArray(commands)) { - left = length = commands.length; - results = new Array(left); - for (i = 0; i < length; i++) { - run(i, commands[i]); - } - } - - // Otherwise assume it's an object. 
- else { - var keys = Object.keys(commands); - left = length = keys.length; - results = {}; - for (i = 0; i < length; i++) { - var key = keys[i]; - run(key, commands[key]); - } - } - - // Common logic for both - function run(key, command) { - command(function (err, result) { - if (done) return; - if (err) { - done = true; - return callback(err); - } - results[key] = result; - if (--left) return; - done = true; - callback(null, results); - }); - } -} diff --git a/lib/parseascii.js b/lib/parseascii.js deleted file mode 100644 index 78f5eb5..0000000 --- a/lib/parseascii.js +++ /dev/null @@ -1,7 +0,0 @@ -module.exports = function parseAscii(buffer, start, end) { - var val = ""; - while (start < end) { - val += String.fromCharCode(buffer[start++]); - } - return val; -}; diff --git a/lib/parsedec.js b/lib/parsedec.js deleted file mode 100644 index e87151d..0000000 --- a/lib/parsedec.js +++ /dev/null @@ -1,7 +0,0 @@ -module.exports = function parseDec(buffer, start, end) { - var val = 0; - while (start < end) { - val = val * 10 + buffer[start++] - 0x30; - } - return val; -}; diff --git a/lib/parseoct.js b/lib/parseoct.js deleted file mode 100644 index d67d8d9..0000000 --- a/lib/parseoct.js +++ /dev/null @@ -1,7 +0,0 @@ -module.exports = function parseOct(buffer, start, end) { - var val = 0; - while (start < end) { - val = (val << 3) + buffer[start++] - 0x30; - } - return val; -}; diff --git a/lib/parsetohex.js b/lib/parsetohex.js deleted file mode 100644 index a2a02af..0000000 --- a/lib/parsetohex.js +++ /dev/null @@ -1,10 +0,0 @@ -var chars = "0123456789abcdef"; - -module.exports = function parseToHex(buffer, start, end) { - var val = ""; - while (start < end) { - var byte = buffer[start++]; - val += chars[byte >> 4] + chars[byte & 0xf]; - } - return val; -}; diff --git a/lib/pathcmp.js b/lib/pathcmp.js deleted file mode 100644 index bc3189d..0000000 --- a/lib/pathcmp.js +++ /dev/null @@ -1,6 +0,0 @@ -module.exports = function pathCmp(oa, ob) { - var a = oa.name; - var b = ob.name; - a += "/"; b += "/"; - return a < b ? -1 : a > b ? 
1 : 0; -}; diff --git a/examples/pkt-line.js b/lib/pkt-line.js similarity index 63% rename from examples/pkt-line.js rename to lib/pkt-line.js index 09cc550..4134462 100644 --- a/examples/pkt-line.js +++ b/lib/pkt-line.js @@ -1,18 +1,11 @@ -var bops = { - is: require('bops/is.js'), - to: require('bops/to.js'), - from: require('bops/from.js'), - create: require('bops/create.js'), - subarray: require('bops/subarray.js'), - join: require('bops/join.js'), -}; +"use strict"; -var PACK = bops.from("PACK"); +var bodec = require('bodec'); +var PACK = bodec.fromRaw("PACK"); module.exports = { deframer: deframer, - framer: framer, - frame: frame, + framer: framer }; function deframer(emit) { @@ -20,6 +13,7 @@ function deframer(emit) { var offset = 4; var length = 0; var data; + var more = true; return function (item) { @@ -47,15 +41,15 @@ function deframer(emit) { if (offset === 0) { if (length === 4) { offset = 4; - emit(""); + more = emit(""); } else if (length === 0) { offset = 4; - emit(null); + more = emit(null); } else if (length > 4) { length -= 4; - data = bops.create(length); + data = bodec.create(length); state = 1; } else { @@ -71,16 +65,16 @@ function deframer(emit) { state = 0; length = 0; if (data[0] === 1) { - emit(bops.subarray(data, 1)); + more = emit(bodec.slice(data, 1)); } else if (data[0] === 2) { - emit(["progress", bops.to(bops.subarray(data, 1))]); + more = emit({progress: bodec.toUnicode(data, 1)}); } else if (data[0] === 3) { - emit(["error", bops.to(bops.subarray(data, 1))]); + more = emit({error: bodec.toUnicode(data, 1)}); } else { - emit(bops.to(data)); + more = emit(bodec.toUnicode(data).trim()); } } } @@ -89,52 +83,34 @@ function deframer(emit) { continue; } state = 3; - emit(bops.join([PACK, bops.subarray(item, i)])); + more = emit(bodec.join([PACK, bodec.subarray(item, i)])); break; } else { throw new Error("pkt-line decoder in invalid state"); } } + + return more; }; } - function framer(emit) { return function (item) { if (item === undefined) return emit(); - emit(frame(item)); - }; -} - -function frame(item) { - if (item === null) return bops.from("0000"); - if (typeof item === "string") { - item = bops.from(item); - } - if (bops.is(item)) { - return bops.join([frameHead(item.length + 4), item]); - } - if (Array.isArray(item)) { - var type = item[0]; - item = item[1]; - var head = bops.create(5); - if (type === "pack") head[4] = 1; - else if (type === "progress") head[4] = 2; - else if (type === "error") head[4] = 3; - else throw new Error("Invalid channel name: " + type); + if (item === null) { + return emit(bodec.fromRaw("0000")); + } if (typeof item === "string") { - item = bops.from(item); + item = bodec.fromUnicode(item); } - return bops.join([frameHead(item.length + 5, head), item]); - } - throw new Error("Invalid input: " + item); + return emit(bodec.join([frameHead(item.length + 4), item])); + }; } - -function frameHead(length, buffer) { - buffer = buffer || bops.create(4); +function frameHead(length) { + var buffer = bodec.create(4); buffer[0] = toHexChar(length >>> 12); buffer[1] = toHexChar((length >>> 8) & 0xf); buffer[2] = toHexChar((length >>> 4) & 0xf); diff --git a/lib/serial.js b/lib/serial.js deleted file mode 100644 index 390b97a..0000000 --- a/lib/serial.js +++ /dev/null @@ -1,38 +0,0 @@ -module.exports = serial; - -// Run several continuables in serial. The results are stored in the same -// shape as the input continuables (array or object). -// Returns a new continuable or accepts a callback. -// This will bail on the first error. 
-function serial(commands, callback) { - if (!callback) return serial.bind(this, commands); - var results, keys, index = 0, length, key; - - if (Array.isArray(commands)) { - length = commands.length; - results = new Array(length); - } - else { - results = {}; - keys = Object.keys(commands); - length = keys.length; - } - - index = 0; - return runNext(); - - function runNext() { - if (index >= length) { - return callback(null, results); - } - key = keys ? keys[index] : index; - var command = commands[key]; - command(onResult); - } - - function onResult(err, result) { - if (err) return callback(err); - results[key] = result; - runNext(); - } -} \ No newline at end of file diff --git a/lib/sha1.js b/lib/sha1.js deleted file mode 100644 index 1438d5c..0000000 --- a/lib/sha1.js +++ /dev/null @@ -1,146 +0,0 @@ -var Array32 = typeof Uint32Array === "function" ? Uint32Array : Array; - -module.exports = function sha1(buffer) { - if (buffer === undefined) return create(); - var shasum = create(); - shasum.update(buffer); - return shasum.digest(); -}; - -// A streaming interface for when nothing is passed in. -function create() { - var h0 = 0x67452301; - var h1 = 0xEFCDAB89; - var h2 = 0x98BADCFE; - var h3 = 0x10325476; - var h4 = 0xC3D2E1F0; - // The first 64 bytes (16 words) is the data chunk - var block = new Array32(80), offset = 0, shift = 24; - var totalLength = 0; - - return { update: update, digest: digest }; - - // The user gave us more data. Store it! - function update(chunk) { - if (typeof chunk === "string") return updateString(chunk); - var length = chunk.length; - totalLength += length * 8; - for (var i = 0; i < length; i++) { - write(chunk[i]); - } - } - - function updateString(string) { - var length = string.length; - totalLength += length * 8; - for (var i = 0; i < length; i++) { - write(string.charCodeAt(i)); - } - } - - function write(byte) { - block[offset] |= (byte & 0xff) << shift; - if (shift) { - shift -= 8; - } - else { - offset++; - shift = 24; - } - if (offset === 16) processBlock(); - } - - // No more data will come, pad the block, process and return the result. - function digest() { - // Pad - write(0x80); - if (offset > 14 || (offset === 14 && shift < 24)) { - processBlock(); - } - offset = 14; - shift = 24; - - // 64-bit length big-endian - write(0x00); // numbers this big aren't accurate in javascript anyway - write(0x00); // ..So just hard-code to zero. - write(totalLength > 0xffffffffff ? totalLength / 0x10000000000 : 0x00); - write(totalLength > 0xffffffff ? totalLength / 0x100000000 : 0x00); - for (var s = 24; s >= 0; s -= 8) { - write(totalLength >> s); - } - - // At this point one last processBlock() should trigger and we can pull out the result. - return toHex(h0) - + toHex(h1) - + toHex(h2) - + toHex(h3) - + toHex(h4); - } - - // We have a full block to process. Let's do it! 
- function processBlock() { - // Extend the sixteen 32-bit words into eighty 32-bit words: - for (var i = 16; i < 80; i++) { - var w = block[i - 3] ^ block[i - 8] ^ block[i - 14] ^ block[i - 16]; - block[i] = (w << 1) | (w >>> 31); - } - - // log(block); - - // Initialize hash value for this chunk: - var a = h0; - var b = h1; - var c = h2; - var d = h3; - var e = h4; - var f, k; - - // Main loop: - for (i = 0; i < 80; i++) { - if (i < 20) { - f = d ^ (b & (c ^ d)); - k = 0x5A827999; - } - else if (i < 40) { - f = b ^ c ^ d; - k = 0x6ED9EBA1; - } - else if (i < 60) { - f = (b & c) | (d & (b | c)); - k = 0x8F1BBCDC; - } - else { - f = b ^ c ^ d; - k = 0xCA62C1D6; - } - var temp = (a << 5 | a >>> 27) + f + e + k + (block[i]|0); - e = d; - d = c; - c = (b << 30 | b >>> 2); - b = a; - a = temp; - } - - // Add this chunk's hash to result so far: - h0 = (h0 + a) | 0; - h1 = (h1 + b) | 0; - h2 = (h2 + c) | 0; - h3 = (h3 + d) | 0; - h4 = (h4 + e) | 0; - - // The block is now reusable. - offset = 0; - for (i = 0; i < 16; i++) { - block[i] = 0; - } - } - - function toHex(word) { - var hex = ""; - for (var i = 28; i >= 0; i -= 4) { - hex += ((word >> i) & 0xf).toString(16); - } - return hex; - } - -} diff --git a/lib/trace.js b/lib/trace.js deleted file mode 100644 index a5d3020..0000000 --- a/lib/trace.js +++ /dev/null @@ -1 +0,0 @@ -module.exports = false; diff --git a/lib/tracedb.js b/lib/tracedb.js deleted file mode 100644 index 65da059..0000000 --- a/lib/tracedb.js +++ /dev/null @@ -1,52 +0,0 @@ -var trace = require('./trace.js'); - -module.exports = function (db) { - return { - get: wrap1("get", db.get), - set: wrap2("set", db.set), - has: wrap1("has", db.has), - del: wrap1("del", db.del), - keys: wrap1("keys", db.keys), - init: wrap0("init", db.init), - }; -}; - -function wrap0(type, fn) { - return zero; - function zero(callback) { - if (!callback) return zero.bind(this); - return fn.call(this, check); - function check(err) { - if (err) return callback(err); - trace(type, null); - return callback.apply(this, arguments); - } - } -} - -function wrap1(type, fn) { - return one; - function one(arg, callback) { - if (!callback) return one.bind(this, arg); - return fn.call(this, arg, check); - function check(err) { - if (err) return callback(err); - trace(type, null, arg); - return callback.apply(this, arguments); - } - } -} - -function wrap2(type, fn) { - return two; - function two(arg1, arg2, callback) { - if (!callback) return two.bind(this, arg1. 
arg2); - return fn.call(this, arg1, arg2, check); - function check(err) { - if (err) return callback(err); - trace(type, null, arg1); - return callback.apply(this, arguments); - } - } -} - diff --git a/lib/walk.js b/lib/walk.js deleted file mode 100644 index 677b9dc..0000000 --- a/lib/walk.js +++ /dev/null @@ -1,42 +0,0 @@ -module.exports = function walk(seed, scan, loadKey, compare) { - var queue = [seed]; - var working = 0, error, cb; - return {read: read, abort: abort}; - - function read(callback) { - if (cb) return callback(new Error("Only one read at a time")); - if (working) { cb = callback; return; } - var item = queue.shift(); - if (!item) return callback(); - try { scan(item).forEach(onKey); } - catch (err) { return callback(err); } - return callback(null, item); - } - - function abort(callback) { return callback(); } - - function onError(err) { - if (cb) { - var callback = cb; cb = null; - return callback(err); - } - error = err; - } - - function onKey(key) { - working++; - loadKey(key, onItem); - } - - function onItem(err, item) { - working--; - if (err) return onError(err); - var index = queue.length; - while (index && compare(item, queue[index - 1])) index--; - queue.splice(index, 0, item); - if (!working && cb) { - var callback = cb; cb = null; - return read(callback); - } - } -}; \ No newline at end of file diff --git a/lib/wrap-handler.js b/lib/wrap-handler.js new file mode 100644 index 0000000..9a1c050 --- /dev/null +++ b/lib/wrap-handler.js @@ -0,0 +1,21 @@ +"use strict"; + +module.exports = wrapHandler; + +function wrapHandler(fn, onError) { + if (onError) { + return function (err, value) { + if (err) return onError(err); + try { + return fn(value); + } + catch (err) { + return onError(err); + } + }; + } + return function (err, value) { + if (err) throw err; + return fn(value); + }; +} diff --git a/mixins/add-cache.js b/mixins/add-cache.js new file mode 100644 index 0000000..6d94285 --- /dev/null +++ b/mixins/add-cache.js @@ -0,0 +1,61 @@ +"use strict"; + +module.exports = addCache; +function addCache(repo, cache) { + var loadAs = repo.loadAs; + if (loadAs) repo.loadAs = loadAsCached; + var saveAs = repo.saveAs; + if (saveAs) repo.saveAs = saveAsCached; + var createTree = repo.createTree; + if (createTree) repo.createTree = createTreeCached; + + function loadAsCached(type, hash, callback) { + // Next check in disk cache... + cache.loadAs(type, hash, onCacheLoad); + + function onCacheLoad(err, value) { + if (err) return callback(err); + // ...and return if it's there. + if (value !== undefined) { + return callback(null, value, hash); + } + + // Otherwise load from real data source... + loadAs.call(repo, type, hash, onLoad); + } + + function onLoad(err, value) { + if (value === undefined) return callback(err); + + // Store it on disk too... + // Force the hash to prevent mismatches. + cache.saveAs(type, value, onSave, hash); + + function onSave(err) { + if (err) return callback(err); + // Finally return the value to caller. + callback(null, value, hash); + } + } + } + + function saveAsCached(type, value, callback) { + saveAs.call(repo, type, value, onSave); + + function onSave(err, hash) { + if (err) return callback(err); + // Store in disk, forcing hash to match. 
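+      // (Note the argument order this cache API uses: the callback comes
+      // before the optional forced hash, matching the onLoad call above.)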
+ cache.saveAs(type, value, callback, hash); + } + } + + function createTreeCached(entries, callback) { + createTree.call(repo, entries, onTree); + + function onTree(err, hash, tree) { + if (err) return callback(err); + cache.saveAs("tree", tree, callback, hash); + } + } + +} diff --git a/mixins/client.js b/mixins/client.js deleted file mode 100644 index 0910bd6..0000000 --- a/mixins/client.js +++ /dev/null @@ -1,180 +0,0 @@ -var pushToPull = require('push-to-pull'); -var parse = pushToPull(require('../lib/pack-codec.js').decodePack); -var agent = require('../lib/agent.js'); - -module.exports = function (repo) { - repo.fetchPack = fetchPack; - repo.sendPack = sendPack; -}; - -function fetchPack(remote, opts, callback) { - if (!callback) return fetchPack.bind(this, remote, opts); - var repo = this; - var db = repo.db; - var refs, branch, queue, ref, hash; - return remote.discover(onDiscover); - - function onDiscover(err, serverRefs, serverCaps) { - if (err) return callback(err); - refs = serverRefs; - opts.caps = processCaps(opts, serverCaps); - return processWants(refs, opts.want, onWants); - } - - function onWants(err, wants) { - if (err) return callback(err); - opts.wants = wants; - return remote.fetch(repo, opts, onPackStream); - } - - function onPackStream(err, raw) { - if (err) return callback(err); - if (!raw) return remote.close(onDone); - var packStream = parse(raw); - return repo.unpack(packStream, opts, onUnpack); - } - - function onUnpack(err) { - if (err) return callback(err); - return remote.close(onClose); - } - - function onClose(err) { - if (err) return callback(err); - queue = Object.keys(refs); - return next(); - } - - function next(err) { - if (err) return callback(err); - ref = queue.shift(); - if (!ref) return repo.setHead(branch, onDone); - if (ref === "HEAD" || /{}$/.test(ref)) return next(); - hash = refs[ref]; - if (!branch && (hash === refs.HEAD)) branch = ref.substr(11); - db.has(hash, onHas); - } - - function onHas(err, has) { - if (err) return callback(err); - if (!has) return next(); - return db.set(ref, hash + "\n", next); - } - - function onDone(err) { - if (err) return callback(err); - return callback(null, refs); - } - - function processCaps(opts, serverCaps) { - var caps = []; - if (serverCaps["ofs-delta"]) caps.push("ofs-delta"); - if (serverCaps["thin-pack"]) caps.push("thin-pack"); - if (opts.includeTag && serverCaps["include-tag"]) caps.push("include-tag"); - if ((opts.onProgress || opts.onError) && - (serverCaps["side-band-64k"] || serverCaps["side-band"])) { - caps.push(serverCaps["side-band-64k"] ? "side-band-64k" : "side-band"); - if (!opts.onProgress && serverCaps["no-progress"]) { - caps.push("no-progress"); - } - } - if (serverCaps.agent) caps.push("agent=" + agent); - return caps; - } - - function processWants(refs, filter, callback) { - if (filter === null || filter === undefined) { - return defaultWants(refs, callback); - } - filter = Array.isArray(filter) ? arrayFilter(filter) : - typeof filter === "function" ? 
filter = filter : - wantFilter(filter); - - var list = Object.keys(refs); - var wants = {}; - var ref, hash; - return shift(); - function shift() { - ref = list.shift(); - if (!ref) return callback(null, Object.keys(wants)); - hash = refs[ref]; - repo.resolve(ref, onResolve); - } - function onResolve(err, oldHash) { - // Skip refs we already have - if (hash === oldHash) return shift(); - filter(ref, onFilter); - } - function onFilter(err, want) { - if (err) return callback(err); - // Skip refs the user doesn't want - if (want) wants[hash] = true; - return shift(); - } - } - - function defaultWants(refs, callback) { - return repo.listRefs("refs/heads", onRefs); - - function onRefs(err, branches) { - if (err) return callback(err); - var wants = Object.keys(branches); - wants.unshift("HEAD"); - return processWants(refs, wants, callback); - } - } - -} - -function wantMatch(ref, want) { - if (want === "HEAD" || want === null || want === undefined) { - return ref === "HEAD"; - } - if (Object.prototype.toString.call(want) === '[object RegExp]') { - return want.test(ref); - } - if (typeof want === "boolean") return want; - if (typeof want !== "string") { - throw new TypeError("Invalid want type: " + typeof want); - } - return (/^refs\//.test(ref) && ref === want) || - (ref === "refs/heads/" + want) || - (ref === "refs/tags/" + want); -} - -function wantFilter(want) { - return filter; - function filter(ref, callback) { - var result; - try { - result = wantMatch(ref, want); - } - catch (err) { - return callback(err); - } - return callback(null, result); - } -} - -function arrayFilter(want) { - var length = want.length; - return filter; - function filter(ref, callback) { - var result; - try { - for (var i = 0; i < length; ++i) { - result = wantMatch(ref, want[i]); - if (result) break; - } - } - catch (err) { - return callback(err); - } - return callback(null, result); - } -} - -function sendPack(remote, opts, callback) { - if (!callback) return sendPack.bind(this, remote, opts); - throw "TODO: Implement repo.sendPack"; -} diff --git a/mixins/clone.js b/mixins/clone.js deleted file mode 100644 index 53b6524..0000000 --- a/mixins/clone.js +++ /dev/null @@ -1,3 +0,0 @@ -module.exports = function (repo) { - // TODO: Implement clone -}; \ No newline at end of file diff --git a/mixins/create-tree.js b/mixins/create-tree.js new file mode 100644 index 0000000..5137dae --- /dev/null +++ b/mixins/create-tree.js @@ -0,0 +1,148 @@ +"use strict"; + +var modes = require('../lib/modes.js'); + +module.exports = function (repo) { + repo.createTree = createTree; + + function createTree(entries, callback) { + if (!callback) return createTree.bind(null, entries); + callback = singleCall(callback); + if (!Array.isArray(entries)) { + entries = Object.keys(entries).map(function (path) { + var entry = entries[path]; + entry.path = path; + return entry; + }); + } + + // Tree paths that we need loaded + var toLoad = {}; + function markTree(path) { + while(true) { + if (toLoad[path]) return; + toLoad[path] = true; + trees[path] = { + add: [], + del: [], + tree: {} + }; + if (!path) break; + path = path.substring(0, path.lastIndexOf("/")); + } + } + + // Commands to run organized by tree path + var trees = {}; + + // Counter for parallel I/O operations + var left = 1; // One extra counter to protect again zalgo cache callbacks. + + // First pass, stubs out the trees structure, sorts adds from deletes, + // and saves any inline content blobs. 
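+    // As a purely illustrative example, entries might look like:
+    //   [ { path: "docs/readme.md", mode: modes.file, content: "# Hello" },
+    //     { path: "old-name.txt" } ]
+    // An entry carrying content is saved as a blob below; an entry with no
+    // mode is recorded as a deletion.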
+    entries.forEach(function (entry) {
+      var index = entry.path.lastIndexOf("/");
+      var parentPath = entry.path.substr(0, index);
+      var name = entry.path.substr(index + 1);
+      markTree(parentPath);
+      var tree = trees[parentPath];
+      var adds = tree.add;
+      var dels = tree.del;
+
+      if (!entry.mode) {
+        dels.push(name);
+        return;
+      }
+      var add = {
+        name: name,
+        mode: entry.mode,
+        hash: entry.hash
+      };
+      adds.push(add);
+      if (entry.hash) return;
+      left++;
+      repo.saveAs("blob", entry.content, function (err, hash) {
+        if (err) return callback(err);
+        add.hash = hash;
+        check();
+      });
+    });
+
+    // Preload the base trees
+    if (entries.base) loadTree("", entries.base);
+
+    // Check just in case there was no IO to perform
+    check();
+
+    function loadTree(path, hash) {
+      left++;
+      delete toLoad[path];
+      repo.loadAs("tree", hash, function (err, tree) {
+        if (err) return callback(err);
+        trees[path].tree = tree;
+        Object.keys(tree).forEach(function (name) {
+          var childPath = path ? path + "/" + name : name;
+          if (toLoad[childPath]) loadTree(childPath, tree[name].hash);
+        });
+        check();
+      });
+    }
+
+    function check() {
+      if (--left) return;
+      findLeaves().forEach(processLeaf);
+    }
+
+    function processLeaf(path) {
+      var entry = trees[path];
+      delete trees[path];
+      var tree = entry.tree;
+      entry.del.forEach(function (name) {
+        delete tree[name];
+      });
+      entry.add.forEach(function (item) {
+        tree[item.name] = {
+          mode: item.mode,
+          hash: item.hash
+        };
+      });
+      left++;
+      repo.saveAs("tree", tree, function (err, hash, tree) {
+        if (err) return callback(err);
+        if (!path) return callback(null, hash, tree);
+        var index = path.lastIndexOf("/");
+        var parentPath = path.substring(0, index);
+        var name = path.substring(index + 1);
+        trees[parentPath].add.push({
+          name: name,
+          mode: modes.tree,
+          hash: hash
+        });
+        if (--left) return;
+        findLeaves().forEach(processLeaf);
+      });
+    }
+
+    function findLeaves() {
+      var paths = Object.keys(trees);
+      var parents = {};
+      paths.forEach(function (path) {
+        if (!path) return;
+        var parent = path.substring(0, path.lastIndexOf("/"));
+        parents[parent] = true;
+      });
+      return paths.filter(function (path) {
+        return !parents[path];
+      });
+    }
+  }
+};
+
+function singleCall(callback) {
+  var done = false;
+  return function () {
+    if (done) return console.warn("Discarding extra callback");
+    done = true;
+    return callback.apply(this, arguments);
+  };
+}
diff --git a/mixins/delay.js b/mixins/delay.js
new file mode 100644
index 0000000..8291224
--- /dev/null
+++ b/mixins/delay.js
@@ -0,0 +1,51 @@
+"use strict";
+
+module.exports = function (repo, ms) {
+  var saveAs = repo.saveAs;
+  var loadAs = repo.loadAs;
+  var readRef = repo.readRef;
+  var updateRef = repo.updateRef;
+  var createTree = repo.createTree;
+
+  repo.saveAs = saveAsDelayed;
+  repo.loadAs = loadAsDelayed;
+  repo.readRef = readRefDelayed;
+  repo.updateRef = updateRefDelayed;
+  if (createTree) repo.createTree = createTreeDelayed;
+
+  function saveAsDelayed(type, value, callback) {
+    if (!callback) return saveAsDelayed.bind(repo, type, value);
+    setTimeout(function () {
+      return saveAs.call(repo, type, value, callback);
+    }, ms);
+  }
+
+  function loadAsDelayed(type, hash, callback) {
+    if (!callback) return loadAsDelayed.bind(repo, type, hash);
+    setTimeout(function () {
+      return loadAs.call(repo, type, hash, callback);
+    }, ms);
+  }
+
+  function readRefDelayed(ref, callback) {
+    if (!callback) return readRefDelayed.bind(repo, ref);
+    setTimeout(function () {
+      return readRef.call(repo, ref, callback);
+    }, ms);
+  }
+
+  function updateRefDelayed(ref, hash, callback) {
+    if (!callback) return updateRefDelayed.bind(repo, ref, hash);
+    setTimeout(function () {
+      return updateRef.call(repo, ref, hash, callback);
+    }, ms);
+  }
+
+  function createTreeDelayed(entries, callback) {
+    if (!callback) return createTreeDelayed.bind(repo, entries);
+    setTimeout(function () {
+      return createTree.call(repo, entries, callback);
+    }, ms);
+  }
+
+};
diff --git a/mixins/fall-through.js b/mixins/fall-through.js
new file mode 100644
index 0000000..3953499
--- /dev/null
+++ b/mixins/fall-through.js
@@ -0,0 +1,26 @@
+var modes = require('../lib/modes');
+
+module.exports = function (local, remote) {
+  var loadAs = local.loadAs;
+  local.loadAs = newLoadAs;
+  function newLoadAs(type, hash, callback) {
+    if (!callback) return newLoadAs.bind(local, type, hash);
+    loadAs.call(local, type, hash, function (err, body) {
+      if (err) return callback(err);
+      if (body === undefined) return remote.loadAs(type, hash, callback);
+      callback(null, body);
+    });
+  }
+
+  var readRef = local.readRef;
+  local.readRef = newReadRef;
+  function newReadRef(ref, callback) {
+    if (!callback) return newReadRef.bind(local, ref);
+    readRef.call(local, ref, function (err, body) {
+      if (err) return callback(err);
+      if (body === undefined) return remote.readRef(ref, callback);
+      callback(null, body);
+    });
+  }
+
+};
diff --git a/mixins/formats.js b/mixins/formats.js
new file mode 100644
index 0000000..88ac21f
--- /dev/null
+++ b/mixins/formats.js
@@ -0,0 +1,133 @@
+"use strict";
+
+var bodec = require('bodec');
+var treeMap = require('../lib/object-codec').treeMap;
+
+module.exports = function (repo) {
+  var loadAs = repo.loadAs;
+  repo.loadAs = newLoadAs;
+  var saveAs = repo.saveAs;
+  repo.saveAs = newSaveAs;
+
+  function newLoadAs(type, hash, callback) {
+    if (!callback) return newLoadAs.bind(repo, type, hash);
+    var realType = type === "text" ? "blob":
+                   type === "array" ? "tree" : type;
+    return loadAs.call(repo, realType, hash, onLoad);
+
+    function onLoad(err, body, hash) {
+      if (body === undefined) return callback(err);
+      if (type === "text") body = bodec.toUnicode(body);
+      if (type === "array") body = toArray(body);
+      return callback(err, body, hash);
+    }
+  }
+
+  function newSaveAs(type, body, callback) {
+    if (!callback) return newSaveAs.bind(repo, type, body);
+    type = type === "text" ? "blob":
+           type === "array" ? "tree" : type;
+    if (type === "blob") {
+      if (typeof body === "string") {
+        body = bodec.fromUnicode(body);
+      }
+    }
+    else if (type === "tree") {
+      body = normalizeTree(body);
+    }
+    else if (type === "commit") {
+      body = normalizeCommit(body);
+    }
+    else if (type === "tag") {
+      body = normalizeTag(body);
+    }
+    return saveAs.call(repo, type, body, callback);
+  }
+
+};
+
+function toArray(tree) {
+  return Object.keys(tree).map(treeMap, tree);
+}
+
+function normalizeTree(body) {
+  var type = body && typeof body;
+  if (type !== "object") {
+    throw new TypeError("Tree body must be array or object");
+  }
+  var tree = {}, i, l, entry;
+  // If array form is passed in, convert to object form.
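+  // (For illustration, [{name: "a", mode: 0100644, hash: h}] and
+  // {a: {mode: 0100644, hash: h}} describe the same one-entry tree.)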
+ if (Array.isArray(body)) { + for (i = 0, l = body.length; i < l; i++) { + entry = body[i]; + tree[entry.name] = { + mode: entry.mode, + hash: entry.hash + }; + } + } + else { + var names = Object.keys(body); + for (i = 0, l = names.length; i < l; i++) { + var name = names[i]; + entry = body[name]; + tree[name] = { + mode: entry.mode, + hash: entry.hash + }; + } + } + return tree; +} + +function normalizeCommit(body) { + if (!body || typeof body !== "object") { + throw new TypeError("Commit body must be an object"); + } + if (!(body.tree && body.author && body.message)) { + throw new TypeError("Tree, author, and message are required for commits"); + } + var parents = body.parents || (body.parent ? [ body.parent ] : []); + if (!Array.isArray(parents)) { + throw new TypeError("Parents must be an array"); + } + var author = normalizePerson(body.author); + var committer = body.committer ? normalizePerson(body.committer) : author; + return { + tree: body.tree, + parents: parents, + author: author, + committer: committer, + message: body.message + }; +} + +function normalizeTag(body) { + if (!body || typeof body !== "object") { + throw new TypeError("Tag body must be an object"); + } + if (!(body.object && body.type && body.tag && body.tagger && body.message)) { + throw new TypeError("Object, type, tag, tagger, and message required"); + } + return { + object: body.object, + type: body.type, + tag: body.tag, + tagger: normalizePerson(body.tagger), + message: body.message + }; +} + +function normalizePerson(person) { + if (!person || typeof person !== "object") { + throw new TypeError("Person must be an object"); + } + if (typeof person.name !== "string" || typeof person.email !== "string") { + throw new TypeError("Name and email are required for person fields"); + } + return { + name: person.name, + email: person.email, + date: person.date || new Date() + }; +} diff --git a/mixins/fs-db.js b/mixins/fs-db.js new file mode 100644 index 0000000..12e1cb0 --- /dev/null +++ b/mixins/fs-db.js @@ -0,0 +1,339 @@ +"use strict"; +var bodec = require('bodec'); +var inflate = require('../lib/inflate'); +var deflate = require('../lib/deflate'); +var codec = require('../lib/object-codec'); +var parsePackEntry = require('../lib/pack-codec').parseEntry; +var applyDelta = require('../lib/apply-delta'); +var sha1 = require('git-sha1'); +var pathJoin = require('path').join; + +// The fs object has the following interface: +// - readFile(path) => binary +// Must also call callback() with no arguments if the file does not exist. +// - readChunk(path, start, end) => binary +// Must also call callback() with no arguments if the file does not exist. +// - writeFile(path, binary) => +// Must also make every directory up to parent of path. +// - readDir(path) => array +// Must also call callback() with no arguments if the file does not exist. +// The repo is expected to have a rootPath property that points to +// the .git folder within the filesystem. 
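+// Note that this mixin also calls fs.rename(tmpPath, path, callback) when
+// writing refs and loose objects, so adapters must provide rename as well.
+// A rough Node.js adapter sketch (illustrative only, not shipped with
+// js-git) might start like this:
+//
+//   var fs = require('fs');
+//   exports.readFile = function (path, callback) {
+//     fs.readFile(path, function (err, binary) {
+//       if (err && err.code === "ENOENT") return callback();
+//       callback(err, binary);
+//     });
+//   };
+//   exports.rename = fs.rename;
+//
+// readChunk, writeFile (with recursive directory creation), and readDir
+// would follow the same pattern of mapping ENOENT to an empty callback().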
+module.exports = function (repo, fs) { + + var cachedIndexes = {}; + + repo.loadAs = loadAs; + repo.saveAs = saveAs; + repo.loadRaw = loadRaw; + repo.saveRaw = saveRaw; + repo.readRef = readRef; + repo.updateRef = updateRef; + repo.hasHash = hasHash; + repo.init = init; + repo.setShallow = setShallow; + + function init(ref, callback) { + if (!callback) return init.bind(null, ref); + ref = ref || "refs/heads/master"; + var path = pathJoin(repo.rootPath, "HEAD"); + fs.writeFile(path, "ref: " + ref, callback); + } + + function setShallow(ref, callback) { + if (!callback) return setShallow.bind(null, ref); + var path = pathJoin(repo.rootPath, "shallow"); + fs.writeFile(path, ref, callback); + } + + function updateRef(ref, hash, callback) { + if (!callback) return updateRef.bind(repo, ref, hash); + var path = pathJoin(repo.rootPath, ref); + var lock = path + ".lock"; + fs.writeFile(lock, bodec.fromRaw(hash + "\n"), function(err) { + if(err) return callback(err); + fs.rename(lock, path, callback); + }); + } + + function readRef(ref, callback) { + if (!callback) return readRef.bind(repo, ref); + var path = pathJoin(repo.rootPath, ref); + fs.readFile(path, function (err, binary) { + if (err) return callback(err); + if (binary === undefined) { + return readPackedRef(ref, callback); + } + var hash; + try { hash = bodec.toRaw(binary).trim(); } + catch (err) { return callback(err); } + callback(null, hash); + }); + } + + function readPackedRef(ref, callback) { + var path = pathJoin(repo.rootPath, "packed-refs"); + fs.readFile(path, function (err, binary) { + if (binary === undefined) return callback(err); + var hash; + try { + var text = bodec.toRaw(binary); + var index = text.indexOf(ref); + if (index >= 0) { + hash = text.substring(index - 41, index - 1); + } + } + catch (err) { + return callback(err); + } + callback(null, hash); + }); + } + + function saveAs(type, body, callback) { + if (!callback) return saveAs.bind(repo, type, body); + var raw, hash; + try { + raw = codec.frame({ + type: type, + body: codec.encoders[type](body) + }); + hash = sha1(raw); + } + catch (err) { return callback(err); } + saveRaw(hash, raw, function (err) { + if (err) return callback(err); + callback(null, hash); + }); + } + + function saveRaw(hash, raw, callback) { + if (!callback) return saveRaw.bind(repo, hash, raw); + var buffer, path; + try { + if (sha1(raw) !== hash) { + throw new Error("Save data does not match hash"); + } + buffer = deflate(raw); + path = hashToPath(hash); + } + catch (err) { return callback(err); } + // Try to read the object first. 
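+    // (Git objects are immutable and content-addressed, so if the hash is
+    // already present on disk there is nothing new to write.)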
+ loadRaw(hash, function (err, data) { + // If it already exists, we're done + if (data) return callback(); + // Otherwise write a new file + var tmp = path.replace(/[0-9a-f]+$/, 'tmp_obj_' + Math.random().toString(36).substr(2)) + fs.writeFile(tmp, buffer, function(err) { + if(err) return callback(err); + fs.rename(tmp, path, callback); + }); + }); + } + + function loadAs(type, hash, callback) { + if (!callback) return loadAs.bind(repo, type, hash); + loadRaw(hash, function (err, raw) { + if (raw === undefined) return callback(err); + var body; + try { + raw = codec.deframe(raw); + if (raw.type !== type) throw new TypeError("Type mismatch"); + body = codec.decoders[raw.type](raw.body); + } + catch (err) { return callback(err); } + callback(null, body); + }); + } + + function hasHash(hash, callback) { + if (!callback) return hasHash.bind(repo, hash); + loadRaw(hash, function (err, body) { + if (err) return callback(err); + return callback(null, !!body); + }); + } + + function loadRaw(hash, callback) { + if (!callback) return loadRaw.bind(repo, hash); + var path = hashToPath(hash); + fs.readFile(path, function (err, buffer) { + if (err) return callback(err); + if (buffer) { + var raw; + try { raw = inflate(buffer); } + catch (err) { return callback(err); } + return callback(null, raw); + } + return loadRawPacked(hash, callback); + }); + } + + function loadRawPacked(hash, callback) { + var packDir = pathJoin(repo.rootPath, "objects/pack"); + var packHashes = []; + fs.readDir(packDir, function (err, entries) { + if (!entries) return callback(err); + entries.forEach(function (name) { + var match = name.match(/pack-([0-9a-f]{40}).idx/); + if (match) packHashes.push(match[1]); + }); + start(); + }); + + function start() { + var packHash = packHashes.pop(); + var offsets; + if (!packHash) return callback(); + if (!cachedIndexes[packHash]) loadIndex(packHash); + else onIndex(); + + function loadIndex() { + var indexFile = pathJoin(packDir, "pack-" + packHash + ".idx" ); + fs.readFile(indexFile, function (err, buffer) { + if (!buffer) return callback(err); + try { + cachedIndexes[packHash] = parseIndex(buffer); + } + catch (err) { return callback(err); } + onIndex(); + }); + } + + function onIndex() { + var cached = cachedIndexes[packHash]; + var packFile = pathJoin(packDir, "pack-" + packHash + ".pack" ); + var index = cached.byHash[hash]; + if (!index) return start(); + offsets = cached.offsets; + loadChunk(packFile, index.offset, callback); + } + + function loadChunk(packFile, start, callback) { + var index = offsets.indexOf(start); + if (index < 0) { + var error = new Error("Can't find chunk starting at " + start); + return callback(error); + } + var end = index + 1 < offsets.length ? 
offsets[index + 1] : -20;
+        fs.readChunk(packFile, start, end, function (err, chunk) {
+          if (!chunk) return callback(err);
+          var raw;
+          try {
+            var entry = parsePackEntry(chunk);
+            if (entry.type === "ref-delta") {
+              return loadRaw.call(repo, entry.ref, onBase);
+            }
+            else if (entry.type === "ofs-delta") {
+              return loadChunk(packFile, start - entry.ref, onBase);
+            }
+            raw = codec.frame(entry);
+          }
+          catch (err) { return callback(err); }
+          callback(null, raw);
+
+          function onBase(err, base) {
+            if (!base) return callback(err);
+            var object = codec.deframe(base);
+            var buffer;
+            try {
+              object.body = applyDelta(entry.body, object.body);
+              buffer = codec.frame(object);
+            }
+            catch (err) { return callback(err); }
+            callback(null, buffer);
+          }
+        });
+      }
+
+    }
+  }
+
+  function hashToPath(hash) {
+    return pathJoin(repo.rootPath, "objects", hash.substring(0, 2), hash.substring(2));
+  }
+
+};
+
+function parseIndex(buffer) {
+  if (readUint32(buffer, 0) !== 0xff744f63 ||
+      readUint32(buffer, 4) !== 0x00000002) {
+    throw new Error("Only v2 pack indexes supported");
+  }
+
+  // Get the number of hashes in index
+  // This is the value of the last fan-out entry
+  var hashOffset = 8 + 255 * 4;
+  var length = readUint32(buffer, hashOffset);
+  hashOffset += 4;
+  var crcOffset = hashOffset + 20 * length;
+  var lengthOffset = crcOffset + 4 * length;
+  var largeOffset = lengthOffset + 4 * length;
+  var checkOffset = largeOffset;
+  var indexes = new Array(length);
+  for (var i = 0; i < length; i++) {
+    var start = hashOffset + i * 20;
+    var hash = bodec.toHex(bodec.slice(buffer, start, start + 20));
+    var crc = readUint32(buffer, crcOffset + i * 4);
+    var offset = readUint32(buffer, lengthOffset + i * 4);
+    if (offset & 0x80000000) {
+      offset = largeOffset + (offset & 0x7fffffff) * 8;
+      checkOffset = Math.max(checkOffset, offset + 8);
+      offset = readUint64(buffer, offset);
+    }
+    indexes[i] = {
+      hash: hash,
+      offset: offset,
+      crc: crc
+    };
+  }
+  var packChecksum = bodec.toHex(bodec.slice(buffer, checkOffset, checkOffset + 20));
+  var checksum = bodec.toHex(bodec.slice(buffer, checkOffset + 20, checkOffset + 40));
+  if (sha1(bodec.slice(buffer, 0, checkOffset + 20)) !== checksum) {
+    throw new Error("Checksum mismatch");
+  }
+
+  var byHash = {};
+  indexes.sort(function (a, b) {
+    return a.offset - b.offset;
+  });
+  indexes.forEach(function (data) {
+    byHash[data.hash] = {
+      offset: data.offset,
+      crc: data.crc,
+    };
+  });
+  var offsets = indexes.map(function (entry) {
+    return entry.offset;
+  }).sort(function (a, b) {
+    return a - b;
+  });
+
+  return {
+    offsets: offsets,
+    byHash: byHash,
+    checksum: packChecksum
+  };
+}
+
+function readUint32(buffer, offset) {
+  return (buffer[offset] << 24 |
+          buffer[offset + 1] << 16 |
+          buffer[offset + 2] << 8 |
+          buffer[offset + 3] << 0) >>> 0;
+}
+
+// Yes this will lose precision over 2^53, but that can't be helped when
+// returning a single integer.
+// We simply won't support packfiles over 8 petabytes. I'm ok with that.
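+// For example, an offset of 10000000000 (0x2540BE400) arrives as
+// hi = 0x00000002 and lo = 0x540BE400, and is reconstructed below as
+// 2 * 0x100000000 + 1410065408 = 10000000000. Precision holds up to
+// 2^53 bytes, which is 8 PiB, hence the limit mentioned above.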
+function readUint64(buffer, offset) { + var hi = (buffer[offset] << 24 | + buffer[offset + 1] << 16 | + buffer[offset + 2] << 8 | + buffer[offset + 3] << 0) >>> 0; + var lo = (buffer[offset + 4] << 24 | + buffer[offset + 5] << 16 | + buffer[offset + 6] << 8 | + buffer[offset + 7] << 0) >>> 0; + return hi * 0x100000000 + lo; +} diff --git a/mixins/indexed-db.js b/mixins/indexed-db.js new file mode 100644 index 0000000..19b43a5 --- /dev/null +++ b/mixins/indexed-db.js @@ -0,0 +1,147 @@ +"use strict"; +/*global indexedDB*/ + +var codec = require('../lib/object-codec.js'); +var sha1 = require('git-sha1'); +var modes = require('../lib/modes.js'); +var db; + +mixin.init = init; + +mixin.loadAs = loadAs; +mixin.saveAs = saveAs; +module.exports = mixin; + +function init(callback) { + + db = null; + var request = indexedDB.open("tedit", 1); + + // We can only create Object stores in a versionchange transaction. + request.onupgradeneeded = function(evt) { + var db = evt.target.result; + + if (evt.dataLoss && evt.dataLoss !== "none") { + return callback(new Error(evt.dataLoss + ": " + evt.dataLossMessage)); + } + + // A versionchange transaction is started automatically. + evt.target.transaction.onerror = onError; + + if(db.objectStoreNames.contains("objects")) { + db.deleteObjectStore("objects"); + } + if(db.objectStoreNames.contains("refs")) { + db.deleteObjectStore("refs"); + } + + db.createObjectStore("objects", {keyPath: "hash"}); + db.createObjectStore("refs", {keyPath: "path"}); + }; + + request.onsuccess = function (evt) { + db = evt.target.result; + callback(); + }; + request.onerror = onError; +} + + +function mixin(repo, prefix) { + if (!prefix) throw new Error("Prefix required"); + repo.refPrefix = prefix; + repo.saveAs = saveAs; + repo.loadAs = loadAs; + repo.readRef = readRef; + repo.updateRef = updateRef; + repo.hasHash = hasHash; +} + +function onError(evt) { + console.error("error", evt.target.error); +} + +function saveAs(type, body, callback, forcedHash) { + if (!callback) return saveAs.bind(this, type, body); + var hash; + try { + var buffer = codec.frame({type:type,body:body}); + hash = forcedHash || sha1(buffer); + } + catch (err) { return callback(err); } + var trans = db.transaction(["objects"], "readwrite"); + var store = trans.objectStore("objects"); + var entry = { hash: hash, type: type, body: body }; + var request = store.put(entry); + request.onsuccess = function() { + // console.warn("SAVE", type, hash); + callback(null, hash, body); + }; + request.onerror = function(evt) { + callback(new Error(evt.value)); + }; +} + +function loadAs(type, hash, callback) { + if (!callback) return loadAs.bind(this, type, hash); + loadRaw(hash, function (err, entry) { + if (!entry) return callback(err); + if (type !== entry.type) { + return callback(new TypeError("Type mismatch")); + } + callback(null, entry.body, hash); + }); +} + +function loadRaw(hash, callback) { + var trans = db.transaction(["objects"], "readwrite"); + var store = trans.objectStore("objects"); + var request = store.get(hash); + request.onsuccess = function(evt) { + var entry = evt.target.result; + if (!entry) return callback(); + return callback(null, entry); + }; + request.onerror = function(evt) { + callback(new Error(evt.value)); + }; +} + +function hasHash(hash, callback) { + if (!callback) return hasHash.bind(this, hash); + loadRaw(hash, function (err, body) { + if (err) return callback(err); + return callback(null, !!body); + }); +} + +function readRef(ref, callback) { + if (!callback) return 
readRef.bind(this, ref);
+  var key = this.refPrefix + "/" + ref;
+  var trans = db.transaction(["refs"], "readwrite");
+  var store = trans.objectStore("refs");
+  var request = store.get(key);
+  request.onsuccess = function(evt) {
+    var entry = evt.target.result;
+    if (!entry) return callback();
+    callback(null, entry.hash);
+  };
+  request.onerror = function(evt) {
+    callback(new Error(evt.value));
+  };
+}
+
+function updateRef(ref, hash, callback) {
+  if (!callback) return updateRef.bind(this, ref, hash);
+  var key = this.refPrefix + "/" + ref;
+  var trans = db.transaction(["refs"], "readwrite");
+  var store = trans.objectStore("refs");
+  var entry = { path: key, hash: hash };
+  var request = store.put(entry);
+  request.onsuccess = function() {
+    callback();
+  };
+  request.onerror = function(evt) {
+    callback(new Error(evt.value));
+  };
+}
diff --git a/mixins/mem-cache.js b/mixins/mem-cache.js
new file mode 100644
index 0000000..0434481
--- /dev/null
+++ b/mixins/mem-cache.js
@@ -0,0 +1,53 @@
+"use strict";
+
+var encoders = require('../lib/object-codec').encoders;
+var decoders = require('../lib/object-codec').decoders;
+var Binary = require('bodec').Binary;
+
+var cache = memCache.cache = {};
+module.exports = memCache;
+
+function memCache(repo) {
+  var loadAs = repo.loadAs;
+  repo.loadAs = loadAsCached;
+  function loadAsCached(type, hash, callback) {
+    if (!callback) return loadAsCached.bind(this, type, hash);
+    if (hash in cache) return callback(null, dupe(type, cache[hash]), hash);
+    loadAs.call(repo, type, hash, function (err, value) {
+      if (value === undefined) return callback(err);
+      if (type !== "blob" || value.length < 100) {
+        cache[hash] = dupe(type, value);
+      }
+      return callback.apply(this, arguments);
+    });
+  }
+
+  var saveAs = repo.saveAs;
+  repo.saveAs = saveAsCached;
+  function saveAsCached(type, value, callback) {
+    if (!callback) return saveAsCached.bind(this, type, value);
+    value = dupe(type, value);
+    saveAs.call(repo, type, value, function (err, hash) {
+      if (err) return callback(err);
+      if (type !== "blob" || value.length < 100) {
+        cache[hash] = value;
+      }
+      return callback(null, hash, value);
+    });
+  }
+}
+function dupe(type, value) {
+  if (type === "blob") {
+    if (value.length >= 100) return value;
+    return new Binary(value);
+  }
+  return decoders[type](encoders[type](value));
+}
+
+function deepFreeze(obj) {
+  Object.freeze(obj);
+  Object.keys(obj).forEach(function (key) {
+    var value = obj[key];
+    if (typeof value === "object") deepFreeze(value);
+  });
+}
diff --git a/mixins/mem-db.js b/mixins/mem-db.js
new file mode 100644
index 0000000..fd9a33d
--- /dev/null
+++ b/mixins/mem-db.js
@@ -0,0 +1,95 @@
+"use strict";
+
+var defer = require('../lib/defer.js');
+var codec = require('../lib/object-codec.js');
+var sha1 = require('git-sha1');
+
+module.exports = mixin;
+var isHash = /^[0-9a-f]{40}$/;
+
+function mixin(repo) {
+  var objects = {};
+  var refs = {};
+
+  repo.saveAs = saveAs;
+  repo.loadAs = loadAs;
+  repo.saveRaw = saveRaw;
+  repo.loadRaw = loadRaw;
+  repo.hasHash = hasHash;
+  repo.readRef = readRef;
+  repo.updateRef = updateRef;
+  repo.listRefs = listRefs;
+
+  function readRef(ref, callback) {
+    return makeAsync(function () {
+      return refs[ref];
+    }, callback);
+  }
+
+  function listRefs(prefix, callback) {
+    return makeAsync(function () {
+      var regex = prefix && new RegExp("^" + prefix + "[/$]");
+      var out = {};
+      Object.keys(refs).forEach(function (name) {
+        if (regex && !regex.test(name)) return;
+        out[name] = refs[name];
+      });
+      return out;
+    },
callback); + } + + function updateRef(ref, hash, callback) { + return makeAsync(function () { + return (refs[ref] = hash); + }, callback); + } + + function hasHash(hash, callback) { + return makeAsync(function () { + if (!isHash.test(hash)) hash = refs[hash]; + return hash in objects; + }, callback); + } + + function saveAs(type, body, callback) { + return makeAsync(function () { + var buffer = codec.frame({type:type,body:body}); + var hash = sha1(buffer); + objects[hash] = buffer; + return hash; + }, callback); + } + + function saveRaw(hash, buffer, callback) { + return makeAsync(function () { + objects[hash] = buffer; + }, callback); + } + + function loadAs(type, hash, callback) { + return makeAsync(function () { + if (!isHash.test(hash)) hash = refs[hash]; + var buffer = objects[hash]; + if (!buffer) return []; + var obj = codec.deframe(buffer, true); + if (obj.type !== type) throw new TypeError("Type mismatch"); + return obj.body; + }, callback); + } + + function loadRaw(hash, callback) { + return makeAsync(function () { + return objects[hash]; + }, callback); + } +} + +function makeAsync(fn, callback) { + if (!callback) return makeAsync.bind(null, fn); + defer(function () { + var out; + try { out = fn(); } + catch (err) { return callback(err); } + callback(null, out); + }); +} diff --git a/mixins/objects.js b/mixins/objects.js deleted file mode 100644 index e2c545f..0000000 --- a/mixins/objects.js +++ /dev/null @@ -1,126 +0,0 @@ -var sha1 = require('../lib/sha1.js'); -var frame = require('../lib/frame.js'); -var deframe = require('../lib/deframe.js'); -var encoders = require('../lib/encoders.js'); -var decoders = require('../lib/decoders.js'); -var parseAscii = require('../lib/parseascii.js'); -var isHash = require('../lib/ishash.js'); - -// Add "objects" capabilities to a repo using db as storage. -module.exports = function (repo) { - - // Add Object store capability to the system - repo.load = load; // (hash-ish) -> object - repo.save = save; // (object) -> hash - repo.loadRaw = loadRaw; // (hash) -> buffer - repo.saveRaw = saveRaw; // (hash, buffer) - repo.has = has; // (hash) -> true or false - repo.loadAs = loadAs; // (type, hash-ish) -> value - repo.saveAs = saveAs; // (type, value) -> hash - repo.remove = remove; // (hash) - - // This is a fallback resolve in case there is no refs system installed. 
- if (!repo.resolve) repo.resolve = function (hash, callback) { - if (isHash(hash)) return callback(null, hash); - return callback(new Error("This repo only supports direct hashes")); - }; - -}; - -function load(hashish, callback) { - if (!callback) return load.bind(this, hashish); - var hash; - var repo = this; - var db = repo.db; - return repo.resolve(hashish, onHash); - - function onHash(err, result) { - if (result === undefined) return callback(err); - hash = result; - return db.get(hash, onBuffer); - } - - function onBuffer(err, buffer) { - if (buffer === undefined) return callback(err); - var type, object; - try { - if (sha1(buffer) !== hash) { - throw new Error("Hash checksum failed for " + hash); - } - var pair = deframe(buffer); - type = pair[0]; - buffer = pair[1]; - object = { - type: type, - body: decoders[type](buffer) - }; - } catch (err) { - if (err) return callback(err); - } - return callback(null, object, hash); - } -} - -function loadRaw(hash, callback) { - return this.db.get(hash, callback); -} - -function saveRaw(hash, buffer, callback) { - return this.db.set(hash, buffer, callback); -} - -function has(hash, callback) { - return this.db.has(hash, callback); -} - -function save(object, callback) { - if (!callback) return save.bind(this, object); - var buffer, hash; - var repo = this; - var db = repo.db; - try { - buffer = encoders[object.type](object.body); - buffer = frame(object.type, buffer); - hash = sha1(buffer); - } - catch (err) { - return callback(err); - } - return db.set(hash, buffer, onSave); - - function onSave(err) { - if (err) return callback(err); - return callback(null, hash); - } -} - -function remove(hash, callback) { - if (!callback) return remove.bind(this, hash); - if (!isHash(hash)) return callback(new Error("Invalid hash: " + hash)); - var repo = this; - var db = repo.db; - return db.del(hash, callback); -} - -function loadAs(type, hashish, callback) { - if (!callback) return loadAs.bind(this, type, hashish); - return this.load(hashish, onObject); - - function onObject(err, object, hash) { - if (object === undefined) return callback(err); - if (type === "text") { - type = "blob"; - object.body = parseAscii(object.body, 0, object.body.length); - } - if (object.type !== type) { - return new Error("Expected " + type + ", but found " + object.type); - } - return callback(null, object.body, hash); - } -} - -function saveAs(type, body, callback) { - if (!callback) return saveAs.bind(this, type, body); - if (type === "text") type = "blob"; - return this.save({ type: type, body: body }, callback); -} diff --git a/mixins/packops.js b/mixins/pack-ops.js similarity index 70% rename from mixins/packops.js rename to mixins/pack-ops.js index e49b1c3..dece5ac 100644 --- a/mixins/packops.js +++ b/mixins/pack-ops.js @@ -1,27 +1,28 @@ -var bops = require('bops'); -var deframe = require('../lib/deframe.js'); -var frame = require('../lib/frame.js'); -var sha1 = require('../lib/sha1.js'); +"use strict"; + +var sha1 = require('git-sha1'); var applyDelta = require('../lib/apply-delta.js'); -var pushToPull = require('push-to-pull'); +var codec = require('../lib/object-codec.js'); var decodePack = require('../lib/pack-codec.js').decodePack; -var packFrame = require('../lib/pack-codec.js').packFrame; +var encodePack = require('../lib/pack-codec.js').encodePack; +var makeChannel = require('culvert'); module.exports = function (repo) { - // packStream is a simple-stream containing raw packfile binary data + // packChannel is a writable culvert channel {put,drain} containing 
raw packfile binary data // opts can contain "onProgress" or "onError" hook functions. // callback will be called with a list of all unpacked hashes on success. - repo.unpack = unpack; // (packStream, opts) -> hashes + repo.unpack = unpack; // (packChannel, opts) => hashes // hashes is an array of hashes to pack - // callback will be a simple-stream containing raw packfile binary data - repo.pack = pack; // (hashes, opts) -> packStream + // packChannel will be a readable culvert channel {take} containing raw packfile binary data + repo.pack = pack; // (hashes, opts) => packChannel }; -function unpack(packStream, opts, callback) { - if (!callback) return unpack.bind(this, packStream, opts); +function unpack(packChannel, opts, callback) { + /*jshint validthis:true*/ + if (!callback) return unpack.bind(this, packChannel, opts); - packStream = pushToPull(decodePack)(packStream); + packChannel = applyParser(packChannel, decodePack, callback); var repo = this; @@ -35,7 +36,7 @@ function unpack(packStream, opts, callback) { // key is hash we're waiting for, value is array of items that are waiting. var pending = {}; - return packStream.read(onStats); + return packChannel.take(onStats); function onDone(err) { if (done) return; @@ -48,7 +49,7 @@ function unpack(packStream, opts, callback) { if (err) return onDone(err); version = stats.version; num = stats.num; - packStream.read(onRead); + packChannel.take(onRead); } function objectProgress(more) { @@ -87,9 +88,9 @@ function unpack(packStream, opts, callback) { return repo.loadRaw(item.ref, function (err, buffer) { if (err) return onDone(err); if (!buffer) return onDone(new Error("Missing base image at " + item.ref)); - var target = deframe(buffer); - item.type = target[0]; - item.body = applyDelta(item.body, target[1]); + var target = codec.deframe(buffer); + item.type = target.type; + item.body = applyDelta(item.body, target.body); return saveValue(item); }); } @@ -98,7 +99,7 @@ function unpack(packStream, opts, callback) { var hasTarget = has[item.ref]; if (hasTarget === true) return resolveDelta(item); if (hasTarget === false) return enqueueDelta(item); - return repo.has(item.ref, function (err, value) { + return repo.hasHash(item.ref, function (err, value) { if (err) return onDone(err); has[item.ref] = value; if (value) return resolveDelta(item); @@ -107,7 +108,7 @@ function unpack(packStream, opts, callback) { } function saveValue(item) { - var buffer = frame(item.type, item.body); + var buffer = codec.frame(item); var hash = sha1(buffer); hashes[item.offset] = hash; has[hash] = true; @@ -125,61 +126,44 @@ function unpack(packStream, opts, callback) { function onSave(err) { if (err) return callback(err); - packStream.read(onRead); + packChannel.take(onRead); } function enqueueDelta(item) { var list = pending[item.ref]; if (!list) pending[item.ref] = [item]; else list.push(item); - packStream.read(onRead); + packChannel.take(onRead); } } // TODO: Implement delta refs to reduce stream size function pack(hashes, opts, callback) { + /*jshint validthis:true*/ if (!callback) return pack.bind(this, hashes, opts); var repo = this; - var sha1sum = sha1(); var i = 0, first = true, done = false; - return callback(null, { read: read, abort: callback }); + return callback(null, applyParser({ take: take }, encodePack)); - function read(callback) { + function take(callback) { if (done) return callback(); if (first) return readFirst(callback); var hash = hashes[i++]; if (hash === undefined) { - var sum = sha1sum.digest(); - done = true; - return 
callback(null, bops.from(sum, "hex")); + return callback(); } repo.loadRaw(hash, function (err, buffer) { if (err) return callback(err); if (!buffer) return callback(new Error("Missing hash: " + hash)); // Reframe with pack format header - var pair = deframe(buffer); - packFrame(pair[0], pair[1], function (err, buffer) { - if (err) return callback(err); - sha1sum.update(buffer); - callback(null, buffer); - }); + callback(null, codec.deframe(buffer)); }); } function readFirst(callback) { - var length = hashes.length; - var chunk = bops.create([ - 0x50, 0x41, 0x43, 0x4b, // PACK - 0, 0, 0, 2, // version 2 - length >> 24, // Num of objects - (length >> 16) & 0xff, - (length >> 8) & 0xff, - length & 0xff - ]); first = false; - sha1sum.update(chunk); - callback(null, chunk); + callback(null, {num: hashes.length}); } } @@ -193,3 +177,25 @@ function values(object) { return out; } + +function applyParser(stream, parser, onError) { + var extra = makeChannel(); + extra.put = parser(extra.put); + stream.take(onData); + + function onData(err, item) { + if (err) return onError(err); + var more; + try { more = extra.put(item); } + catch (err) { return onError(err); } + if (more) stream.take(onData); + else extra.drain(onDrain); + } + + function onDrain(err) { + if (err) return onError(err); + stream.take(onData); + } + + return { take: extra.take }; +} diff --git a/mixins/path-to-entry.js b/mixins/path-to-entry.js new file mode 100644 index 0000000..3615b7a --- /dev/null +++ b/mixins/path-to-entry.js @@ -0,0 +1,51 @@ +var cache = require('./mem-cache').cache; +var modes = require('../lib/modes'); + +module.exports = function (repo) { + repo.pathToEntry = pathToEntry; +}; + +function pathToEntry(rootTree, path, callback) { + if (!callback) return pathToEntry.bind(this, rootTree, path); + var repo = this; + var mode = modes.tree; + var hash = rootTree; + var parts = path.split("/").filter(Boolean); + var index = 0; + var cached; + loop(); + function loop() { + while (index < parts.length) { + if (mode === modes.tree) { + cached = cache[hash]; + if (!cached) return repo.loadAs("tree", hash, onLoad); + var entry = cached[parts[index]]; + if (!entry) return callback(); + mode = entry.mode; + hash = entry.hash; + index++; + continue; + } + if (modes.isFile(mode)) return callback(); + return callback(null, { + last: { + mode: mode, + hash: hash, + path: parts.slice(0, index).join("/"), + rest: parts.slice(index).join("/"), + } + }); + } + callback(null, { + mode: mode, + hash: hash + }); + } + + function onLoad(err, value) { + if (!value) return callback(err || new Error("Missing object: " + hash)); + cache[hash] = value; + loop(); + } + +} diff --git a/mixins/read-combiner.js b/mixins/read-combiner.js new file mode 100644 index 0000000..39f128d --- /dev/null +++ b/mixins/read-combiner.js @@ -0,0 +1,28 @@ +"use strict"; + +// This replaces loadAs with a version that batches concurrent requests for +// the same hash. 
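+//
+// For example (illustrative), if two callers ask for the same commit before
+// the first read completes, only one request reaches the backing store and
+// both callbacks fire with its result (or its error):
+//
+//   repo.loadAs("commit", hash, onCommit); // performs the real read
+//   repo.loadAs("commit", hash, onCommit); // joins the pending read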
+module.exports = function (repo) { + var pendingReqs = {}; + + var loadAs = repo.loadAs; + repo.loadAs = newLoadAs; + + function newLoadAs(type, hash, callback) { + if (!callback) return newLoadAs.bind(null, type, hash); + var list = pendingReqs[hash]; + if (list) { + if (list.type !== type) callback(new Error("Type mismatch")); + else list.push(callback); + return; + } + list = pendingReqs[hash] = [callback]; + list.type = type; + loadAs.call(repo, type, hash, function () { + delete pendingReqs[hash]; + for (var i = 0, l = list.length; i < l; i++) { + list[i].apply(this, arguments); + } + }); + } +}; diff --git a/mixins/refs.js b/mixins/refs.js deleted file mode 100644 index af8cdde..0000000 --- a/mixins/refs.js +++ /dev/null @@ -1,153 +0,0 @@ -var isHash = require('../lib/ishash.js'); - -module.exports = function (repo) { - // Refs - repo.resolve = resolve; // (hash-ish) -> hash - repo.updateHead = updateHead; // (hash) - repo.getHead = getHead; // () -> ref - repo.setHead = setHead; // (ref) - repo.readRef = readRef; // (ref) -> hash - repo.createRef = createRef; // (ref, hash) - repo.updateRef = updateRef; // (ref, hash) - repo.deleteRef = deleteRef; // (ref) - repo.listRefs = listRefs; // (prefix) -> refs -}; - -function resolve(hashish, callback) { - if (!callback) return resolve.bind(this, hashish); - hashish = hashish.trim(); - var repo = this, db = repo.db; - if (isHash(hashish)) return callback(null, hashish); - if (hashish === "HEAD") return repo.getHead(onBranch); - if ((/^refs\//).test(hashish)) { - return db.get(hashish, checkBranch); - } - return checkBranch(); - - function onBranch(err, ref) { - if (err) return callback(err); - if (!ref) return callback(); - return repo.resolve(ref, callback); - } - - function checkBranch(err, hash) { - if (err && err.code !== "ENOENT") return callback(err); - if (hash) { - return repo.resolve(hash, callback); - } - return db.get("refs/heads/" + hashish, checkTag); - } - - function checkTag(err, hash) { - if (err && err.code !== "ENOENT") return callback(err); - if (hash) { - return repo.resolve(hash, callback); - } - return db.get("refs/tags/" + hashish, final); - } - - function final(err, hash) { - if (err) return callback(err); - if (hash) { - return repo.resolve(hash, callback); - } - err = new Error("ENOENT: Cannot find " + hashish); - err.code = "ENOENT"; - return callback(err); - } -} - -function updateHead(hash, callback) { - if (!callback) return updateHead.bind(this, hash); - var ref; - var repo = this, db = repo.db; - return this.getHead(onBranch); - - function onBranch(err, result) { - if (err) return callback(err); - if (result === undefined) { - return repo.setHead("master", function (err) { - if (err) return callback(err); - onBranch(err, "refs/heads/master"); - }); - } - ref = result; - return db.set(ref, hash + "\n", callback); - } -} - -function getHead(callback) { - if (!callback) return getHead.bind(this); - var repo = this, db = repo.db; - return db.get("HEAD", onRead); - - function onRead(err, ref) { - if (err) return callback(err); - if (!ref) return callback(); - var match = ref.match(/^ref: *(.*)/); - if (!match) return callback(new Error("Invalid HEAD")); - return callback(null, match[1]); - } -} - -function setHead(branchName, callback) { - if (!callback) return setHead.bind(this, branchName); - var ref = "refs/heads/" + branchName; - return this.db.set("HEAD", "ref: " + ref + "\n", callback); -} - -function readRef(ref, callback) { - if (!callback) return readRef.bind(this, ref); - return this.db.get(ref, 
function (err, result) { - if (err) return callback(err); - if (!result) return callback(); - return callback(null, result.trim()); - }); -} - -function createRef(ref, hash, callback) { - if (!callback) return createRef.bind(this, ref, hash); - // TODO: should we check to make sure it doesn't exist first? - return this.db.set(ref, hash + "\n", callback); -} - -function updateRef(ref, hash, callback) { - if (!callback) return updateRef.bind(this, ref, hash); - // TODO: should we check to make sure it does exist first? - return this.db.set(ref, hash + "\n", callback); -} - -function deleteRef(ref, callback) { - if (!callback) return deleteRef.bind(this, ref); - return this.db.del(ref, callback); -} - -function listRefs(prefix, callback) { - if (!callback) return listRefs.bind(this, prefix); - if (!prefix) prefix = "refs\/"; - else if (!/^refs\//.test(prefix)) { - return callback(new TypeError("Invalid prefix: " + prefix)); - } - var db = this.db; - var refs = {}; - return db.keys(prefix, onKeys); - - function onKeys(err, keys) { - if (err) return callback(err); - var left = keys.length, done = false; - if (!left) return callback(null, refs); - keys.forEach(function (key) { - db.get(key, function (err, value) { - if (done) return; - if (err) { - done = true; - return callback(err); - } - refs[key] = value.trim(); - if (--left) return; - done = true; - callback(null, refs); - }); - }); - } -} diff --git a/mixins/server.js b/mixins/server.js deleted file mode 100644 index 288fbab..0000000 --- a/mixins/server.js +++ /dev/null @@ -1,289 +0,0 @@ -var parallel = require('../lib/parallel.js'); -var map = require('../lib/map.js'); -var each = require('../lib/each.js'); - -var bops = { - join: require('bops/join.js') -}; - -module.exports = function (repo) { - repo.uploadPack = uploadPack; - repo.receivePack = receivePack; -}; - -function uploadPack(remote, opts, callback) { - if (!callback) return uploadPack.bind(this, remote, opts); - var repo = this, refs, wants = {}, haves = {}, clientCaps = {}; - var packQueue = []; - var queueBytes = 0; - var queueLimit = 0; - return parallel({ - head: repo.getHead(), - refs: getRefs() - }, onHeadRef); - - // The peeled value of a ref (that is "ref^{}") MUST be immediately after - // the ref itself, if presented. A conforming server MUST peel the ref if - // it’s an annotated tag. - function getRefs(callback) { - if (!callback) return getRefs; - var refs; - repo.listRefs(null, onRefs); - - function onRefs(err, result) { - if (err) return callback(err); - refs = result; - parallel(map(refs, function (hash) { - return repo.load(hash); - }), onValues); - } - - function onValues(err, values) { - each(values, function (value, name) { - if (value.type !== "tag") return; - refs[name + "^{}"] = value.body.object; - }); - callback(null, refs); - } - } - - function onHeadRef(err, result) { - if (err) return callback(err); - var head = result.head; - refs = result.refs; - - // The returned response is a pkt-line stream describing each ref and its - // current value. The stream MUST be sorted by name according to the C - // locale ordering. - var keys = Object.keys(refs).sort(); - var lines = keys.map(function (ref) { - return refs[ref] + " " + ref; - }); - - // If HEAD is a valid ref, HEAD MUST appear as the first advertised ref. - // If HEAD is not a valid ref, HEAD MUST NOT appear in the advertisement - // list at all, but other refs may still appear. 
- if (head) lines.unshift(refs[head] + " HEAD"); - - // The stream MUST include capability declarations behind a NUL on the - // first ref. - // TODO: add "multi_ack" once it's implemented - // TODO: add "multi_ack_detailed" once it's implemented - // TODO: add "shallow" once it's implemented - // TODO: add "include-tag" once it's implemented - // TODO: add "thin-pack" once it's implemented - lines[0] += "\0no-progress side-band side-band-64k ofs-delta"; - - // Server SHOULD terminate each non-flush line using LF ("\n") terminator; - // client MUST NOT complain if there is no terminator. - lines.forEach(function (line) { - remote.write(line, null); - }); - - remote.write(null, null); - remote.read(onWant); - } - - function onWant(err, line) { - if (line === undefined) return callback(err); - if (line === null) { - return remote.read(onHave); - } - var match = line.match(/^want ([0-9a-f]{40})(?: (.+))?\n?$/); - if (!match) { - return callback(new Error("Invalid want: " + line)); - } - var hash = match[1]; - if (match[2]) clientCaps = parseCaps(match[2]); - wants[hash] = true; - remote.read(onWant); - } - - function onHave(err, line) { - if (line === undefined) return callback(err); - var match = line.match(/^(done|have)(?: ([0-9a-f]{40}))?\n?$/); - if (!match) { - return callback(new Error("Unexpected have line: " + line)); - } - if (match[1] === "have") { - haves[match[2]] = true; - return remote.read(onHave); - } - if (Object.keys(haves).length) { - throw new Error("TODO: handle haves"); - } - remote.write("NAK\n", null); - walkRepo(repo, wants, haves, onHashes); - } - - function onHashes(err, hashes) { - if (err) return callback(err); - if (clientCaps["side-band-64k"]) queueLimit = 65519; - else if (clientCaps["size-band"]) queueLimit = 999; - repo.pack(hashes, opts, onPack); - } - - function flush(callback) { - if (!queueBytes) return callback(); - var chunk = bops.join(packQueue, queueBytes); - packQueue.length = 0; - queueBytes = 0; - remote.write(["pack", chunk], callback); - } - - function onPack(err, packStream) { - if (err) return callback(err); - onWrite(); - - function onRead(err, chunk) { - if (err) return callback(err); - if (chunk === undefined) return flush(onFlush); - if (!queueLimit) { - return remote.write(chunk, onWrite); - } - var length = chunk.length; - if (queueBytes + length <= queueLimit) { - packQueue.push(chunk); - queueBytes += length; - return onWrite(); - } - if (queueBytes) { - flush(function (err) { - if (err) return callback(err); - return onRead(null, chunk); - }); - } - remote.write(["pack", bops.subarray(chunk, 0, queueLimit)], function (err) { - if (err) return callback(err); - return onRead(null, bops.subarray(chunk, queueLimit)); - }); - } - function onWrite(err) { - if (err) return callback(err); - packStream.read(onRead); - } - } - - function onFlush(err) { - if (err) return callback(err); - if (queueLimit) remote.write(null, callback); - else callback(); - } - -} - -function receivePack(remote, opts, callback) { - if (!callback) return receivePack.bind(this, remote, opts); - var clientCaps = {}, changes = []; - var repo = this; - this.listRefs(null, function (err, refs) { - if (err) return callback(err); - Object.keys(refs).forEach(function (ref, i) { - var hash = refs[ref]; - var line = hash + " " + ref; - // TODO: Implement report-status below and add here - if (!i) line += "\0delete-refs ofs-delta"; - remote.write(line, null); - }); - remote.write(null, null); - remote.read(onLine); - }); - - function onLine(err, line) { - if (err) return 
callback(err); - if (line === null) { - if (changes.length) return repo.unpack(remote, opts, onUnpack); - return callback(null, changes); - } - var match = line.match(/^([0-9a-f]{40}) ([0-9a-f]{40}) ([^ ]+)(?: (.+))?\n?$/); - changes.push({ - oldHash: match[1], - newHash: match[2], - ref: match[3] - }); - if (match[4]) clientCaps = parseCaps(match[4]); - remote.read(onLine); - } - - function onUnpack(err) { - if (err) return callback(err); - var i = 0, change; - next(); - function next(err) { - if (err) return callback(err); - change = changes[i++]; - if (!change) return callback(err, changes); - if (change.oldHash === "0000000000000000000000000000000000000000") { - return repo.createRef(change.ref, change.newHash, next); - } - if (change.newHash === "0000000000000000000000000000000000000000") { - return repo.deleteRef(change.ref, next); - } - return repo.updateRef(change.ref, change.newHash, next); - } - } -} - -function parseCaps(line) { - var caps = {}; - line.split(" ").map(function (cap) { - var pair = cap.split("="); - caps[pair[0]] = pair[1] || true; - }); - return caps; -} - -// Calculate a list of hashes to be included in a pack file based on have and want lists. -// -function walkRepo(repo, wants, haves, callback) { - var hashes = {}; - var done = false; - var left = 0; - - function onDone(err) { - if (done) return; - done = true; - return callback(err, Object.keys(hashes)); - } - - var keys = Object.keys(wants); - if (!keys.length) return onDone(); - keys.forEach(walkCommit); - - function walkCommit(hash) { - if (done) return; - if (hash in hashes || hash in haves) return; - hashes[hash] = true; - left++; - repo.loadAs("commit", hash, function (err, commit) { - if (done) return; - if (err) return onDone(err); - if (!commit) return onDone(new Error("Missing Commit: " + hash)); - commit.parents.forEach(walkCommit); - walkTree(commit.tree); - if (!--left) return onDone(); - }); - } - - function walkTree(hash) { - if (done) return; - if (hash in hashes || hash in haves) return; - hashes[hash] = true; - left++; - repo.loadAs("tree", hash, function (err, tree) { - if (done) return; - if (err) return onDone(err); - if (tree === undefined) return onDone(new Error("Missing tree: " + hash)); - Object.keys(tree).forEach(function (name) { - if (done) return; - var item = tree[name]; - if (item.mode === 040000) walkTree(item.hash); - else { - if (item.hash in hashes || item.hash in haves) return; - hashes[item.hash] = true; - } - }); - if (!--left) return onDone(); - }); - } -} diff --git a/mixins/sync.js b/mixins/sync.js new file mode 100644 index 0000000..6222c11 --- /dev/null +++ b/mixins/sync.js @@ -0,0 +1,147 @@ +"use strict"; + +var modes = require('../lib/modes'); + +module.exports = function (local, remote) { + local.fetch = fetch; + local.send = send; + local.readRemoteRef = remote.readRef.bind(remote); + local.updateRemoteRef = remote.updateRef.bind(remote); + + function fetch(ref, depth, callback) { + if (!callback) return fetch.bind(local, ref, depth); + sync(local, remote, ref, depth, callback); + } + + function send(ref, callback) { + if (!callback) return send.bind(local, ref); + sync(remote, local, ref, Infinity, callback); + } +}; + +// Download remote ref with depth +// Make sure to use Infinity for depth on github mounts or anything that +// doesn't allow shallow clones. 
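+//
+// Rough usage sketch (illustrative; assumes both repos already have a
+// storage mixin applied):
+//
+//   require('./mixins/sync')(local, remote);
+//   local.fetch("refs/heads/master", 10, function (err, hash) {
+//     if (err) throw err;
+//     // local now has the commit at `hash`, up to 10 levels of parent
+//     // history, and all reachable trees and blobs.
+//   });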
+function sync(local, remote, ref, depth, callback) { + if (typeof ref !== "string") throw new TypeError("ref must be string"); + if (typeof depth !== "number") throw new TypeError("depth must be number"); + + var hasCache = {}; + + remote.readRef(ref, function (err, hash) { + if (!hash) return callback(err); + importCommit(hash, depth, function (err) { + if (err) return callback(err); + callback(null, hash); + }); + }); + + // Caching has check. + function check(type, hash, callback) { + if (typeof type !== "string") throw new TypeError("type must be string"); + if (typeof hash !== "string") throw new TypeError("hash must be string"); + if (hasCache[hash]) return callback(null, true); + local.hasHash(hash, function (err, has) { + if (err) return callback(err); + hasCache[hash] = has; + callback(null, has); + }); + } + + function importCommit(hash, depth, callback) { + check("commit", hash, onCheck); + + function onCheck(err, has) { + if (err || has) return callback(err); + remote.loadAs("commit", hash, onLoad); + } + + function onLoad(err, commit) { + if (!commit) return callback(err || new Error("Missing commit " + hash)); + var i = 0; + importTree(commit.tree, onImport); + + function onImport(err) { + if (err) return callback(err); + if (i >= commit.parents.length || depth <= 1) { + return local.saveAs("commit", commit, onSave); + } + importCommit(commit.parents[i++], depth - 1, onImport); + } + } + + function onSave(err, newHash) { + if (err) return callback(err); + if (newHash !== hash) { + return callback(new Error("Commit hash mismatch " + hash + " != " + newHash)); + } + hasCache[hash] = true; + callback(); + } + } + + function importTree(hash, callback) { + check("tree", hash, onCheck); + + function onCheck(err, has) { + if (err || has) return callback(err); + remote.loadAs("tree", hash, onLoad); + } + + function onLoad(err, tree) { + if (!tree) return callback(err || new Error("Missing tree " + hash)); + var i = 0; + var names = Object.keys(tree); + onImport(); + + function onImport(err) { + if (err) return callback(err); + if (i >= names.length) { + return local.saveAs("tree", tree, onSave); + } + var name = names[i++]; + var entry = tree[name]; + if (modes.isBlob(entry.mode)) { + return importBlob(entry.hash, onImport); + } + if (entry.mode === modes.tree) { + return importTree(entry.hash, onImport); + } + // Skip others. 
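+        // (Only gitlink/submodule entries, mode 0160000, can reach this
+        // point; they are deliberately not copied by the sync.)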
+ onImport(); + } + } + + function onSave(err, newHash) { + if (err) return callback(err); + if (newHash !== hash) { + return callback(new Error("Tree hash mismatch " + hash + " != " + newHash)); + } + hasCache[hash] = true; + callback(); + } + } + + function importBlob(hash, callback) { + check("blob", hash, onCheck); + + function onCheck(err, has) { + if (err || has) return callback(err); + remote.loadAs("blob", hash, onLoad); + } + + function onLoad(err, blob) { + if (!blob) return callback(err || new Error("Missing blob " + hash)); + local.saveAs("blob", blob, onSave); + } + + function onSave(err, newHash) { + if (err) return callback(err); + if (newHash !== hash) { + return callback(new Error("Blob hash mismatch " + hash + " != " + newHash)); + } + hasCache[hash] = true; + callback(); + } + } +} diff --git a/mixins/walkers.js b/mixins/walkers.js index 5373043..59a5475 100644 --- a/mixins/walkers.js +++ b/mixins/walkers.js @@ -1,27 +1,31 @@ -var walk = require('../lib/walk.js'); -var assertType = require('../lib/assert-type.js'); +var modes = require('../lib/modes.js'); module.exports = function (repo) { - repo.logWalk = logWalk; // (hash-ish) => stream - repo.treeWalk = treeWalk; // (hash-ish) => stream + repo.logWalk = logWalk; // (ref) => stream + repo.treeWalk = treeWalk; // (treeHash) => stream }; +module.exports.walk = walk; -function logWalk(hashish, callback) { - if (!callback) return logWalk.bind(this, hashish); +function logWalk(ref, callback) { + if (!callback) return logWalk.bind(this, ref); var last, seen = {}; var repo = this; + if (!repo.readRef) return onShallow(); return repo.readRef("shallow", onShallow); function onShallow(err, shallow) { last = shallow; - return repo.loadAs("commit", hashish, onLoad); + resolveRef(repo, ref, onHash); } - function onLoad(err, commit, hash) { - if (commit === undefined) return callback(err); - commit.hash = hash; - seen[hash] = true; - return callback(null, walk(commit, scan, loadKey, compare)); + function onHash(err, hash) { + if (err) return callback(err); + return repo.loadAs("commit", hash, function (err, commit) { + if (commit === undefined) return callback(err); + commit.hash = hash; + seen[hash] = true; + return callback(null, walk(commit, scan, loadKey, compare)); + }); } function scan(commit) { @@ -33,7 +37,7 @@ function logWalk(hashish, callback) { function loadKey(hash, callback) { return repo.loadAs("commit", hash, function (err, commit) { - if (err) return callback(err); + if (!commit) return callback(err || new Error("Missing commit " + hash)); commit.hash = hash; if (hash === last) commit.last = true; return callback(null, commit); @@ -46,23 +50,28 @@ function compare(commit, other) { return commit.author.date < other.author.date; } -function treeWalk(hashish, callback) { - if (!callback) return treeWalk.bind(this, hashish); +function treeWalk(hash, callback) { + if (!callback) return treeWalk.bind(this, hash); var repo = this; - return repo.load(hashish, onLoad); - function onLoad(err, item, hash) { - if (err) return callback(err); - if (item.type === "commit") return repo.load(item.body.tree, onLoad); - item.hash = hash; - item.path = "/"; - return callback(null, walk(item, treeScan, treeLoadKey, treeCompare)); + return repo.loadAs("tree", hash, onTree); + + function onTree(err, body) { + if (!body) return callback(err || new Error("Missing tree " + hash)); + var tree = { + mode: modes.tree, + hash: hash, + body: body, + path: "/" + }; + return callback(null, walk(tree, treeScan, treeLoadKey, treeCompare)); } function 
treeLoadKey(entry, callback) {
-    return repo.load(entry.hash, function (err, object) {
+    if (entry.mode !== modes.tree) return callback(null, entry);
+    var type = modes.toType(entry.mode);
+    return repo.loadAs(type, entry.hash, function (err, body) {
       if (err) return callback(err);
-      entry.type = object.type;
-      entry.body = object.body;
+      entry.body = body;
       return callback(null, entry);
     });
   }
@@ -70,15 +79,17 @@ function treeWalk(hashish, callback) {
 }
 
 function treeScan(object) {
-  if (object.type === "blob") return [];
-  assertType(object, "tree");
-  return object.body.filter(function (entry) {
-    return entry.mode !== 0160000;
-  }).map(function (entry) {
-    var path = object.path + entry.name;
-    if (entry.mode === 040000) path += "/";
-    entry.path = path;
-    return entry;
+  if (object.mode !== modes.tree) return [];
+  var tree = object.body;
+  return Object.keys(tree).map(function (name) {
+    var entry = tree[name];
+    var path = object.path + name;
+    if (entry.mode === modes.tree) path += "/";
+    return {
+      mode: entry.mode,
+      hash: entry.hash,
+      path: path
+    };
  });
 }
 
@@ -86,3 +97,56 @@ function treeCompare(first, second) {
   return first.path < second.path;
 }
 
+function resolveRef(repo, hashish, callback) {
+  if (/^[0-9a-f]{40}$/.test(hashish)) {
+    return callback(null, hashish);
+  }
+  repo.readRef(hashish, function (err, hash) {
+    if (!hash) return callback(err || new Error("Bad ref " + hashish));
+    callback(null, hash);
+  });
+}
+
+// Generic sorted walker: each read() emits one item while children are
+// preloaded via loadKey and kept ordered by compare.
+function walk(seed, scan, loadKey, compare) {
+  var queue = [seed];
+  var working = 0, error, cb;
+  return {read: read, abort: abort};
+
+  function read(callback) {
+    if (!callback) return read;
+    if (cb) return callback(new Error("Only one read at a time"));
+    // Deliver any error recorded while no read was pending instead of
+    // silently dropping it.
+    if (error) { var e = error; error = null; return callback(e); }
+    if (working) { cb = callback; return; }
+    var item = queue.shift();
+    if (!item) return callback();
+    try { scan(item).forEach(onKey); }
+    catch (err) { return callback(err); }
+    return callback(null, item);
+  }
+
+  function abort(callback) { return callback(); }
+
+  function onError(err) {
+    if (cb) {
+      var callback = cb; cb = null;
+      return callback(err);
+    }
+    error = err;
+  }
+
+  function onKey(key) {
+    working++;
+    loadKey(key, onItem);
+  }
+
+  function onItem(err, item) {
+    working--;
+    if (err) return onError(err);
+    var index = queue.length;
+    while (index && compare(item, queue[index - 1])) index--;
+    queue.splice(index, 0, item);
+    if (!working && cb) {
+      var callback = cb; cb = null;
+      return read(callback);
+    }
+  }
+}
diff --git a/mixins/websql-db.js b/mixins/websql-db.js
new file mode 100644
index 0000000..6a25744
--- /dev/null
+++ b/mixins/websql-db.js
@@ -0,0 +1,167 @@
+"use strict";
+
+var codec = require('../lib/object-codec.js');
+var bodec = require('bodec');
+var inflate = require('../lib/inflate');
+var deflate = require('../lib/deflate');
+
+var sha1 = require('git-sha1');
+var modes = require('../lib/modes.js');
+var db;
+
+mixin.init = init;
+
+mixin.loadAs = loadAs;
+mixin.saveAs = saveAs;
+mixin.loadRaw = loadRaw;
+mixin.saveRaw = saveRaw;
+module.exports = mixin;
+
+function mixin(repo, prefix) {
+  if (!prefix) throw new Error("Prefix required");
+  repo.refPrefix = prefix;
+  repo.saveAs = saveAs;
+  repo.saveRaw = saveRaw;
+  repo.loadAs = loadAs;
+  repo.loadRaw = loadRaw;
+  repo.readRef = readRef;
+  repo.updateRef = updateRef;
+  repo.hasHash = hasHash;
+}
+
+function init(callback) {
+
+  db = openDatabase('tedit', '1.0', 'tedit local data', 10 * 1024 * 1024);
+  db.transaction(function (tx) {
+    tx.executeSql(
+      'CREATE TABLE IF NOT EXISTS objects (hash unique, body blob)'
+    );
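+    // Refs get their own table: each row maps a prefixed ref path to a
+    // hash (or symbolic target) stored as text.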
+    tx.executeSql(
+      'CREATE TABLE IF NOT EXISTS refs (path unique, value text)'
+    );
+  }, function () {
+    console.error(arguments);
+    callback(new Error("Problem initializing database"));
+  }, function () {
+    callback();
+  });
+}
+
+function saveAs(type, body, callback) {
+  /*jshint validthis: true */
+  if (!callback) return saveAs.bind(this, type, body);
+  var hash, buffer;
+  try {
+    buffer = codec.frame({type:type,body:body});
+    hash = sha1(buffer);
+  }
+  catch (err) { return callback(err); }
+  this.saveRaw(hash, buffer, callback);
+}
+
+function saveRaw(hash, buffer, callback) {
+  /*jshint validthis: true */
+  if (!callback) return saveRaw.bind(this, hash, buffer);
+  var sql = 'INSERT INTO objects (hash, body) VALUES (?, ?)';
+  db.transaction(function (tx) {
+    var text;
+    try {
+      text = bodec.toBase64(deflate(buffer));
+    }
+    catch (err) {
+      return callback(err);
+    }
+    tx.executeSql(sql, [hash, text], function () {
+      callback(null, hash);
+    });
+  });
+}
+
+function loadAs(type, hash, callback) {
+  /*jshint validthis: true */
+  if (!callback) return loadAs.bind(this, type, hash);
+  loadRaw(hash, function (err, buffer) {
+    if (!buffer) return callback(err);
+    var parts, body;
+    try {
+      parts = codec.deframe(buffer);
+      if (parts.type !== type) throw new Error("Type mismatch");
+      body = codec.decoders[type](parts.body);
+    }
+    catch (err) {
+      return callback(err);
+    }
+    callback(null, body);
+  });
+}
+
+function loadRaw(hash, callback) {
+  /*jshint validthis: true */
+  if (!callback) return loadRaw.bind(this, hash);
+  var sql = 'SELECT * FROM objects WHERE hash=?';
+  db.readTransaction(function (tx) {
+    tx.executeSql(sql, [hash], function (tx, result) {
+      if (!result.rows.length) return callback();
+      var item = result.rows.item(0);
+      var buffer;
+      try {
+        buffer = inflate(bodec.fromBase64(item.body));
+      }
+      catch (err) {
+        return callback(err);
+      }
+      callback(null, buffer);
+    }, function (tx, error) {
+      callback(new Error(error.message));
+    });
+  });
+}
+
+// Existence check that recurses into trees: a tree only counts as present
+// when every one of its children is present as well.
+function hasHash(type, hash, callback) {
+  /*jshint validthis: true */
+  loadAs(type, hash, function (err, value) {
+    if (err) return callback(err);
+    if (value === undefined) return callback(null, false);
+    if (type !== "tree") return callback(null, true);
+    var names = Object.keys(value);
+    next();
+    function next() {
+      if (!names.length) return callback(null, true);
+      var name = names.pop();
+      var entry = value[name];
+      hasHash(modes.toType(entry.mode), entry.hash, function (err, has) {
+        if (err) return callback(err);
+        if (has) return next();
+        callback(null, false);
+      });
+    }
+  });
+}
+
+function readRef(ref, callback) {
+  /*jshint validthis: true */
+  var key = this.refPrefix + "/" + ref;
+  var sql = 'SELECT * FROM refs WHERE path=?';
+  db.transaction(function (tx) {
+    tx.executeSql(sql, [key], function (tx, result) {
+      if (!result.rows.length) return callback();
+      var item = result.rows.item(0);
+      callback(null, item.value);
+    }, function (tx, error) {
+      callback(new Error(error.message));
+    });
+  });
+}
+
+function updateRef(ref, hash, callback) {
+  /*jshint validthis: true */
+  var key = this.refPrefix + "/" + ref;
+  var sql = 'INSERT INTO refs (path, value) VALUES (?, ?)';
+  db.transaction(function (tx) {
+    tx.executeSql(sql, [key, hash], function () {
+      callback();
+    }, function (tx, error) {
+      callback(new Error(error.message));
+    });
+  });
+}
diff --git a/net/git-fetch-pack.js b/net/git-fetch-pack.js
new file mode 100644
index 0000000..4e75303
--- /dev/null
+++ b/net/git-fetch-pack.js
@@ -0,0 +1,196 @@
+"use strict";
+
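+// Client half of the smart "git-upload-pack" protocol: reads the server's
+// ref advertisement, negotiates want/have/deepen lines, then demuxes the
+// response into pack, progress, and error channels.
+//
+// Usage sketch (illustrative; `transport` comes from transport-http.js or
+// transport-tcp.js): the first take() yields the ref advertisement, put()
+// then accepts {want}, {have}, {deepen}, null (flush), and {done} commands,
+// and a later take() yields the {pack, progress, error} channels once the
+// server acknowledges.
+//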
+var makeChannel = require('culvert'); +var wrapHandler = require('../lib/wrap-handler'); +var bodec = require('bodec'); + +module.exports = fetchPack; + +function fetchPack(transport, onError) { + + if (!onError) onError = throwIt; + + // Wrap our handler functions to route errors properly. + onRef = wrapHandler(onRef, onError); + onWant = wrapHandler(onWant, onError); + onNak = wrapHandler(onNak, onError); + onMore = wrapHandler(onMore, onError); + onReady = wrapHandler(onReady, onError); + + var caps = null; + var capsSent = false; + var refs = {}; + var haves = {}; + var havesCount = 0; + + // Create a duplex channel for talking with the agent. + var libraryChannel = makeChannel(); + var agentChannel = makeChannel(); + var api = { + put: libraryChannel.put, + drain: libraryChannel.drain, + take: agentChannel.take + }; + + // Start the connection and listen for the response. + var socket = transport("git-upload-pack", onError); + socket.take(onRef); + + // Return the other half of the duplex API channel. + return { + put: agentChannel.put, + drain: agentChannel.drain, + take: libraryChannel.take + }; + + function onRef(line) { + if (line === undefined) { + throw new Error("Socket disconnected"); + } + if (line === null) { + api.put(refs); + api.take(onWant); + return; + } + else if (!caps) { + caps = {}; + Object.defineProperty(refs, "caps", {value: caps}); + Object.defineProperty(refs, "shallows", {value:[]}); + var index = line.indexOf("\0"); + if (index >= 0) { + line.substring(index + 1).split(" ").forEach(function (cap) { + var i = cap.indexOf("="); + if (i >= 0) { + caps[cap.substring(0, i)] = cap.substring(i + 1); + } + else { + caps[cap] = true; + } + }); + line = line.substring(0, index); + } + } + var match = line.match(/(^[0-9a-f]{40}) (.*)$/); + if (!match) { + if (typeof line === "string" && /^ERR/i.test(line)) { + throw new Error(line); + } + throw new Error("Invalid line: " + JSON.stringify(line)); + } + refs[match[2]] = match[1]; + socket.take(onRef); + } + + var packChannel; + var progressChannel; + var errorChannel; + + function onWant(line) { + if (line === undefined) return socket.put(); + if (line === null) { + socket.put(null); + return api.take(onWant); + } + if (line.deepen) { + socket.put("deepen " + line.deepen + "\n"); + return api.take(onWant); + } + if (line.have) { + haves[line.have] = true; + havesCount++; + socket.put("have " + line.have + "\n"); + return api.take(onWant); + } + if (line.want) { + var extra = ""; + if (!capsSent) { + capsSent = true; + if (caps["ofs-delta"]) extra += " ofs-delta"; + if (caps["thin-pack"]) extra += " thin-pack"; + // if (caps["multi_ack_detailed"]) extra += " multi_ack_detailed"; + // else if (caps["multi_ack"]) extra +=" multi_ack"; + if (caps["side-band-64k"]) extra += " side-band-64k"; + else if (caps["side-band"]) extra += " side-band"; + // if (caps["agent"]) extra += " agent=" + agent; + if (caps.agent) extra += " agent=" + caps.agent; + } + extra += "\n"; + socket.put("want " + line.want + extra); + return api.take(onWant); + } + if (line.done) { + socket.put("done\n"); + return socket.take(onNak); + } + throw new Error("Invalid have/want command"); + } + + function onNak(line) { + if (line === undefined) return api.put(); + if (line === null) return socket.take(onNak); + if (bodec.isBinary(line) || line.progress || line.error) { + packChannel = makeChannel(); + progressChannel = makeChannel(); + errorChannel = makeChannel(); + api.put({ + pack: { take: packChannel.take }, + progress: { take: progressChannel.take }, 
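+        // (pack, progress, and error are take-only views of the three
+        // side-band streams demuxed from the server's response)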
+        error: { take: errorChannel.take }
+      });
+      return onMore(null, line);
+    }
+    var match = line.match(/^shallow ([0-9a-f]{40})$/);
+    if (match) {
+      refs.shallows.push(match[1]);
+      return socket.take(onNak);
+    }
+    match = line.match(/^ACK ([0-9a-f]{40})$/);
+    if (match) {
+      return socket.take(onNak);
+    }
+    if (line === "NAK") {
+      return socket.take(onNak);
+    }
+    throw new Error("Expected NAK, but got " + JSON.stringify(line));
+  }
+
+  function onMore(line) {
+
+    if (line === undefined) {
+      packChannel.put();
+      progressChannel.put();
+      errorChannel.put();
+      return api.put();
+    }
+    if (line === null) {
+      api.put(line);
+    }
+    else {
+      if (line.progress) {
+        progressChannel.put(line.progress);
+      }
+      else if (line.error) {
+        errorChannel.put(line.error);
+      }
+      else {
+        if (!packChannel.put(line)) {
+          return packChannel.drain(onReady);
+        }
+      }
+    }
+    socket.take(onMore);
+  }
+
+  function onReady() {
+    socket.take(onMore);
+  }
+
+}
+
+var defer = require('../lib/defer');
+
+// Default error handler: re-throw asynchronously so the error surfaces
+// instead of being swallowed by whatever callback is on the stack.
+function throwIt(err) {
+  defer(function () {
+    throw err;
+  });
+  // throw err;
+}
diff --git a/net/request-xhr.js b/net/request-xhr.js
new file mode 100644
index 0000000..5bf9064
--- /dev/null
+++ b/net/request-xhr.js
@@ -0,0 +1,36 @@
+"use strict";
+
+module.exports = request;
+
+function request(method, url, headers, body, callback) {
+  if (typeof body === "function") {
+    callback = body;
+    body = undefined;
+  }
+  if (!callback) {
+    return request.bind(null, method, url, headers, body);
+  }
+  var xhr = new XMLHttpRequest();
+  xhr.open(method, url, true);
+  xhr.responseType = "arraybuffer";
+
+  Object.keys(headers).forEach(function (name) {
+    xhr.setRequestHeader(name, headers[name]);
+  });
+
+  xhr.onreadystatechange = function () {
+    if (xhr.readyState !== 4) return;
+    var resHeaders = {};
+    xhr.getAllResponseHeaders().trim().split("\r\n").forEach(function (line) {
+      var index = line.indexOf(":");
+      resHeaders[line.substring(0, index).toLowerCase()] = line.substring(index + 1).trim();
+    });
+
+    callback(null, {
+      statusCode: xhr.status,
+      headers: resHeaders,
+      body: xhr.response && new Uint8Array(xhr.response)
+    });
+  };
+  xhr.send(body);
+}
diff --git a/net/tcp-chrome-sockets.js b/net/tcp-chrome-sockets.js
new file mode 100644
index 0000000..2a14aa6
--- /dev/null
+++ b/net/tcp-chrome-sockets.js
@@ -0,0 +1,108 @@
+"use strict";
+
+var makeChannel = require('culvert');
+var wrapHandler = require('../lib/wrap-handler');
+var tcp = window.chrome.sockets.tcp;
+var runtime = window.chrome.runtime;
+
+module.exports = connect;
+
+function connect(host, port, onError) {
+  port = port|0;
+  host = String(host);
+  if (!port || !host) throw new TypeError("host and port are required");
+
+  onCreate = wrap(onCreate, onError);
+  onConnect = wrap(onConnect, onError);
+  onInfo = wrap(onInfo, onError);
+  onReceive = wrap(onReceive, onError);
+  onReceiveError = wrap(onReceiveError, onError);
+  onData = wrapHandler(onData, onError);
+  onWrite = wrap(onWrite, onError);
+
+  var paused = false;
+  var open = false;
+  var socketId;
+
+  var serverChannel = makeChannel();
+  var clientChannel = makeChannel();
+  var socket = {
+    put: serverChannel.put,
+    drain: serverChannel.drain,
+    take: clientChannel.take
+  };
+
+  tcp.onReceive.addListener(onReceive);
+  tcp.onReceiveError.addListener(onReceiveError);
+  tcp.create(onCreate);
+
+  return {
+    put: clientChannel.put,
+    drain: clientChannel.drain,
+    take: serverChannel.take
+  };
+
+  function onCreate(createInfo) {
+    socketId = createInfo.socketId;
+    tcp.connect(socketId, host, port, onConnect);
+  }
+
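+  // chrome.sockets.tcp flow: create() yields a socketId, connect() opens
+  // the connection, and getInfo() confirms it before the write loop starts.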
function onConnect(result) {
+    if (result < 0) throw new Error(runtime.lastError.message + " Connection error");
+    tcp.getInfo(socketId, onInfo);
+  }
+
+  function onInfo(socketInfo) {
+    if (!socketInfo.connected) {
+      throw new Error("Connection failed");
+    }
+    open = true;
+    socket.take(onData);
+  }
+
+  function onReceive(info) {
+    if (info.socketId !== socketId) return;
+    if (socket.put(new Uint8Array(info.data)) || paused) return;
+    paused = true;
+    tcp.setPaused(socketId, true);
+    socket.drain(onDrain);
+  }
+
+  function onDrain() {
+    if (!paused) return;
+    paused = false;
+    if (open) tcp.setPaused(socketId, false);
+  }
+
+  function onReceiveError(info) {
+    if (info.socketId !== socketId) return;
+    open = false;
+    tcp.close(socketId);
+    socket.put();
+    // TODO: find a way to tell close and error apart.
+    // throw new Error("Code " + info.resultCode + " error while receiving.");
+  }
+
+  function onData(data) {
+    tcp.send(socketId, data.buffer, onWrite);
+  }
+
+  function onWrite(info) {
+    if (info.resultCode < 0) {
+      throw new Error(runtime.lastError.message + " Error writing.");
+    }
+    socket.take(onData);
+  }
+}
+
+
+function wrap(fn, onError) {
+  return function () {
+    try {
+      return fn.apply(this, arguments);
+    }
+    catch (err) {
+      onError(err);
+    }
+  };
+}
diff --git a/net/tcp-node.js b/net/tcp-node.js
new file mode 100644
index 0000000..c7f3967
--- /dev/null
+++ b/net/tcp-node.js
@@ -0,0 +1,85 @@
+"use strict";
+
+var makeChannel = require('culvert');
+var wrapHandler = require('../lib/wrap-handler');
+var net = require('net');
+
+module.exports = connect;
+
+function connect(host, port, onError) {
+  port = port|0;
+  host = String(host);
+  if (!port || !host) throw new TypeError("host and port are required");
+
+  // Wrap event handlers from node stream
+  onConnect = wrap(onConnect, onError);
+  pump = wrap(pump, onError);
+  onEnd = wrap(onEnd, onError);
+  onDrain = wrap(onDrain, onError);
+
+  // Wrap event handlers from culvert socket
+  onTake = wrapHandler(onTake, onError);
+
+  var serverChannel = makeChannel();
+  var clientChannel = makeChannel();
+  var socket = {
+    put: serverChannel.put,
+    drain: serverChannel.drain,
+    take: clientChannel.take
+  };
+
+  var client = net.connect({ host: host, port: port }, onConnect);
+  if (onError) client.on("error", onError);
+
+  return {
+    put: clientChannel.put,
+    drain: clientChannel.drain,
+    take: serverChannel.take
+  };
+
+  function onConnect() {
+    socket.take(onTake);
+    client.on("end", onEnd);
+    client.on("readable", pump);
+    client.on("drain", onDrain);
+    // ("error" is already registered above; registering it again here would
+    // double-report failures, and would throw if onError was omitted.)
+  }
+
+  function pump() {
+    var chunk;
+    do {
+      chunk = client.read();
+      if (!chunk) return;
+    } while (socket.put(chunk));
+    socket.drain(pump);
+  }
+
+  function onEnd() {
+    socket.put();
+  }
+
+  function onTake(data) {
+    if (data === undefined) {
+      client.end();
+    }
+    else if (client.write(data)) {
+      socket.take(onTake);
+    }
+  }
+
+  function onDrain() {
+    socket.take(onTake);
+  }
+
+}
+
+function wrap(fn, onError) {
+  return function () {
+    try {
+      return fn.apply(this, arguments);
+    }
+    catch (err) {
+      onError(err);
+    }
+  };
+}
diff --git a/net/tcp-ws-proxy.js b/net/tcp-ws-proxy.js
new file mode 100644
index 0000000..37a5ff7
--- /dev/null
+++ b/net/tcp-ws-proxy.js
@@ -0,0 +1,79 @@
+"use strict";
+
+var makeChannel = require('culvert');
+var wrapHandler = require('../lib/wrap-handler');
+
+module.exports = function (proxyUrl) {
+  if (proxyUrl[proxyUrl.length - 1] !== "/") proxyUrl += "/";
+
+  return function connect(host, port, onError) {
+    port = port|0;
host = String(host); + if (!port || !host) throw new TypeError("host and port are required"); + + onData = wrapHandler(onData, onError); + + var serverChannel = makeChannel(); + var clientChannel = makeChannel(); + var socket = { + put: serverChannel.put, + drain: serverChannel.drain, + take: clientChannel.take + }; + + var connected = false; + var ws = new WebSocket(proxyUrl + "tcp/" + host + "/" + port); + ws.binaryType = "arraybuffer"; + + ws.onopen = wrap(onOpen, onError); + ws.onclose = wrap(onClose, onError); + ws.onmessage = wrap(onMessage, onError); + ws.onerror = wrap(onWsError, onError); + + return { + put: clientChannel.put, + drain: clientChannel.drain, + take: serverChannel.take + }; + + function onOpen() { + ws.send("connect"); + } + + function onClose() { + socket.put(); + } + + function onMessage(evt) { + if (!connected && evt.data === "connect") { + connected = true; + socket.take(onData); + return; + } + + socket.put(new Uint8Array(evt.data)); + } + + function onWsError() { + console.error(arguments); + throw new Error("Generic websocket error"); + } + + function onData(chunk) { + ws.send(chunk.buffer); + socket.take(onData); + } + + }; +}; + +function wrap(fn, onError) { + return function () { + try { + return fn.apply(this, arguments); + } + catch (err) { + onError(err); + } + }; +} diff --git a/net/transport-http.js b/net/transport-http.js new file mode 100644 index 0000000..fd3b0c3 --- /dev/null +++ b/net/transport-http.js @@ -0,0 +1,103 @@ +"use strict"; + +var makeChannel = require('culvert'); +var bodec = require('bodec'); +var pktLine = require('../lib/pkt-line'); +var wrapHandler = require('../lib/wrap-handler'); + +module.exports = function (request) { + + return function httpTransport(gitUrl, username, password) { + // Send Auth header if username is set + var auth; + if (username) { + auth = "Basic " + btoa(username + ":" + (password || "")); + } + + return function (serviceName, onError) { + + // Wrap our handler functions to route errors properly. + onResponse = wrapHandler(onResponse, onError); + onWrite = wrapHandler(onWrite, onError); + onResult = wrapHandler(onResult, onError); + + // Create a duplex channel with transform for internal use. + var serverChannel = makeChannel();//0, "server"); + var clientChannel = makeChannel();//0, "client"); + var socket = { + put: serverChannel.put, + drain: serverChannel.drain, + take: clientChannel.take + }; + + // Send the initial request to start the connection. + var headers = {}; + if (auth) headers.Authorization = auth; + request("GET", gitUrl + "/info/refs?service=" + serviceName, headers, onResponse); + + // Prep for later requests + var bodyParts = []; + var bodyWrite = pktLine.framer(function (chunk) { + bodyParts.push(chunk); + }); + headers["Content-Type"] = "application/x-" + serviceName + "-request"; + socket.take(onWrite); + + var verified = 0; + var parseResponse = pktLine.deframer(function (line) { + if (verified === 2) { + socket.put(line); + } + else if (verified === 0) { + if (line !== "# service=" + serviceName) { + throw new Error("Illegal service response"); + } + verified = 1; + } + else if (verified === 1) { + if (line !== null) { + throw new Error("Expected null after service name"); + } + verified = 2; + } + }); + + // Return the other half of the duplex channel for the protocol logic to use. 
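+      // (put() accepts protocol lines that get pkt-line framed and batched
+      // into POST bodies; take() yields lines de-framed from each response.)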
+ return { + put: clientChannel.put, + drain: clientChannel.drain, + take: serverChannel.take + }; + + function onResponse(res) { + if (res.statusCode !== 200) { + throw new Error("Invalid response: " + res.statusCode); + } + if (res.headers["content-type"] !== "application/x-" + serviceName + "-advertisement") { + throw new Error("Not a smart http git server"); + } + parseResponse(res.body); + } + + function onWrite(item) { + if (item === undefined) return socket.put(); + bodyWrite(item); + socket.take(onWrite); + if (item !== "done\n" || !bodyParts.length) return; + var body = bodec.join(bodyParts); + bodyParts.length = 0; + request("POST", gitUrl + "/" + serviceName, headers, body, onResult); + } + + function onResult(res) { + if (res.statusCode !== 200) { + throw new Error("Invalid result: " + res.statusCode); + } + if (res.headers["content-type"] !== "application/x-" + serviceName + "-result") { + throw new Error("Not a smart http git server"); + } + parseResponse(res.body); + } + }; + }; +}; diff --git a/net/transport-tcp.js b/net/transport-tcp.js new file mode 100644 index 0000000..d32728e --- /dev/null +++ b/net/transport-tcp.js @@ -0,0 +1,48 @@ +"use strict"; + +var makeChannel = require('culvert'); +var bodec = require('bodec'); +var pktLine = require('../lib/pkt-line'); +var wrapHandler = require('../lib/wrap-handler'); + +module.exports = function (connect) { + + return function tcpTransport(path, host, port) { + port = (port|0) || 9418; + if (!path || !host) throw new Error("path and host are required"); + + return function (serviceName, onError) { + + onData = wrapHandler(onData, onError); + onDrain = wrapHandler(onDrain, onError); + + var socket = connect(host, port, onError); + var inter = makeChannel(); + inter.put = pktLine.deframer(inter.put); + + socket.put = pktLine.framer(socket.put); + var greeting = bodec.fromRaw(serviceName + " " + path + "\0host=" + host + "\0"); + socket.put(greeting); + + // Pipe socket to inter with backpressure + socket.take(onData); + function onData(chunk) { + if (inter.put(chunk)) { + socket.take(onData); + } + else { + inter.drain(onDrain); + } + } + function onDrain() { + socket.take(onData); + } + + return { + put: socket.put, + drain: socket.drain, + take: inter.take + }; + }; + }; +}; diff --git a/package.json b/package.json index 303ce91..9f70c0d 100644 --- a/package.json +++ b/package.json @@ -1,30 +1,27 @@ { "name": "js-git", - "version": "0.6.2", + "version": "0.7.8", "description": "Git Implemented in JavaScript", - "main": "js-git.js", - "repository": { - "type": "git", - "url": "git://github.com/creationix/js-git.git" - }, - "devDependencies": { - "git-fs-db": "~0.2.0", - "git-net": "~0.0.4", - "git-node-platform": "~0.1.4", - "gen-run": "~0.1.1" - }, "keywords": [ "git", "js-git" ], + "repository": { + "type": "git", + "url": "git://github.com/creationix/js-git.git" + }, "author": "Tim Caswell ", "license": "MIT", "bugs": { "url": "https://github.com/creationix/js-git/issues" }, + "scripts": { + "test": "ls test/test-* | xargs -n1 node" + }, "dependencies": { - "push-to-pull": "~0.1.0", - "varint": "0.0.3", - "bops": "~0.1.0" + "bodec": "^0.1.0", + "culvert": "^0.1.2", + "git-sha1": "^0.1.2", + "pako": "^0.2.5" } } diff --git a/specs/high/db.md b/specs/high/db.md deleted file mode 100644 index e69de29..0000000 diff --git a/specs/high/fs.md b/specs/high/fs.md deleted file mode 100644 index e69de29..0000000 diff --git a/specs/high/index.md b/specs/high/index.md deleted file mode 100644 index e69de29..0000000 diff --git 
a/specs/high/proto.md b/specs/high/proto.md deleted file mode 100644 index e69de29..0000000 diff --git a/specs/high/trace.md b/specs/high/trace.md deleted file mode 100644 index e69de29..0000000 diff --git a/specs/low/bops.md b/specs/low/bops.md deleted file mode 100644 index e69de29..0000000 diff --git a/specs/low/continuable.md b/specs/low/continuable.md deleted file mode 100644 index e69de29..0000000 diff --git a/specs/low/deflate.md b/specs/low/deflate.md deleted file mode 100644 index e69de29..0000000 diff --git a/specs/low/http.md b/specs/low/http.md deleted file mode 100644 index e69de29..0000000 diff --git a/specs/low/inflate.md b/specs/low/inflate.md deleted file mode 100644 index e69de29..0000000 diff --git a/specs/low/sha1.md b/specs/low/sha1.md deleted file mode 100644 index e69de29..0000000 diff --git a/specs/low/simple-stream.md b/specs/low/simple-stream.md deleted file mode 100644 index e69de29..0000000 diff --git a/specs/low/ssh.md b/specs/low/ssh.md deleted file mode 100644 index e69de29..0000000 diff --git a/specs/low/tcp.md b/specs/low/tcp.md deleted file mode 100644 index e69de29..0000000 diff --git a/test/run.js b/test/run.js new file mode 100644 index 0000000..e0655bf --- /dev/null +++ b/test/run.js @@ -0,0 +1,48 @@ +// Ultra simple test runner with TAP output. + +var inspect = require('util').inspect; +var defer = require('../lib/defer.js'); +var log = console.log; +console.log = function () { + var args = [].slice.call(arguments).map(function (arg) { + return inspect(arg, {colors:true}); + }); + log(args.join(" ").split("\n").map(function (line) { + return "# " + line; + }).join("\n")); +}; + +module.exports = function (tests) { + var timeout; + var test; + var index = 0; + log("1.." + (tests.length)); + next(); + function next(err) { + if (timeout) clearTimeout(timeout); + if (index) { + if (err) { + log(err.stack.split("\n").map(function (line) { + return "# " + line; + }).join("\n")); + log("not ok " + index + " - " + test.name); + } + else { + log("ok " + index + " - " + test.name); + } + } + test = tests[index++]; + if (!test) return; + timeout = setTimeout(onTimeout, 1000); + try { + if (test.length) test(next); + else test(); + } + catch (err) { return next(err); } + if (!test.length) defer(next); + } + + function onTimeout() { + next(new Error("Test timeout")); + } +}; \ No newline at end of file diff --git a/test/sample-pack.js b/test/sample-pack.js new file mode 100644 index 0000000..d90a99c --- /dev/null +++ b/test/sample-pack.js @@ -0,0 +1,5 @@ +var bodec = require('bodec'); + +// This is a small sample packfile with couple offset deltas +// pack-5851ce932ec42973b51d631afe25da247c3dc49a.pack +module.exports = 
bodec.fromBase64('UEFDSwAAAAIAAAAQnQ54nJ3MWwoCMQxA0f+uIhtQ0nYeKYgobsENZNoEC/OQMTK6e2cN/l4411YRYCo5kseITVLpSmAfOVLSnFJB6kJqSukDuSevMhu0moed9CmrKjKFwpIxtT7TINh2vSqReHX8tseywr1OcOPXJuMIJ6vTJa/CVpe5fo55mc7gY2p86LFBOGCH6PY6VTP5x7prKfAVA54Xe+yLWTbQOor7AZUCSPmRDnicnctRCgIhEADQf08xFyjGUVeFiKIrdAEdZ0lYd8OM7fh1hn4fvNFFQEi8JCcuCoWSmakwY8xoHGMxkdgimZjVM3VZB8wUPMUJLWrRPml0IdspuJl1JHJBSijGRlLpPR5bh3ttcEuvXZYFTqO2C3dJo25r/Rx5a2fQJlpNHgnhgBOi+mmrY8g/V11LgVV2mOsi6guDiEL9mA94nJ3PTWrDMBBA4b1OMRdosDT6hRIKvkIuIMkjd6htGXVCkts3Z+j2wbd4MohA+5Cai874uiQXQmuIjagsAWMp3rWS0WCM6syDDgGbDCXEmhz5Zl00iayv2mpyHk2xVLVZlhJUvst3H3DjHeb8+6Btg0/h/asOysL94Oel9v0KGpPVxjtE+Jj8NKl33VmE/mPV3M8XrO8x4WOFkusPSIc+eOUjb9B4I/UHHmNMM5QOeJydy1sKwjAQRuH3rGI2oGQmlzYgIrgDcQNp8hcDTSsxostXt+B5/OD0BpAzMJmzJJs4J5Fh5OiCsB3nMFvoOAakkaHusWHtpJm1y9YYb4KXSawgR/GY9MQ+8OB/TZhVfPbb1uhaKp3j44VloUMv9ZQaYi/bWt77tNUjsQmWxTttaae91uqrtfSOf151wRorqN9Ac1mgPgYNRBeSDnicncvdCcIwEADg90xxCyiXn6YGRBRXcIG75IKBppX2xI6vM/j6waerCGAozFyCiA2Jx+QJh5Rd8l5cHUiSdcVTzeZFq8wKY5TkamYsIWO1Xkau8VRdHNhF5BLJsUWqht76XFZ4tA532j4yTXDW1q95FdK2zG0/5qVfwPoUrIshWThgRDQ/7U1V/rnmVgpsSxdQ2dV8AbwRRT6TC3icnczNCQIxEEDhe6qYBpT8JwuyCHvybgOTmOBAsoE4ouUrluD1wfd4lgLexpqjL9G5kG6YUtY56ohqccbVaEzQoaLXAp98HxOu1GHDx6u0Biemfs6zINPY6X3Mo6+gzGKV9jYEOEgvpfjWTszlHysuOzFhg+03ER9fQDcKqQl4nDM0MDAzMVFIL0pNLcnMS9crqShhEHwQ5TRdT6bE+tY/8blzjRyr9lYcMoSoy60kVmVeajlYifjVm28/SzW0d12ZKCB++trFC8ZKOxBKjMBqauylWlkm6kbyCrH0Gp01vHQ9NnMNAFftOrq1AXic80jNyclXCM8vyknhckxJUSjOz03lAgBQjAcOPXicS8zLL8lILVJIy8xJ5QIAI9cEvLEBeJyrTC1RSMzLL8lILVJIy8xJ5QIAOsAGLmWAPnicm8lYOqEUAAX6AhVkEHicKw2aEAQABEABqqoCeJwzNDAwMzFRyK1ML0pNLcnMS9crqShhEHwQ5TRdT6bE+tY/8blzjRyr9lYcAgAxUhBDqAJ4nDM0MDAzMVFIL0pNLcnMS9crqShhEHwQ5TRdT6bE+tY/8blzjRyr9lYcAgAPuQ9dqAJ4nDM0MDAzMVFIL0pNLcnMS9crqShhCK3dYPty+oksL6Y+ub1WMq+Voh9ZAAAZvA8xPHic80jNyclXCM8vyknhAgAcMgQnuZAj3ZpSLQckQi9VfpQYWt+hefM='); diff --git a/test/test-config-codec.js b/test/test-config-codec.js new file mode 100644 index 0000000..22640ba --- /dev/null +++ b/test/test-config-codec.js @@ -0,0 +1,69 @@ +var run = require('./run.js'); + +// The thing we mean to test. 
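+// config-codec parses and serializes INI-style git config text; sections
+// and subsections decode to nested plain objects, so the sample below
+// yields config.user.name, config.color.diff.meta, and so on.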
+var codec = require('../lib/config-codec.js'); + +var sample = '\ +[user]\n\ +\tname = Tim Caswell\n\ +\temail = tim@creationix.com\n\ +[core]\n\ +\teditor = vim\n\ +\twhitespace = fix,-indent-with-non-tab,trailing-space,cr-at-eol\n\ +[web]\n\ +\tbrowser = google-chrome\n\ +[color]\n\ +\tui = true\n\ +[color "branch"]\n\ +\tcurrent = yellow bold\n\ +\tlocal = green bold\n\ +\tremote = cyan bold\n\ +[color "diff"]\n\ +\tmeta = yellow bold\n\ +\tfrag = magenta bold\n\ +\told = red bold\n\ +\tnew = green bold\n\ +\twhitespace = red reverse\n\ +[github]\n\ +\tuser = creationix\n\ +\ttoken = token'; + +var config; + +run([ + function testDecode() { + config = codec.decode(sample); + if (config.user.name !== "Tim Caswell") { + throw new Error("Failed to parse user.name"); + } + if (config.color.ui != "true") { + throw new Error("Failed to parse color.ui"); + } + if (config.color.diff.meta !== "yellow bold") { + throw new Error("Failed to parse color.diff.meta"); + } + }, + function testEncode() { + var encoded = codec.encode(config); + var config2 = codec.decode(encoded); + if (JSON.stringify(config) !== JSON.stringify(config2)) { + console.log(config); + console.log(encoded); + console.log(config2); + throw new Error("Encode failed"); + } + }, + function testEncode2() { + var encoded = codec.encode({ + foo: { + bar: { + baz: true + } + } + }); + if (encoded !== '[foo "bar"]\n\tbaz = true\n') { + console.log(encoded); + throw new Error("Invalid encoding of single deep config"); + } + } +]); diff --git a/test/test-mem-db.js b/test/test-mem-db.js new file mode 100644 index 0000000..f7db59c --- /dev/null +++ b/test/test-mem-db.js @@ -0,0 +1,57 @@ +var run = require('./run.js'); +var bodec = require('bodec'); +var sha1 = require('git-sha1'); +var codec = require('../lib/object-codec.js'); + +var repo = {}; +require('../mixins/mem-db.js')(repo); + +var blob = bodec.fromUnicode("Hello World\n"); +var blobHash = "557db03de997c86a4a028e1ebd3a1ceb225be238"; +run([ + function testSaveAs(end) { + repo.saveAs("blob", blob, function (err, hash) { + if (err) return end(err); + if (hash !== blobHash) { + console.log([hash, blobHash]); + return end(new Error("Hash mismatch")); + } + end(); + }); + }, + function testLoadRaw(end) { + repo.loadRaw(blobHash, function (err, bin) { + if (err) return end(err); + var obj = codec.deframe(bin, true); + if (obj.type !== "blob") return err(new Error("Wrong type")); + if (bodec.toUnicode(obj.body) !== bodec.toUnicode(blob)) { + return err(new Error("Wrong body")); + } + end(); + }); + }, + function testLoadAs(end) { + repo.loadAs("blob", blobHash, function (err, body) { + if (err) return end(err); + if (bodec.toUnicode(body) !== bodec.toUnicode(blob)) { + return err(new Error("Wrong body")); + } + end(); + }); + }, + function testSaveRaw(end) { + var newBody = bodec.fromUnicode("A new body\n"); + var bin = codec.frame({type:"blob",body:newBody}); + var hash = sha1(bin); + repo.saveRaw(hash, bin, function (err) { + if (err) return end(err); + repo.loadAs("blob", hash, function (err, body) { + if (err) return end(err); + if (bodec.toUnicode(body) !== bodec.toUnicode(newBody)) { + return end(new Error("Body mismatch")); + } + end(); + }); + }); + } +]); diff --git a/test/test-object-codec.js b/test/test-object-codec.js new file mode 100644 index 0000000..49fb75e --- /dev/null +++ b/test/test-object-codec.js @@ -0,0 +1,212 @@ +var modes = require('../lib/modes.js'); +var bodec = require('bodec'); +var sha1 = require('git-sha1'); +var run = require('./run.js'); + +// The thing 
we mean to test.
+var codec = require('../lib/object-codec.js');
+
+var blobHash, treeHash, commitHash, tagHash;
+var blob, tree, commit, tag;
+var blobBin, treeBin, commitBin, tagBin;
+
+run([
+  function testEncodeBlob() {
+    blob = bodec.fromUnicode("Hello World\n");
+    blobBin = codec.frame({type: "blob", body: blob});
+    blobHash = sha1(blobBin);
+    if (blobHash !== '557db03de997c86a4a028e1ebd3a1ceb225be238') {
+      throw new Error("Invalid blob hash");
+    }
+  },
+  function testEncodeBlobInvalidType() {
+    try {
+      codec.frame({type: "blob", body: "Not a binary value"});
+    }
+    catch (err) {
+      return;
+    }
+    throw new Error("Expected an error when passing in a non-binary blob");
+  },
+  function testEncodeTree() {
+    tree = {
+      "greeting.txt": {
+        mode: modes.file,
+        hash: blobHash
+      }
+    };
+    treeBin = codec.frame({type: "tree", body: tree});
+    treeHash = sha1(treeBin);
+    if (treeHash !== "648fc86e8557bdabbc2c828a19535f833727fa62") {
+      throw new Error("Invalid tree hash");
+    }
+  },
+  function testTreeSort() {
+    var tree = {
+      "README.md": {"mode":modes.blob,"hash":"42bd87a816800cb87646e95b71273983a71a26dc"},
+      "a.js": {"mode":modes.blob,"hash":"e69de29bb2d1d6434b8b29ae775ad8c2e48c5391"},
+      "a-js": {"mode":modes.blob,"hash":"e69de29bb2d1d6434b8b29ae775ad8c2e48c5391"},
+      "b": {"mode":modes.blob,"hash":"e69de29bb2d1d6434b8b29ae775ad8c2e48c5391"},
+      "b-js": {"mode":modes.blob,"hash":"e69de29bb2d1d6434b8b29ae775ad8c2e48c5391"},
+      "c": {"mode":modes.blob,"hash":"e69de29bb2d1d6434b8b29ae775ad8c2e48c5391"},
+      "c.js": {"mode":modes.blob,"hash":"e69de29bb2d1d6434b8b29ae775ad8c2e48c5391"},
+      "a": {"mode":modes.tree,"hash":"496d6428b9cf92981dc9495211e6e1120fb6f2ba"},
+      "b.js": {"mode":modes.tree,"hash":"496d6428b9cf92981dc9495211e6e1120fb6f2ba"},
+      "c-js": {"mode":modes.tree,"hash":"496d6428b9cf92981dc9495211e6e1120fb6f2ba"},
+    };
+    var treeBin = codec.frame({type: "tree", body: tree});
+    var treeHash = sha1(treeBin);
+    if (treeHash !== "f78893bf52bc695f343372d4210c8c0803c7c4db") {
+      throw new Error("Invalid tree hash");
+    }
+  },
+  function testEncodeCommit() {
+    var person = {
+      name: "Tim Caswell",
+      email: "tim@creationix.com",
+      date: {
+        seconds: 1391790884,
+        offset: 7 * 60
+      }
+    };
+    commit = {
+      tree: treeHash,
+      author: person,
+      committer: person,
+      message: "Test Commit\n",
+      parents: []
+    };
+    commitBin = codec.frame({type: "commit", body: commit});
+    commitHash = sha1(commitBin);
+    if (commitHash !== "500c37fc17988b90c82d812a2d6fc25b15354bf2") {
+      throw new Error("Invalid commit hash");
+    }
+  },
+  function testEncodeTag() {
+    tag = {
+      object: commitHash,
+      type: "commit",
+      tag: "mytag",
+      tagger: {
+        name: "Tim Caswell",
+        email: "tim@creationix.com",
+        date: {
+          seconds: 1391790910,
+          offset: 7 * 60
+        }
+      },
+      message: "Tag it!\n"
+    };
+    tagBin = codec.frame({type: "tag", body: tag});
+    tagHash = sha1(tagBin);
+    if (tagHash !== "49522787662a0183652dc9cafa5c008b5a0e0c2a") {
+      throw new Error("Invalid tag hash");
+    }
+  },
+  function testDecodeTag() {
+    var obj = codec.deframe(tagBin, true);
+    if (obj.type !== "tag") throw new Error("Invalid type");
+    if (!(obj.body.object === tag.object && obj.body.message === tag.message)) {
+      throw new Error("Problem decoding");
+    }
+  },
+  function testDecodeCommit() {
+    var obj = codec.deframe(commitBin, true);
+    if (obj.type !== "commit") throw new Error("Invalid type");
+    if (!(obj.body.tree === commit.tree &&
+          obj.body.message === commit.message &&
+          obj.body.author.date.seconds === commit.author.date.seconds)) {
+      throw new Error("Problem decoding");
+    }
}, + function testDecodeTree() { + var obj = codec.deframe(treeBin, true); + if (obj.type !== "tree") throw new Error("Invalid type"); + if (obj.body["greeting.txt"].hash !== tree["greeting.txt"].hash) { + throw new Error("Problem decoding"); + } + }, + function testDecodeBlob() { + var obj = codec.deframe(blobBin, true); + if (obj.type !== "blob") throw new Error("Invalid type"); + if (bodec.toUnicode(obj.body) !== bodec.toUnicode(blob)) { + throw new Error("Problem decoding"); + } + }, + function testUnicodeFilePath() { + var name = "æðelen"; + var tree = {}; + tree[name] = { + mode: modes.file, + hash: blobHash + }; + var bin = codec.frame({type:"tree", body: tree}); + var obj = codec.deframe(bin, true); + var newName = Object.keys(obj.body)[0]; + if (newName !== name) { + console.log(newName + " != " + name); + throw new Error("Problem storing and retrieving utf8 paths"); + } + if (obj.body[name].hash !== tree[name].hash) { + throw new Error("Problem decoding hash hex"); + } + }, + function testUnicodeCommit() { + var person = { + name: "Laȝamon", + email: "laȝamon@chronicles-of-england.org", + date: { + seconds: 1391790910, + offset: 7 * 60 + } + }; + var commit = { + tree: treeHash, + author: person, + committer: person, + message: "An preost wes on leoden, Laȝamon was ihoten\nHe wes Leovenaðes sone -- liðe him be Drihten\n", + parents: [] + }; + var bin = codec.frame({type:"commit", body:commit}); + var obj = codec.deframe(bin, true); + if (commit.author.name !== obj.body.author.name || + commit.author.email !== obj.body.author.email || + commit.message !== obj.body.message) { + console.log([obj.body.author, obj.body.message]); + throw new Error("Problem decoding utf8 parts in commit"); + } + }, + function testUnicodeTag() { + var tag = { + object: commitHash, + type: "commit", + tag: "Laȝamon", + tagger: { + name: "Laȝamon", + email: "laȝamon@chronicles-of-england.org", + date: { + seconds: 1391790910, + offset: 7 * 60 + } + }, + message: "He wonede at Ernleȝe at æðelen are chirechen,\nUppen Sevarne staþe, sel þar him þuhte,\nOnfest Radestone, þer he bock radde.\n" + }; + var bin = codec.frame({type:"tag", body:tag}); + var obj = codec.deframe(bin, true); + if (tag.tagger.name !== obj.body.tagger.name || + tag.tagger.email !== obj.body.tagger.email || + tag.message !== obj.body.message) { + console.log([obj.body.tagger, obj.body.message]); + throw new Error("Problem decoding utf8 parts in tag"); + } + }, + function testBinaryBlob() { + var blob = bodec.create(256); + for (var i = 0; i < 256; i++) { blob[i] = i; } + var bin = codec.frame({type:"blob",body:blob}); + var obj = codec.deframe(bin, true); + if (bodec.toRaw(blob) !== bodec.toRaw(obj.body)) { + throw new Error("Problem decoding binary blob"); + } + } +]); diff --git a/test/test-pack-codec.js b/test/test-pack-codec.js new file mode 100644 index 0000000..4f6c1d2 --- /dev/null +++ b/test/test-pack-codec.js @@ -0,0 +1,100 @@ +var bodec = require('bodec'); +var run = require('./run.js'); +var decoders = require('../lib/object-codec.js').decoders; +var encoders = require('../lib/object-codec.js').encoders; + +// The thing we mean to test. 
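+// pack-codec converts packfile byte streams to and from object items; the
+// sample pack used here contains two ofs-delta entries to exercise the
+// delta code paths.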
+var codec = require('../lib/pack-codec.js'); + +var pack = require('./sample-pack.js'); +var items = []; +var newPack; + +function unpackStream(stream) { + var meta, out = [], finished = false; + var write = codec.decodePack(onItem); + for (var i = 0, l = stream.length; i < l; i += 128) { + var slice = bodec.slice(stream, i, i + 128); + try { + // console.log("SLICE", slice); + write(slice); + } + catch (err) { + throw err; + } + } + write(); + + function onItem(item) { + // console.log("UNPACK", item); + if (item === undefined) { + finished = true; + } + else if (!meta) { + meta = item; + } + else { + out.push(item); + } + } + if (!finished) throw new Error("unpack stream didn't finish"); + if (out.length !== meta.num) throw new Error("Item num mismatch"); + return out; +} + + +run([ + function testDecodePack() { + var counts = {}; + items = unpackStream(pack).map(function (item) { + counts[item.type] = counts[item.type] || 0; + counts[item.type]++; + if (item.type === "tree" || item.type === "tag" || item.type === "commit") { + item.body = decoders[item.type](item.body); + } + return item; + }); + if (counts.commit !== 6) throw new Error("Wrong number of commits parsed"); + if (counts.tree !== 4) throw new Error("Wrong number of trees parsed"); + if (counts.blob !== 4) throw new Error("Wrong number of blobs parsed"); + if (counts['ofs-delta'] !== 2) throw new Error("Wrong number of offset deltas parsed"); + }, + function testEncodePack() { + var done = false; + var outs = []; + + var write = codec.encodePack(function (item) { + if (item === undefined) { + done = true; + return; + } + if (!bodec.isBinary(item)) throw new Error("encode output must be buffers"); + outs.push(item); + }); + write({num:items.length}); + items.forEach(function (item) { + if (!bodec.isBinary(item.body)) { + item.body = encoders[item.type](item.body); + } + write(item); + }); + write(); + + if (!done) throw new Error("Output stream never ended"); + + newPack = bodec.join(outs); + }, + function verifyEncodePack() { + try { + unpackStream(newPack); + if (bodec.toHex(pack) !== bodec.toHex(newPack)) { + throw new Error("Final pack doesn't match original."); + } + } + catch (err) { + console.log(bodec.toHex(pack)); + console.log(bodec.toHex(newPack)); + throw err; + } + } +]); diff --git a/test/test-pack-ops.js b/test/test-pack-ops.js new file mode 100644 index 0000000..001d958 --- /dev/null +++ b/test/test-pack-ops.js @@ -0,0 +1,55 @@ +var run = require('./run.js'); + +var repo = {}; +require('../mixins/mem-db.js')(repo); + +var pack = require('./sample-pack.js'); +var hashes; + +run([ + function setup() { + require('../mixins/pack-ops.js')(repo); + }, + function testUnpack(end) { + repo.unpack(singleStream(pack), { + onProgress: onProgress + }, function (err, result) { + if (err) return end(err); + hashes = result; + if (hashes.length !== 16) { + return end(new Error("Wrong number of objects unpacked")); + } + end(); + }); + function onProgress(progress) { + // console.log(progress); + } + }, + function testPack(end) { + var stream; + var parts = []; + repo.pack(hashes, {}, function (err, result) { + if (err) return end(err); + stream = result; + stream.take(onRead); + }); + function onRead(err, chunk) { + if (err) return end(err); + // console.log(chunk); + if (chunk) { + parts.push(chunk); + return stream.take(onRead); + } + end(); + } + } +]); + +function singleStream(item) { + var done = false; + return { take: function (callback) { + if (done) return callback(); + done = true; + callback(null, item); + }}; +} 
\ No newline at end of file diff --git a/test/test-sha1.js b/test/test-sha1.js deleted file mode 100644 index 7c30561..0000000 --- a/test/test-sha1.js +++ /dev/null @@ -1,51 +0,0 @@ -var sha1 = require('../lib/sha1.js'); - -var tests = [ - "", "da39a3ee5e6b4b0d3255bfef95601890afd80709", - "a", "86f7e437faa5a7fce15d1ddcb9eaeaea377667b8", - "abc", "a9993e364706816aba3e25717850c26c9cd0d89d", - "message digest", "c12252ceda8be8994d5fa0290a47231c1d16aae3", - "abcdefghijklmnopqrstuvwxyz", "32d10c7b8cf96570ca04ce37f2a19d84240d3a89", - "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq", - "84983e441c3bd26ebaae4aa1f95129e5e54670f1", - "abcdefghijklmnopqrstuvwxyz0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabc", - "a6319f25020d5ff8722d40ae750dbab67d94fe4f", - "abcdefghijklmnopqrstuvwxyz0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZab", - "edb3a03256d1c6d148034ec4795181931c933f46", - "abcdefghijklmnopqrstuvwxyz0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZa", - "677734f7bf40b2b244cae100bf365598fbf4741d", -]; - -for (var i = 0; i < tests.length; i += 2) { - var input = tests[i]; - console.log("\n" + JSON.stringify(input)); - var expectedHex = tests[i + 1]; - console.log(expectedHex); - var hash = sha1(input); - console.log(hash); - if (hash !== expectedHex) { - throw new Error(hash + " != " + expectedHex + " for '" + input + "'"); - } - var sha1sum = sha1(); - for (var j = 0, l = input.length; j < l; j += 17) { - sha1sum.update(input.substr(j, 17)); - } - hash = sha1sum.digest(); - console.log(hash); - if (hash !== expectedHex) { - throw new Error(hash + " != " + expectedHex + " for '" + input + "'"); - } -} - -console.log("\n1,000,000 repetitions of the character 'a'"); -var expectedHex = "34aa973cd4c4daa4f61eeb2bdbad27316534016f"; -console.log(expectedHex); -var sha1sum = sha1(); -for (var i = 0; i < 100000; i++) { - sha1sum.update("aaaaaaaaaa"); -} -var hash = sha1sum.digest(); -console.log(hash); -if (hash !== expectedHex) { - throw new Error(hash + " != " + expectedHex + " for '" + input + "'"); -} diff --git a/test/test-zlib.js b/test/test-zlib.js new file mode 100644 index 0000000..78f8090 --- /dev/null +++ b/test/test-zlib.js @@ -0,0 +1,44 @@ +var run = require('./run.js'); +var bodec = require('bodec'); + +// The thing we mean to test. +var inflate = require('../lib/inflate.js'); +var deflate = require('../lib/deflate.js'); +var inflateStream = require('../lib/inflate-stream.js'); + +var bin = bodec.create(1024); +for (var i = 0; i < 1024; i++) { + bin[i] = i >> 2 | i % 4 & 0x7f; +} + +run([ + function testRoundTrip() { + var deflated = deflate(bin); + if (!bodec.isBinary(deflated)) { + throw new Error("deflate output should be native binary"); + } + var inflated = inflate(deflated); + if (!bodec.isBinary(inflated)) { + throw new Error("inflate output should be native binary"); + } + if (bodec.toRaw(bin) !== bodec.toRaw(inflated)) { + console.log([bin, inflated]); + throw new Error("Problem with roundtrip"); + } + }, + function testStream() { + var done = false; + var chunks = []; + var deflated = deflate(bin); + var inf = inflateStream(); + + for (var i = 0, l = deflated.length; i < l; ++i) { + inf.write(deflated[i]); + } + var inflated = inf.flush(); + if (bodec.toRaw(bin) !== bodec.toRaw(inflated)) { + console.log([bin.length, inflated.length]); + throw new Error("Problem with roundtrip"); + } + } +]);
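The TAP runner in `test/run.js` treats a zero-argument function as a synchronous test (it passes unless it throws) and a one-argument function as asynchronous (it must call `end` before the one-second timeout fires). A minimal sketch of a new suite, with a hypothetical file name and placeholder assertions:

```js
// test/test-example.js (hypothetical file, shown only to illustrate run.js)
var run = require('./run.js');

run([
  // Synchronous test: passes unless it throws.
  function testArithmetic() {
    if (1 + 1 !== 2) throw new Error("math is broken");
  },
  // Asynchronous test: declared with an `end` parameter and must call it;
  // passing an Error to `end` marks the test as failed.
  function testDeferred(end) {
    setTimeout(end, 10);
  }
]);
```

Because of the new `scripts.test` entry in package.json, `npm test` picks such a file up automatically along with every other `test/test-*` module.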