Merge pull request #7 from cubehero/streaming
Added data streaming for blob files
notatestuser committed Mar 8, 2014
2 parents 6531a0a + afd9176 commit d15426c
Showing 5 changed files with 95 additions and 3 deletions.
15 changes: 13 additions & 2 deletions .gitignore
@@ -1,2 +1,13 @@
lib
node_modules
# ignore emacs and other editor files
*~
\#*\#
.\#*
*.swp
*.swo

src/*.js
test/*.js

lib/
node_modules/

19 changes: 19 additions & 0 deletions README.md
@@ -302,6 +302,25 @@ Each file has the following properties:

* `callback` - `(err, data)`

Warning: this method returns the complete file only up to 200k, the default
buffer size for `child_process.exec()`. If the file you're reading is bigger than
that, or if you're not sure, use `dataStream()` instead.

### `Blob#dataStream()`

* returns - `[dataStream, errorStream]`

Returns a pair of streams for reading the blob's data and any error output.

Usage:

data = ""
[dataStream, _] = blob.dataStream()
dataStream.on 'data', (buf) ->
data += buf.toString()
.on 'end', ->
callback(data)
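
Since `dataStream()` returns plain Node streams, you can also pipe the blob to a
writable destination instead of buffering it in memory. A minimal sketch, writing
to a hypothetical output path:

    fs = require 'fs'
    [dataStream, _] = blob.dataStream()
    dataStream.pipe fs.createWriteStream('/tmp/blob-copy')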

## Submodule
### `Submodule#id`
`String`
16 changes: 16 additions & 0 deletions src/blob.coffee
@@ -8,10 +8,26 @@ module.exports = class Blob
#
# callback - Receives `(err, data)`.
#
# Warning: this only returns files smaller than 200k, the default buffer size
# for node's exec(). If you need bigger files, use dataStream() to get a
# stream of the file's data.
#
data: (callback) ->
  @repo.git "cat-file", {p: true}, @id
  , (err, stdout, stderr) ->
    return callback err, stdout

# Public: Get the blob contents as a stream
#
# returns - [dataStream, errstream]
#
# Usage:
# [blobstream, _] = blob.dataStream()
# blobstream.pipe(res)
#
dataStream: () ->
  streams = @repo.git.streamCmd "cat-file", {p: true}, [@id]
  return streams

toString: ->
  "#<Blob '#{@id}'>"
15 changes: 14 additions & 1 deletion src/git.coffee
@@ -1,5 +1,5 @@
fs = require 'fs'
{exec} = require 'child_process'
{exec, spawn} = require 'child_process'

module.exports = Git = (git_dir, dot_git) ->
  dot_git ||= "#{git_dir}/.git"
@@ -21,6 +21,19 @@ module.exports = Git = (git_dir, dot_git) ->
git.cmd = (command, options, args, callback) ->
  git command, options, args, callback

# Public: Stream the results of a git command.
#
# Use this for large files that need to be streamed rather than buffered.
#
# returns - [outstream, errstream]
#
git.streamCmd = (command, options, args) ->
  options ?= {}
  options = options_to_argv options
  args ?= []
  allargs = [command].concat(options).concat(args)
  process = spawn Git.bin, allargs, {cwd: git_dir, encoding: 'binary'}
  return [process.stdout, process.stderr]

# Public: Get a list of the remote names.
#
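`streamCmd` deliberately returns the raw stdout/stderr pair and leaves error
handling to the caller, unlike `git.cmd`, which funnels everything through a
callback. A hedged usage sketch, where `repo` is a repository opened with this
library and `blobId` and `someWritableStream` are stand-ins:

    [out, err] = repo.git.streamCmd "cat-file", {p: true}, [blobId]
    err.pipe process.stderr      # surface any git errors
    out.pipe someWritableStream  # e.g. an HTTP response or a file stream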
33 changes: 33 additions & 0 deletions test/blob.test.coffee
@@ -43,3 +43,36 @@ describe "Blob", ->
        data.should.be.type "string"
        data.should.include "!!!"

  describe "#dataStream", ->
    describe "of a file off the root", ->
      repo = git "#{__dirname}/fixtures/branched"
      data = ""
      before (done) ->
        repo.tree().blobs (err, blobs) ->
          [dataStream, _] = blobs[0].dataStream()
          dataStream.on 'data', (buf) ->
            data += buf.toString()
          .on 'end', ->
            done()

      it "is a string", ->
        data.should.be.type "string"
        data.should.include "Bla"

    describe "of a file in a subdir", ->
      repo = git "#{__dirname}/fixtures/branched"
      data = ""
      before (done) ->
        repo.tree().trees (err, trees) ->
          trees[0].blobs (err, blobs) ->
            [dataStream, _] = blobs[0].dataStream()
            dataStream.on 'data', (buf) ->
              data += buf.toString()
            .on 'end', ->
              done()

      it "is a string", ->
        data.should.be.type "string"
        data.should.include "!!!"

