Extract header.

parent b77ee78afd
commit 8de753eec9

11 changed files with 108 additions and 35 deletions
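
The change threads libcurl's header data through the same path the response body already uses: Curl's on() dispatcher gains a 'header' case, CurlBuilder collects the chunks into c.header_chunks, merges them with a new merge_chunks helper, and publishes the result as curl.header next to curl.body. Both the ToffeeScript sources and the generated .js files are updated. A rough sketch of what a caller sees after this commit (the create()/callback shape is illustrative; only the fields come from the hunks below):

    curl = CurlBuilder.create()          # create() as shown in the hunks below
    curl 'http://example.com', (err) ->  # illustrative call shape
      console.log curl.status            # from c.getinfo('RESPONSE_CODE')
      console.log curl.header            # new: merged response header text
      console.log curl.body              # as before: merged response body
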
@@ -1,4 +1,4 @@
-// Generated by ToffeeScript 1.6.2
+// Generated by ToffeeScript 1.6.2-5
 (function() {
   var Curl, curls, e, id, m, p,
     __hasProp = {}.hasOwnProperty;
@@ -102,6 +102,11 @@
           return callback.call(_this, chunk);
         };
         break;
+      case 'header':
+        this.on_header = function(chunk) {
+          return callback.call(_this, chunk);
+        };
+        break;
       case 'error':
         this.on_error = function(e) {
           delete curls[_this.id];

@@ -78,6 +78,9 @@ Curl::on = (event, callback) ->
       # (Buffer chunk) ->
       @on_write = (chunk) =>
         callback.call @, chunk
+    when 'header'
+      @on_header = (chunk) =>
+        callback.call @, chunk
     when 'error'
       # (Error error) ->
       @on_error = (e) =>
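
The 'header' case added to Curl::on mirrors the existing 'data' case, so any consumer can subscribe to raw header chunks the same way CurlBuilder does further down. A minimal sketch, assuming a low-level Curl instance named c:

    header_chunks = []
    c.on 'header', (chunk) ->
      header_chunks.push chunk   # chunk is a Buffer of raw header bytes
      chunk.length               # return the byte count, as the 'data' handler does
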
@@ -1,4 +1,4 @@
-// Generated by ToffeeScript 1.6.2
+// Generated by ToffeeScript 1.6.2-5
 (function() {
   var Curl, CurlBuilder, e,
     __hasProp = {}.hasOwnProperty,
@@ -11,6 +11,18 @@
     Curl = require(__dirname + '/Curl');
   }

+  function merge_chunks(chunks, length) {
+    var chunk, data, position, _i, _len;
+    data = new Buffer(length);
+    position = 0;
+    for (_i = 0, _len = chunks.length; _i < _len; _i++) {
+      chunk = chunks[_i];
+      chunk.copy(data, position);
+      position += chunk.length;
+    }
+    return data;
+  };
+
   CurlBuilder = (function() {
     function CurlBuilder() {}

@@ -31,12 +43,11 @@
     };

     CurlBuilder.create = function(defaultOptions) {
-      var curl;
-      curl = function() {
+      function curl() {
        return curl.perform.apply(curl, arguments);
      };
      curl.perform = function() {
-        var args, c, cb, k, length, v, _ref, _ref1, _ref2, _ref3, _ref4;
+        var args, c, cb, header_length, k, length, v, _ref, _ref1, _ref2, _ref3, _ref4;
        args = 1 <= arguments.length ? __slice.call(arguments, 0) : [];
        if (this.running) {
          throw new Error('the cURL session is busy, use curl.create to create another cURL Session');
@@ -51,6 +62,7 @@
          this.options = {};
        }
        length = 0;
+        header_length = 0;
        this.debug = (_ref1 = (_ref2 = this.defaultOptions.DEBUG) != null ? _ref2 : this.options.DEBUG) != null ? _ref1 : this.debug;
        this.effectiveOptions = {};
        _ref3 = this.defaultOptions;
@@ -69,30 +81,34 @@
        });
        c = this.curl_;
        c.chunks = [];
+        c.header_chunks = [];
        c.on('data', function(chunk) {
          curl.log("receive " + chunk.length + " bytes");
          c.chunks.push(chunk);
          length += chunk.length;
          return chunk.length;
        });
+        c.on('header', function(chunk) {
+          curl.log("receive " + chunk.length + " header");
+          c.header_chunks.push(chunk);
+          header_length += chunk.length;
+          return chunk.length;
+        });
        c.on('end', function() {
-          var chunk, data, position, _i, _len, _ref5,
+          var data, header,
            _this = this;
          curl.log("receive succeeded.");
          curl.running = false;
-          data = new Buffer(length);
-          position = 0;
-          _ref5 = c.chunks;
-          for (_i = 0, _len = _ref5.length; _i < _len; _i++) {
-            chunk = _ref5[_i];
-            chunk.copy(data, position);
-            position += chunk.length;
-          }
+          data = merge_chunks(c.chunks, length);
+          header = merge_chunks(c.header_chunks, header_length);
          c.chunks = [];
+          c.header_chunks = [];
          if (c.options.RAW) {
            curl.body = data;
+            curl.header = header;
          } else {
            curl.body = data.toString();
+            curl.header = header.toString();
          }
          curl.status = curl.code = c.getinfo('RESPONSE_CODE');
          process.nextTick(function() {
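
merge_chunks copies each collected Buffer into one preallocated Buffer of the known total length. On Node 0.8 and later Buffer.concat does the same job, so the helper is presumably kept for older runtimes; the two merge lines in the 'end' handler above are equivalent to:

    data   = Buffer.concat(c.chunks, length)
    header = Buffer.concat(c.header_chunks, header_length)
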
@@ -3,6 +3,14 @@ try
 catch e
   Curl = require __dirname + '/Curl'

+merge_chunks = (chunks, length) ->
+  data = new Buffer(length)
+  position = 0
+  for chunk in chunks
+    chunk.copy data, position
+    position += chunk.length
+  data
+
 class CurlBuilder
   @curls: {}
   @id: 0
@@ -31,6 +39,7 @@ class CurlBuilder
       @options ?= {}

       length = 0
+      header_length = 0

       @debug = @defaultOptions.DEBUG ? @options.DEBUG ? @debug
       @effectiveOptions = {}
@@ -45,26 +54,33 @@ class CurlBuilder

       c = @curl_
       c.chunks = []
+      c.header_chunks = []
       c.on 'data', (chunk) ->
         curl.log "receive #{chunk.length} bytes"
         c.chunks.push chunk
         length += chunk.length
         chunk.length

+      c.on 'header', (chunk) ->
+        curl.log "receive #{chunk.length} header"
+        c.header_chunks.push chunk
+        header_length += chunk.length
+        chunk.length
+
       c.on 'end', ->
         curl.log "receive succeeded."
         curl.running = false
-        data = new Buffer(length)
-        position = 0
-        for chunk in c.chunks
-          chunk.copy data, position
-          position += chunk.length
+        data = merge_chunks(c.chunks, length)
+        header = merge_chunks(c.header_chunks, header_length)
         c.chunks = []
+        c.header_chunks = []

         if c.options.RAW
           curl.body = data
+          curl.header = header
         else
           curl.body = data.toString()
+          curl.header = header.toString()
         curl.status = curl.code = c.getinfo('RESPONSE_CODE')

         # if curl returns to fast, avoid cb recursive call
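
Note that curl.header follows the same RAW switch as curl.body in the 'end' handler above: with options.RAW set both stay Buffers, otherwise both are decoded with toString(). An illustrative check on a finished request object:

    if Buffer.isBuffer(curl.body)
      # RAW was set: the header is the raw Buffer as well
      console.log "#{curl.header.length} header bytes"
    else
      # default: both fields were decoded to strings
      console.log curl.header.split('\r\n')[0]   # e.g. "HTTP/1.1 200 OK"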