diff --git a/CHANGELOG.md b/CHANGELOG.md
index f321e0e..7971c3c 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -5,6 +5,12 @@ Changelog
# 2.x release
+## v2.2.1
+
+- Fix: `compress` flag shouldn't overwrite existing `Accept-Encoding` header.
+- Fix: multiple `import` rules, where `PassThrough` etc. aren't named exports when using node <10 and the `--experimental-modules` flag.
+- Other: Better README.
+
## v2.2.0
- Enhance: Support all `ArrayBuffer` view types
diff --git a/LIMITS.md b/LIMITS.md
index bdcf66a..9c4b8c0 100644
--- a/LIMITS.md
+++ b/LIMITS.md
@@ -26,5 +26,7 @@ Known differences
- If you are using `res.clone()` and writing an isomorphic app, note that stream on Node.js have a smaller internal buffer size (16Kb, aka `highWaterMark`) from client-side browsers (>1Mb, not consistent across browsers).
+- Because Node.js streams don't expose a [*disturbed*](https://fetch.spec.whatwg.org/#concept-readablestream-disturbed) property as the Streams spec does, using a consumed stream for `new Response(body)` will not set the `bodyUsed` flag correctly.
+
[readable-stream]: https://nodejs.org/api/stream.html#stream_readable_streams
[ERROR-HANDLING.md]: https://github.com/bitinn/node-fetch/blob/master/ERROR-HANDLING.md
diff --git a/README.md b/README.md
index af7c8dd..3b20230 100644
--- a/README.md
+++ b/README.md
@@ -183,8 +183,6 @@ fetch('https://assets-cdn.github.com/images/modules/logos_page/Octocat.png')
});
```
-[TODO]: # (Somewhere i think we also should mention arrayBuffer also if you want to be cross-fetch compatible.)
-
#### Buffer
If you prefer to cache binary data in full, use buffer(). (NOTE: buffer() is a `node-fetch` only API)
@@ -265,8 +263,6 @@ Perform an HTTP(S) fetch.
`url` should be an absolute url, such as `https://example.com/`. A path-relative URL (`/file/under/root`) or protocol-relative URL (`//can-be-http-or-https.com/`) will result in a rejected promise.
-[TODO]: # (It might be a good idea to reformat the options section into a table layout, like the headers section, instead of current code block.)
-
### Options
@@ -285,7 +281,7 @@ The default values are shown after each option key.
timeout: 0, // req/res timeout in ms, it resets on redirect. 0 to disable (OS limit applies)
compress: true, // support gzip/deflate content encoding. false to disable
size: 0, // maximum response body size in bytes. 0 to disable
- agent: null // http(s).Agent instance, allows custom proxy, certificate etc.
+ agent: null // http(s).Agent instance, allows custom proxy, certificate, dns lookup etc.
}
```
@@ -293,15 +289,14 @@ The default values are shown after each option key.
If no values are set, the following request headers will be sent automatically:
-[TODO]: # ("we always said content-length will be "automatically calculated, if possible" in the default header section, but we never explain what's the condition for it to be calculated, and that chunked transfer-encoding will be used when they are not calculated or supplied." - "Maybe also add Transfer-Encoding: chunked? That header is added by Node.js automatically if the input is a stream, I believe.")
-
-Header | Value
------------------ | --------------------------------------------------------
-`Accept-Encoding` | `gzip,deflate` _(when `options.compress === true`)_
-`Accept` | `*/*`
-`Connection` | `close` _(when no `options.agent` is present)_
-`Content-Length` | _(automatically calculated, if possible)_
-`User-Agent` | `node-fetch/1.0 (+https://github.com/bitinn/node-fetch)`
+Header | Value
+------------------- | --------------------------------------------------------
+`Accept-Encoding` | `gzip,deflate` _(when `options.compress === true`)_
+`Accept` | `*/*`
+`Connection` | `close` _(when no `options.agent` is present)_
+`Content-Length` | _(automatically calculated, if possible)_
+`Transfer-Encoding` | `chunked` _(when `req.body` is a stream)_
+`User-Agent` | `node-fetch/1.0 (+https://github.com/bitinn/node-fetch)`
### Class: Request
@@ -451,8 +446,6 @@ Consume the body and return a promise that will resolve to one of these formats.
Consume the body and return a promise that will resolve to a Buffer.
-[TODO]: # (textConverted API should mention an optional dependency on encoding, which users need to install by themselves, and this is done purely for backward compatibility with 1.x release.)
-
#### body.textConverted()
*(node-fetch extension)*
@@ -461,6 +454,8 @@ Consume the body and return a promise that will resolve to a Buffer.
Identical to `body.text()`, except instead of always converting to UTF-8, encoding sniffing will be performed and text converted to UTF-8, if possible.
+(This API requires an optional dependency on npm package [encoding](https://www.npmjs.com/package/encoding), which you need to install manually. `webpack` users may see [a warning message](https://github.com/bitinn/node-fetch/issues/412#issuecomment-379007792) due to this optional dependency.)
+
### Class: FetchError
diff --git a/package.json b/package.json
index db0d97b..577d467 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
{
"name": "node-fetch",
- "version": "2.2.0",
+ "version": "2.2.1",
"description": "A light-weight module that brings window.fetch to node.js",
"main": "lib/index",
"browser": "./browser.js",
diff --git a/src/body.js b/src/body.js
index c6dd4b6..d39149f 100644
--- a/src/body.js
+++ b/src/body.js
@@ -5,7 +5,8 @@
* Body interface provides common methods for Request and Response
*/
-import Stream, { PassThrough } from 'stream';
+import Stream from 'stream';
+
import Blob, { BUFFER } from './blob.js';
import FetchError from './fetch-error.js';
@@ -14,6 +15,9 @@ try { convert = require('encoding').convert; } catch(e) {}
const INTERNALS = Symbol('Body internals');
+// fix an issue where "PassThrough" isn't a named export for node <10
+const PassThrough = Stream.PassThrough;
+
/**
* Body mixin
*
diff --git a/src/index.js b/src/index.js
index fa485e4..dca44aa 100644
--- a/src/index.js
+++ b/src/index.js
@@ -7,11 +7,11 @@
* All spec algorithm step numbers are based on https://fetch.spec.whatwg.org/commit-snapshots/ae716822cb3a61843226cd090eefc6589446c1d2/.
*/
-import { resolve as resolve_url } from 'url';
+import Url from 'url';
import http from 'http';
import https from 'https';
import zlib from 'zlib';
-import { PassThrough } from 'stream';
+import Stream from 'stream';
import Body, { writeToStream, getTotalBytes } from './body';
import Response from './response';
@@ -19,6 +19,10 @@ import Headers, { createHeadersLenient } from './headers';
import Request, { getNodeRequestOptions } from './request';
import FetchError from './fetch-error';
+// fix an issue where "PassThrough", "resolve" aren't named exports for node <10
+const PassThrough = Stream.PassThrough;
+const resolve_url = Url.resolve;
+
/**
* Fetch function
*
diff --git a/src/request.js b/src/request.js
index 748ba55..c4d2959 100644
--- a/src/request.js
+++ b/src/request.js
@@ -7,12 +7,17 @@
* All spec algorithm step numbers are based on https://fetch.spec.whatwg.org/commit-snapshots/ae716822cb3a61843226cd090eefc6589446c1d2/.
*/
-import { format as format_url, parse as parse_url } from 'url';
+import Url from 'url';
+
import Headers, { exportNodeCompatibleHeaders } from './headers.js';
import Body, { clone, extractContentType, getTotalBytes } from './body';
const INTERNALS = Symbol('Request internals');
+// fix an issue where "format", "parse" aren't named exports for node <10
+const parse_url = Url.parse;
+const format_url = Url.format;
+
/**
* Check if a value is an instance of Request.
*
@@ -187,9 +192,10 @@ export function getNodeRequestOptions(request) {
}
// HTTP-network-or-cache fetch step 2.15
- if (request.compress) {
+ if (request.compress && !headers.has('Accept-Encoding')) {
headers.set('Accept-Encoding', 'gzip,deflate');
}
+
if (!headers.has('Connection') && !request.agent) {
headers.set('Connection', 'close');
}
diff --git a/src/response.js b/src/response.js
index 506d876..ce946f6 100644
--- a/src/response.js
+++ b/src/response.js
@@ -5,12 +5,16 @@
* Response class provides content decoding
*/
-import { STATUS_CODES } from 'http';
+import http from 'http';
+
import Headers from './headers.js';
import Body, { clone } from './body';
const INTERNALS = Symbol('Response internals');
+// fix an issue where "STATUS_CODES" isn't a named export for node <10
+const STATUS_CODES = http.STATUS_CODES;
+
/**
* Response class
*
diff --git a/test/test.js b/test/test.js
index e8342cb..ef57940 100644
--- a/test/test.js
+++ b/test/test.js
@@ -731,6 +731,19 @@ describe('node-fetch', () => {
});
});
+ it('should not overwrite existing accept-encoding header when auto decompression is true', function() {
+ const url = `${base}inspect`;
+ const opts = {
+ compress: true,
+ headers: {
+ 'Accept-Encoding': 'gzip'
+ }
+ };
+ return fetch(url, opts).then(res => res.json()).then(res => {
+ expect(res.headers['accept-encoding']).to.equal('gzip');
+ });
+ });
+
it('should allow custom timeout', function() {
this.timeout(500);
const url = `${base}timeout`;
@@ -782,7 +795,7 @@ describe('node-fetch', () => {
it('should set default User-Agent', function () {
const url = `${base}inspect`;
- fetch(url).then(res => res.json()).then(res => {
+ return fetch(url).then(res => res.json()).then(res => {
expect(res.headers['user-agent']).to.startWith('node-fetch/');
});
});
@@ -794,7 +807,7 @@ describe('node-fetch', () => {
'user-agent': 'faked'
}
};
- fetch(url, opts).then(res => res.json()).then(res => {
+ return fetch(url, opts).then(res => res.json()).then(res => {
expect(res.headers['user-agent']).to.equal('faked');
});
});
@@ -813,7 +826,7 @@ describe('node-fetch', () => {
'accept': 'application/json'
}
};
- fetch(url, opts).then(res => res.json()).then(res => {
+ return fetch(url, opts).then(res => res.json()).then(res => {
expect(res.headers.accept).to.equal('application/json');
});
});