Summary: Bumps [pako](https://github.com/nodeca/pako) from 1.0.11 to 2.0.2. <details> <summary>Changelog</summary> <p><em>Sourced from <a href="https://github.com/nodeca/pako/blob/master/CHANGELOG.md">pako's changelog</a>.</em></p> <blockquote> <h2>[2.0.2] - 2020-11-19</h2> <h3>Fixed</h3> <ul> <li>Fix esm build named exports.</li> </ul> <h2>[2.0.1] - 2020-11-17</h2> <h3>Changed</h3> <ul> <li>Changed esm build <code>.js</code> => <code>.mjs</code> to fix node.js <code>import</code>.</li> <li>Added <code>module</code> entry in package.json for some bundlers.</li> </ul> <h2>[2.0.0] - 2020-11-17</h2> <h3>Changed</h3> <ul> <li>Removed binary strings and <code>Array</code> support.</li> <li>Removed fallbacks for TypedArray methods (<code>.set()</code>, <code>.subarray()</code>).</li> <li>Rewritten top-level wrappers.</li> <li>Removed support of <code>Inflate</code> & <code>Deflate</code> instance create without <code>new</code>.</li> <li><code>Inflate.push()</code> no longer needs second param (end is auto-detected).</li> <li>Increased default inflate chunk size to 64K.</li> <li>Moved exported constants to <code>.constants</code>.</li> <li>Switched to es6. 
Legacy es5 builds available in <code>/dist</code>.</li> <li>Added esm build.</li> <li>Structure of <code>/dist</code> folder changed.</li> <li>Upgraded build tools to modern ones.</li> </ul> </blockquote> </details> <details> <summary>Commits</summary> <ul> <li><a href="45cce9f4d6"><code>45cce9f</code></a> 2.0.2 released</li> <li><a href="b3861d9a66"><code>b3861d9</code></a> dist rebuild</li> <li><a href="d0382badcc"><code>d0382ba</code></a> Fix esm build named exports</li> <li><a href="8d5f9c70f8"><code>8d5f9c7</code></a> 2.0.1 released</li> <li><a href="70ee7697ac"><code>70ee769</code></a> dist rebuild</li> <li><a href="bd90fca738"><code>bd90fca</code></a> Add <code>module</code> entry for some bundlers</li> <li><a href="84d6931fe8"><code>84d6931</code></a> Rename module build .js => .mjs to fix node import (<a href="https://github-redirect.dependabot.com/nodeca/pako/issues/200">https://github.com/nodeca/pako/issues/200</a>)</li> <li><a href="52df0c510f"><code>52df0c5</code></a> 2.0.0 released</li> <li><a href="a8faeffc94"><code>a8faeff</code></a> dist rebuild</li> <li><a href="b4d9a94488"><code>b4d9a94</code></a> Added esm build, <a href="https://github-redirect.dependabot.com/nodeca/pako/issues/97">https://github.com/nodeca/pako/issues/97</a></li> <li>Additional commits viewable in <a href="https://github.com/nodeca/pako/compare/1.0.11...2.0.2">compare view</a></li> </ul> </details> <br /> [](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `dependabot rebase`. 
[//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) --- <details> <summary>Dependabot commands and options</summary> <br /> You can trigger Dependabot actions by commenting on this PR: - `dependabot rebase` will rebase this PR - `dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `dependabot merge` will merge this PR after your CI passes on it - `dependabot squash and merge` will squash and merge this PR after your CI passes on it - `dependabot cancel merge` will cancel a previously requested merge and block automerging - `dependabot reopen` will reopen this PR if it is closed - `dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) </details> Pull Request resolved: https://github.com/facebook/flipper/pull/1786 Reviewed By: passy Differential Revision: D25664507 Pulled By: cekkaewnumchai fbshipit-source-id: bd33a7a11ef38b54675cde31d1243742476263d9
104 lines
3.2 KiB
TypeScript
104 lines
3.2 KiB
TypeScript
/**
|
|
* Copyright (c) Facebook, Inc. and its affiliates.
|
|
*
|
|
* This source code is licensed under the MIT license found in the
|
|
* LICENSE file in the root directory of this source tree.
|
|
*
|
|
* @format
|
|
*/
|
|
|
|
import pako from 'pako';
|
|
import {Request, Response, Header} from './types';
|
|
import {Base64} from 'js-base64';
|
|
|
|
export function getHeaderValue(headers: Array<Header>, key: string): string {
|
|
for (const header of headers) {
|
|
if (header.key.toLowerCase() === key.toLowerCase()) {
|
|
return header.value;
|
|
}
|
|
}
|
|
return '';
|
|
}
|
|
|
|
export function decodeBody(container: Request | Response): string {
|
|
if (!container.data) {
|
|
return '';
|
|
}
|
|
|
|
try {
|
|
const isGzip =
|
|
getHeaderValue(container.headers, 'Content-Encoding') === 'gzip';
|
|
if (isGzip) {
|
|
try {
|
|
const binStr = Base64.atob(container.data);
|
|
const dataArr = new Uint8Array(binStr.length);
|
|
for (let i = 0; i < binStr.length; i++) {
|
|
dataArr[i] = binStr.charCodeAt(i);
|
|
}
|
|
// The request is gzipped, so convert the base64 back to the raw bytes first,
|
|
// then inflate. pako will detect the BOM headers and return a proper utf-8 string right away
|
|
return pako.inflate(dataArr, {to: 'string'});
|
|
} catch (e) {
|
|
// on iOS, the stream send to flipper is already inflated, so the content-encoding will not
|
|
// match the actual data anymore, and we should skip inflating.
|
|
// In that case, we intentionally fall-through
|
|
if (!('' + e).includes('incorrect header check')) {
|
|
throw e;
|
|
}
|
|
}
|
|
}
|
|
// If this is not a gzipped request, assume we are interested in a proper utf-8 string.
|
|
// - If the raw binary data in is needed, in base64 form, use container.data directly
|
|
// - either directly use container.data (for example)
|
|
return Base64.decode(container.data);
|
|
} catch (e) {
|
|
console.warn(
|
|
`Flipper failed to decode request/response body (size: ${container.data.length}): ${e}`,
|
|
);
|
|
return '';
|
|
}
|
|
}
|
|
|
|
export function convertRequestToCurlCommand(request: Request): string {
|
|
let command: string = `curl -v -X ${request.method}`;
|
|
command += ` ${escapedString(request.url)}`;
|
|
// Add headers
|
|
request.headers.forEach((header: Header) => {
|
|
const headerStr = `${header.key}: ${header.value}`;
|
|
command += ` -H ${escapedString(headerStr)}`;
|
|
});
|
|
// Add body. TODO: we only want this for non-binary data! See D23403095
|
|
const body = decodeBody(request);
|
|
if (body) {
|
|
command += ` -d ${escapedString(body)}`;
|
|
}
|
|
return command;
|
|
}
|
|
|
|
function escapeCharacter(x: string) {
|
|
const code = x.charCodeAt(0);
|
|
return code < 16 ? '\\u0' + code.toString(16) : '\\u' + code.toString(16);
|
|
}
|
|
|
|
const needsEscapingRegex = /[\u0000-\u001f\u007f-\u009f!]/g;
|
|
|
|
// Escape util function, inspired by Google DevTools. Works only for POSIX
|
|
// based systems.
|
|
function escapedString(str: string) {
|
|
if (needsEscapingRegex.test(str) || str.includes("'")) {
|
|
return (
|
|
"$'" +
|
|
str
|
|
.replace(/\\/g, '\\\\')
|
|
.replace(/\'/g, "\\'")
|
|
.replace(/\n/g, '\\n')
|
|
.replace(/\r/g, '\\r')
|
|
.replace(needsEscapingRegex, escapeCharacter) +
|
|
"'"
|
|
);
|
|
}
|
|
|
|
// Simply use singly quoted string.
|
|
return "'" + str + "'";
|
|
}
|