Fix Flipper crashing when decoding partial encoded requests

Summary:
Changelog: Fixed an issue where Flipper would crash when decoding large partial requests.

The current processing of partial requests assumes that the provided base64 string always encodes a UTF-8 string, which is incorrect, as it might contain binary data as well. This causes the built-in `atob` to throw when trying to decode base64 strings that encode binary data, with the following exception:

{F538782963}
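
One way to see why treating binary data as UTF-8 text breaks this pipeline is the sketch below (not the actual Flipper code; it assumes the `atob`/`btoa` globals available in Electron's renderer). Decoding binary bytes as UTF-8 replaces invalid sequences with U+FFFD, and re-encoding the result with `btoa` then throws, since `btoa` only accepts code points up to 0xFF:

```typescript
// A binary payload (the first bytes of a PNG header), base64-encoded:
const binaryBase64 = Buffer.from([0x89, 0x50, 0x4e, 0x47]).toString('base64');

// Old approach: base64 -> binary string -> UTF-8 string -> base64.
const binaryString = atob(binaryBase64); // fine: one char per byte
const bytes = Uint8Array.from(binaryString, (c) => c.charCodeAt(0));
const utf8 = new TextDecoder('utf-8').decode(bytes); // 0x89 is not valid UTF-8 -> U+FFFD
btoa(utf8); // throws InvalidCharacterError: U+FFFD is outside the 0x00-0xFF range
```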

Worse, if those strings were larger than ~2 MB, the failure would crash Electron itself rather than surfacing at the JS level, with crash reports like:

```

Crashed Thread:        0  CrRendererMain  Dispatch queue: com.apple.main-thread

Exception Type:        EXC_BAD_ACCESS (SIGSEGV)
Exception Codes:       EXC_I386_GPFLT
Exception Note:        EXC_CORPSE_NOTIFY

Termination Signal:    Segmentation fault: 11
Termination Reason:    Namespace SIGNAL, Code 0xb
Terminating Process:   exc handler [85268]

Thread 0 Crashed:: CrRendererMain  Dispatch queue: com.apple.main-thread
0   com.github.Electron.framework 	0x000000011155b16f v8::internal::SetupIsolateDelegate::SetupHeap(v8::internal::Heap*) + 22324575
1   com.github.Electron.framework 	0x000000011155e811 v8::internal::SetupIsolateDelegate::SetupHeap(v8::internal::Heap*) + 22338561
2   com.github.Electron.framework 	0x00000001117e2e62 v8::internal::SetupIsolateDelegate::SetupHeap(v8::internal::Heap*) + 24978002
3   com.github.Electron.framework 	0x000000010fa32660 v8::internal::ClassScope::ResolvePrivateNamesPartially() + 14944
4   com.github.Electron.framework 	0x000000010fa322b5 v8::internal::ClassScope::ResolvePrivateNamesPartially() + 14005
5   com.github.Electron.framework 	0x000000010fa31933 v8::internal::ClassScope::ResolvePrivateNamesPartially() + 11571
6   com.github.Electron.framework 	0x000000011007ef58 v8::internal::SetupIsolateDelegate::SetupHeap(v8::internal::Heap*) + 451400
```

Reproduced this JS issue by lowering the `MAX_BODY_SIZE_IN_BYTES` in `NetworkFlipperPlugin.java` to 10KB, which causes all requests to be processed as partials.

Reproducing the Electron crash is a lot harder, as it requires a surface that makes large binary requests (more than a few MB) that are still intercepted by the Network layer. The best example I could find is sending large pictures or videos through a Messenger for Android chat, and even then the crash is hard to reproduce due to caching.

Fun fact: you can crash your own Flipper and get the above crash by running this command:
`btoa(require("fs").readFileSync("/Users/mweststrate/Desktop/Screen Recording 2021-03-24 at 16.08.27 crop.mov", "binary"))`, where the provided file must be a few MB in size (this one is 10).

As a result of fixing this issue, images that were sent as partials can now be previewed correctly in the Network plugin again.

Reviewed By: jknoxville

Differential Revision: D27302961

fbshipit-source-id: 1ac86840f7268062bb59c789f3904537df3c51fa
Author: Michel Weststrate
Date: 2021-03-25 05:02:05 -07:00
Committed by: Facebook GitHub Bot
Commit: f095a00c78 (parent: 2622c084df)
9 changed files with 264 additions and 106 deletions

The network plugin's `chunks` test suite:

```diff
@@ -8,8 +8,16 @@
  */
 import {combineBase64Chunks} from '../chunks';
-import {TestUtils} from 'flipper-plugin';
+import {TestUtils, createState} from 'flipper-plugin';
 import * as NetworkPlugin from '../index';
+import {assembleChunksIfResponseIsComplete} from '../chunks';
+import path from 'path';
+import {PartialResponses, Response} from '../types';
+import {Base64} from 'js-base64';
+import * as fs from 'fs';
+import {promisify} from 'util';
+
+const readFile = promisify(fs.readFile);
 
 test('Test assembling base64 chunks', () => {
   const message = 'wassup john?';
@@ -20,7 +28,7 @@ test('Test assembling base64 chunks', () => {
   }
 
   const output = combineBase64Chunks(chunks);
-  expect(output).toBe('wassup john?');
+  expect(Base64.decode(output)).toBe('wassup john?');
 });
 
 test('Reducer correctly adds initial chunk', () => {
@@ -121,3 +129,47 @@ test('Reducer correctly combines initial response and followup chunk', () => {
   }
 `);
 });
+
+async function readJsonFixture(filename: string) {
+  return JSON.parse(
+    await readFile(path.join(__dirname, 'fixtures', filename), 'utf-8'),
+  );
+}
+
+test('handle small binary payloads correctly', async () => {
+  const input = await readJsonFixture('partial_failing_example.json');
+  const partials = createState<PartialResponses>({
+    test: input,
+  });
+  const responses = createState<Record<string, Response>>({});
+  expect(() => {
+    // this used to throw
+    assembleChunksIfResponseIsComplete(partials, responses, 'test');
+  }).not.toThrow();
+});
+
+test('handle non-binary payloads correctly', async () => {
+  const input = await readJsonFixture('partial_utf8_before.json');
+  const partials = createState<PartialResponses>({
+    test: input,
+  });
+  const responses = createState<Record<string, Response>>({});
+  expect(() => {
+    assembleChunksIfResponseIsComplete(partials, responses, 'test');
+  }).not.toThrow();
+  const expected = await readJsonFixture('partial_utf8_after.json');
+  expect(responses.get()['test']).toEqual(expected);
+});
+
+test('handle binary payloads correctly', async () => {
+  const input = await readJsonFixture('partial_binary_before.json');
+  const partials = createState<PartialResponses>({
+    test: input,
+  });
+  const responses = createState<Record<string, Response>>({});
+  expect(() => {
+    assembleChunksIfResponseIsComplete(partials, responses, 'test');
+  }).not.toThrow();
+  const expected = await readJsonFixture('partial_binary_after.json');
+  expect(responses.get()['test']).toEqual(expected);
+});
```

Diffs for the five JSON fixture files added under `fixtures/` (used by the tests above) were suppressed because one or more lines are too long.

The network plugin's `chunks` module:

```diff
@@ -7,22 +7,67 @@
  * @format
  */
 
-import {TextDecoder} from 'util';
+import type {PartialResponses, Response} from './types';
+import {Atom} from 'flipper-plugin';
+import {Base64} from 'js-base64';
+
+export function assembleChunksIfResponseIsComplete(
+  partialResponses: Atom<PartialResponses>,
+  responses: Atom<Record<string, Response>>,
+  responseId: string,
+) {
+  const partialResponseEntry = partialResponses.get()[responseId];
+  const numChunks = partialResponseEntry.initialResponse?.totalChunks;
+  if (
+    !partialResponseEntry.initialResponse ||
+    !numChunks ||
+    Object.keys(partialResponseEntry.followupChunks).length + 1 < numChunks
+  ) {
+    // Partial response not yet complete, do nothing.
+    return;
+  }
+  // Partial response has all required chunks, convert it to a full Response.
+  const response: Response = partialResponseEntry.initialResponse;
+  const allChunks: string[] =
+    response.data != null
+      ? [
+          response.data,
+          ...Object.entries(partialResponseEntry.followupChunks)
+            // It's important to parseInt here or it sorts lexicographically
+            .sort((a, b) => parseInt(a[0], 10) - parseInt(b[0], 10))
+            .map(([_k, v]: [string, string]) => v),
+        ]
+      : [];
+  const data = combineBase64Chunks(allChunks);
+  responses.update((draft) => {
+    draft[responseId] = {
+      ...response,
+      // Currently data is always decoded at render time, so re-encode it to match the single response format.
+      data,
+    };
+  });
+  partialResponses.update((draft) => {
+    delete draft[responseId];
+  });
+}
 
 export function combineBase64Chunks(chunks: string[]): string {
-  const byteArray = chunks.map(
-    (b64Chunk) =>
-      Uint8Array.from(atob(b64Chunk), (c) => c.charCodeAt(0)).buffer,
-  );
+  const byteArray = chunks.map((b64Chunk) => {
+    return Base64.toUint8Array(b64Chunk);
+  });
   const size = byteArray
     .map((b) => b.byteLength)
     .reduce((prev, curr) => prev + curr, 0);
   const buffer = new Uint8Array(size);
   let offset = 0;
   for (let i = 0; i < byteArray.length; i++) {
-    buffer.set(new Uint8Array(byteArray[i]), offset);
+    buffer.set(byteArray[i], offset);
     offset += byteArray[i].byteLength;
   }
-  const data = new TextDecoder('utf-8').decode(buffer);
-  return data;
+  return Base64.fromUint8Array(buffer);
 }
```
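
For reference, a small usage sketch of the reworked `combineBase64Chunks` (hypothetical chunk values; it assumes only the `js-base64` API already used in the diff): payloads now stay base64-encoded end to end, so arbitrary bytes survive the merge.

```typescript
import {Base64} from 'js-base64';
import {combineBase64Chunks} from './chunks';

// Two hypothetical chunks of a binary payload, each base64-encoded:
const chunks = [
  Base64.fromUint8Array(new Uint8Array([0x89, 0x50, 0x4e])), // first half of a PNG magic header
  Base64.fromUint8Array(new Uint8Array([0x47, 0x0d, 0x0a])), // second half
];

// Chunks are decoded to bytes, concatenated, and re-encoded as base64.
// No UTF-8 decode happens anywhere, so invalid-UTF-8 bytes are preserved.
const combined = combineBase64Chunks(chunks);
Base64.toUint8Array(combined); // Uint8Array [0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a]
```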

The network plugin's main `index` module:

```diff
@@ -27,7 +27,6 @@ import {
   TableHighlightedRows,
   TableRows,
   TableBodyRow,
-  produce,
 } from 'flipper';
 import {
   Request,
@@ -37,13 +36,14 @@ import {
   ResponseFollowupChunk,
   Header,
   MockRoute,
+  PartialResponses,
 } from './types';
 import {convertRequestToCurlCommand, getHeaderValue, decodeBody} from './utils';
 import RequestDetails from './RequestDetails';
 import {clipboard} from 'electron';
 import {URL} from 'url';
 import {MockResponseDialog} from './MockResponseDialog';
-import {combineBase64Chunks} from './chunks';
+import {assembleChunksIfResponseIsComplete} from './chunks';
 import {
   PluginClient,
   Device,
@@ -191,12 +191,10 @@ export function plugin(client: PluginClient<Events, Methods>) {
     {persist: 'responses'},
   );
 
-  const partialResponses = createState<{
-    [id: string]: {
-      initialResponse?: Response;
-      followupChunks: {[id: number]: string};
-    };
-  }>({}, {persist: 'partialResponses'});
+  const partialResponses = createState<PartialResponses>(
+    {},
+    {persist: 'partialResponses'},
+  );
 
   client.onDeepLink((payload: unknown) => {
     if (typeof payload === 'string') {
@@ -247,89 +245,25 @@ export function plugin(client: PluginClient<Events, Methods>) {
     const message: Response | ResponseFollowupChunk = data as
       | Response
       | ResponseFollowupChunk;
-    if (message.index !== undefined && message.index > 0) {
-      // It's a follow up chunk
-      const followupChunk: ResponseFollowupChunk = message as ResponseFollowupChunk;
-      const partialResponseEntry = partialResponses.get()[followupChunk.id] ?? {
-        followupChunks: {},
-      };
-      const newPartialResponseEntry = produce(partialResponseEntry, (draft) => {
-        draft.followupChunks[followupChunk.index] = followupChunk.data;
-      });
-      const newPartialResponse = {
-        ...partialResponses.get(),
-        [followupChunk.id]: newPartialResponseEntry,
-      };
-      assembleChunksIfResponseIsComplete(newPartialResponse, followupChunk.id);
-      return;
-    }
-    // It's an initial chunk
-    const partialResponse: Response = message as Response;
-    const partialResponseEntry = partialResponses.get()[partialResponse.id] ?? {
-      followupChunks: {},
-    };
-    const newPartialResponseEntry = {
-      ...partialResponseEntry,
-      initialResponse: partialResponse,
-    };
-    const newPartialResponse = {
-      ...partialResponses.get(),
-      [partialResponse.id]: newPartialResponseEntry,
-    };
-    assembleChunksIfResponseIsComplete(newPartialResponse, partialResponse.id);
-  });
-
-  function assembleChunksIfResponseIsComplete(
-    partialResp: {
-      [id: string]: {
-        initialResponse?: Response;
-        followupChunks: {[id: number]: string};
-      };
-    },
-    responseId: string,
-  ) {
-    const partialResponseEntry = partialResp[responseId];
-    const numChunks = partialResponseEntry.initialResponse?.totalChunks;
-    if (
-      !partialResponseEntry.initialResponse ||
-      !numChunks ||
-      Object.keys(partialResponseEntry.followupChunks).length + 1 < numChunks
-    ) {
-      // Partial response not yet complete, do nothing.
-      partialResponses.set(partialResp);
-      return;
-    }
-    // Partial response has all required chunks, convert it to a full Response.
-    const response: Response = partialResponseEntry.initialResponse;
-    const allChunks: string[] =
-      response.data != null
-        ? [
-            response.data,
-            ...Object.entries(partialResponseEntry.followupChunks)
-              // It's important to parseInt here or it sorts lexicographically
-              .sort((a, b) => parseInt(a[0], 10) - parseInt(b[0], 10))
-              .map(([_k, v]: [string, string]) => v),
-          ]
-        : [];
-    const data = combineBase64Chunks(allChunks);
-    const newResponse = {
-      ...response,
-      // Currently data is always decoded at render time, so re-encode it to match the single response format.
-      data: btoa(data),
-    };
-    responses.update((draft) => {
-      draft[newResponse.id] = newResponse;
-    });
-    partialResponses.update((draft) => {
-      delete draft[newResponse.id];
+    partialResponses.update((draft) => {
+      if (!draft[message.id]) {
+        draft[message.id] = {
+          followupChunks: {},
+        };
+      }
+      const entry = draft[message.id];
+      if (message.index !== undefined && message.index > 0) {
+        // It's a follow up chunk
+        const chunk = message as ResponseFollowupChunk;
+        entry.followupChunks[chunk.index] = chunk.data;
+      } else {
+        // It's an initial chunk
+        entry.initialResponse = message as Response;
+      }
     });
-  }
+    assembleChunksIfResponseIsComplete(partialResponses, responses, message.id);
+  });
 
   function supportsMocks(device: Device): Promise<boolean> {
     if (device.isArchived) {
```
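
Pulling both halves together, a minimal sketch of the new assembly flow (hypothetical `req-1` id and payloads; the `initialResponse` literal is cast because a real `Response` carries more fields than matter for assembly):

```typescript
import {createState} from 'flipper-plugin';
import {Base64} from 'js-base64';
import {assembleChunksIfResponseIsComplete} from './chunks';
import type {PartialResponses, Response} from './types';

// A two-chunk response: the initial message plus one followup chunk.
const partials = createState<PartialResponses>({
  'req-1': {
    initialResponse: {
      id: 'req-1',
      totalChunks: 2,
      data: Base64.encode('hello '),
    } as Response,
    followupChunks: {1: Base64.encode('world')},
  },
});
const responses = createState<Record<string, Response>>({});

// All chunks are present, so this moves a combined Response into `responses`
// and deletes the partial entry.
assembleChunksIfResponseIsComplete(partials, responses, 'req-1');
Base64.decode(responses.get()['req-1'].data!); // 'hello world'
```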

The network plugin's `types` module:

```diff
@@ -82,13 +82,9 @@ export type MockRoute = {
   enabled: boolean;
 };
 
-export type PersistedState = {
-  requests: {[id: string]: Request};
-  responses: {[id: string]: Response};
-  partialResponses: {
-    [id: string]: {
-      initialResponse?: Response;
-      followupChunks: {[id: number]: string};
-    };
-  };
-};
+export type PartialResponses = {
+  [id: string]: {
+    initialResponse?: Response;
+    followupChunks: {[id: number]: string};
+  };
+};
```