Summary:
Changelog: Fixed an issue where Flipper would crash when decoding large partial requests.
The current processing of partial requests assumes that the provided base64 string is always a UTF-8 string, which is incorrect, as it might contain binary data as well. This causes the `atob` built-in to throw errors when trying to decode binary base64 strings with the following exception:
{F538782963}
However, what is worse, if those strings were larger than ~2 mb, it would completely crash Electron rather than on the JS level, with reports like:
```
Crashed Thread: 0 CrRendererMain Dispatch queue: com.apple.main-thread
Exception Type: EXC_BAD_ACCESS (SIGSEGV)
Exception Codes: EXC_I386_GPFLT
Exception Note: EXC_CORPSE_NOTIFY
Termination Signal: Segmentation fault: 11
Termination Reason: Namespace SIGNAL, Code 0xb
Terminating Process: exc handler [85268]
Thread 0 Crashed:: CrRendererMain Dispatch queue: com.apple.main-thread
0 com.github.Electron.framework 0x000000011155b16f v8::internal::SetupIsolateDelegate::SetupHeap(v8::internal::Heap*) + 22324575
1 com.github.Electron.framework 0x000000011155e811 v8::internal::SetupIsolateDelegate::SetupHeap(v8::internal::Heap*) + 22338561
2 com.github.Electron.framework 0x00000001117e2e62 v8::internal::SetupIsolateDelegate::SetupHeap(v8::internal::Heap*) + 24978002
3 com.github.Electron.framework 0x000000010fa32660 v8::internal::ClassScope::ResolvePrivateNamesPartially() + 14944
4 com.github.Electron.framework 0x000000010fa322b5 v8::internal::ClassScope::ResolvePrivateNamesPartially() + 14005
5 com.github.Electron.framework 0x000000010fa31933 v8::internal::ClassScope::ResolvePrivateNamesPartially() + 11571
6 com.github.Electron.framework 0x000000011007ef58 v8::internal::SetupIsolateDelegate::SetupHeap(v8::internal::Heap*) + 451400
```
Reproduced this JS issue by lowering the `MAX_BODY_SIZE_IN_BYTES` in `NetworkFlipperPlugin.java` to 10KB, which causes all requests to be processed as partials.
Reproducing the Electron crash is a lot harder, as it requires a surface that makes large, binary requests (more than a few MB) that are still intercepted by the Network layer. The best example I could find is sending large pictures or videos through Messenger for Android chat. Even in that case it is hard to reproduce due to caching.
Fun fact, you can crash your own flipper and get the above crash by running this command:
`btoa(require("fs").readFileSync("/Users/mweststrate/Desktop/Screen Recording 2021-03-24 at 16.08.27 crop.mov", "binary"))`, where the provided file must be a few mb's large (this one is 10).
As a result of fixing this issue, images that were sent as partials can now be correctly previewed in the Network plugin again.
Reviewed By: jknoxville
Differential Revision: D27302961
fbshipit-source-id: 1ac86840f7268062bb59c789f3904537df3c51fa
176 lines · 4.7 KiB · TypeScript
/**
 * Copyright (c) Facebook, Inc. and its affiliates.
 *
 * This source code is licensed under the MIT license found in the
 * LICENSE file in the root directory of this source tree.
 *
 * @format
 */

import {combineBase64Chunks} from '../chunks';
|
|
import {TestUtils, createState} from 'flipper-plugin';
|
|
import * as NetworkPlugin from '../index';
|
|
import {assembleChunksIfResponseIsComplete} from '../chunks';
|
|
import path from 'path';
|
|
import {PartialResponses, Response} from '../types';
|
|
import {Base64} from 'js-base64';
|
|
import * as fs from 'fs';
|
|
import {promisify} from 'util';
|
|
|
|
// Promisified fs.readFile, so fixture files can be loaded with async/await.
const readFile = promisify(fs.readFile);
test('Test assembling base64 chunks', () => {
|
|
const message = 'wassup john?';
|
|
const chunks = message.match(/.{1,2}/g)?.map(btoa);
|
|
|
|
if (chunks === undefined) {
|
|
throw new Error('invalid chunks');
|
|
}
|
|
|
|
const output = combineBase64Chunks(chunks);
|
|
expect(Base64.decode(output)).toBe('wassup john?');
|
|
});
|
|
|
|
test('Reducer correctly adds initial chunk', () => {
  const {instance, sendEvent} = TestUtils.startPlugin(NetworkPlugin);
  // No partial responses are tracked before any event arrives.
  expect(instance.partialResponses.get()).toEqual({});

  // Deliver the first chunk (index 0) of a two-chunk response.
  sendEvent('partialResponse', {
    id: '1',
    timestamp: 123,
    status: 200,
    data: 'hello',
    reason: 'nothing',
    headers: [],
    isMock: false,
    insights: null,
    index: 0,
    totalChunks: 2,
  });

  // The initial chunk is stored verbatim under its request id, with no
  // followup chunks recorded yet.
  expect(instance.partialResponses.get()['1']).toMatchInlineSnapshot(`
    Object {
      "followupChunks": Object {},
      "initialResponse": Object {
        "data": "hello",
        "headers": Array [],
        "id": "1",
        "index": 0,
        "insights": null,
        "isMock": false,
        "reason": "nothing",
        "status": 200,
        "timestamp": 123,
        "totalChunks": 2,
      },
    }
  `);
});
test('Reducer correctly adds followup chunk', () => {
  const {instance, sendEvent} = TestUtils.startPlugin(NetworkPlugin);
  // No partial responses are tracked before any event arrives.
  expect(instance.partialResponses.get()).toEqual({});

  // Deliver a followup chunk (index > 0) before any initial chunk arrived.
  sendEvent('partialResponse', {
    id: '1',
    totalChunks: 2,
    index: 1,
    data: 'hello',
  });
  // The chunk is stored keyed by its index; there is no initialResponse yet.
  expect(instance.partialResponses.get()['1']).toMatchInlineSnapshot(`
    Object {
      "followupChunks": Object {
        "1": "hello",
      },
    }
  `);
});
test('Reducer correctly combines initial response and followup chunk', () => {
  const {instance, sendEvent} = TestUtils.startPlugin(NetworkPlugin);
  // Pre-seed an initial chunk ('aGVs' is base64 for 'hel') that is still
  // waiting for one more chunk (totalChunks: 2).
  instance.partialResponses.set({
    '1': {
      followupChunks: {},
      initialResponse: {
        data: 'aGVs',
        headers: [],
        id: '1',
        insights: null,
        isMock: false,
        reason: 'nothing',
        status: 200,
        timestamp: 123,
        index: 0,
        totalChunks: 2,
      },
    },
  });
  expect(instance.responses.get()).toEqual({});
  // Deliver the final chunk ('bG8=' is base64 for 'lo').
  sendEvent('partialResponse', {
    id: '1',
    totalChunks: 2,
    index: 1,
    data: 'bG8=',
  });

  // Once all chunks have arrived, the partial entry is removed and the
  // chunks are merged into one complete response ('aGVsbG8=' === 'hello').
  expect(instance.partialResponses.get()).toEqual({});
  expect(instance.responses.get()['1']).toMatchInlineSnapshot(`
    Object {
      "data": "aGVsbG8=",
      "headers": Array [],
      "id": "1",
      "index": 0,
      "insights": null,
      "isMock": false,
      "reason": "nothing",
      "status": 200,
      "timestamp": 123,
      "totalChunks": 2,
    }
  `);
});
async function readJsonFixture(filename: string) {
|
|
return JSON.parse(
|
|
await readFile(path.join(__dirname, 'fixtures', filename), 'utf-8'),
|
|
);
|
|
}
|
|
|
|
test('handle small binary payloads correctly', async () => {
|
|
const input = await readJsonFixture('partial_failing_example.json');
|
|
const partials = createState<PartialResponses>({
|
|
test: input,
|
|
});
|
|
const responses = createState<Record<string, Response>>({});
|
|
expect(() => {
|
|
// this used to throw
|
|
assembleChunksIfResponseIsComplete(partials, responses, 'test');
|
|
}).not.toThrow();
|
|
});
|
|
|
|
test('handle non binary payloads correcty', async () => {
|
|
const input = await readJsonFixture('partial_utf8_before.json');
|
|
const partials = createState<PartialResponses>({
|
|
test: input,
|
|
});
|
|
const responses = createState<Record<string, Response>>({});
|
|
expect(() => {
|
|
assembleChunksIfResponseIsComplete(partials, responses, 'test');
|
|
}).not.toThrow();
|
|
const expected = await readJsonFixture('partial_utf8_after.json');
|
|
expect(responses.get()['test']).toEqual(expected);
|
|
});
|
|
|
|
test('handle binary payloads correcty', async () => {
|
|
const input = await readJsonFixture('partial_binary_before.json');
|
|
const partials = createState<PartialResponses>({
|
|
test: input,
|
|
});
|
|
const responses = createState<Record<string, Response>>({});
|
|
expect(() => {
|
|
assembleChunksIfResponseIsComplete(partials, responses, 'test');
|
|
}).not.toThrow();
|
|
const expected = await readJsonFixture('partial_binary_after.json');
|
|
expect(responses.get()['test']).toEqual(expected);
|
|
});
|