Fix error handling during piping in updater

Fedor Indutny 2022-03-02 11:48:07 -08:00 committed by GitHub
parent 34eb6a541d
commit fe9cdfbed9
4 changed files with 77 additions and 21 deletions

View File

@ -8,6 +8,8 @@ import fs from 'fs/promises';
import { tmpdir } from 'os';
import { strictAssert } from '../../util/assert';
import * as durations from '../../util/durations';
import { getGotOptions } from '../../updater/got';
import {
computeDiff,
getBlockMapFileName,
@ -73,8 +75,10 @@ describe('updater/differential', () => {
let server: http.Server;
let baseUrl: string;
let shouldTimeout: 'response' | undefined;
beforeEach(callback => {
shouldTimeout = undefined;
server = http.createServer(async (req, res) => {
if (!req.headers['user-agent']?.includes('Signal-Desktop')) {
res.writeHead(403);
@ -107,11 +111,13 @@ describe('updater/differential', () => {
const BOUNDARY = 'f8f254ce1ba37627';
- res.setHeader(
- 'content-type',
- `multipart/byteranges; boundary=${BOUNDARY}`
- );
- res.writeHead(206);
+ res.writeHead(206, {
+ 'content-type': `multipart/byteranges; boundary=${BOUNDARY}`,
+ });
+ if (shouldTimeout === 'response') {
+ res.flushHeaders();
+ return;
+ }
const totalSize = fullFile.length;
@ -256,7 +262,11 @@ describe('updater/differential', () => {
const outFile = path.join(outDir, 'out.bin');
const chunks = new Array<number>();
- await download(outFile, data, size => chunks.push(size));
+ await download(outFile, data, {
+ statusCallback(size) {
+ chunks.push(size);
+ },
+ });
const expected = await fs.readFile(path.join(FIXTURES, newFile));
const actual = await fs.readFile(outFile);
@ -267,5 +277,33 @@ describe('updater/differential', () => {
'Expected multiple callback invocations'
);
});
it('handles response timeouts gracefully', async () => {
const data = await prepareDownload({
oldFile: path.join(FIXTURES, oldFile),
newUrl: `${baseUrl}/${newFile}`,
sha512: newHash,
});
const outDir = await fs.mkdtemp(path.join(tmpdir(), 'signal-temp-'));
await fs.mkdir(outDir, { recursive: true });
const outFile = path.join(outDir, 'out.bin');
shouldTimeout = 'response';
await assert.isRejected(
download(outFile, data, {
gotOptions: {
...getGotOptions(),
timeout: {
connect: 0.5 * durations.SECOND,
lookup: 0.5 * durations.SECOND,
socket: 0.5 * durations.SECOND,
},
},
}),
/Timeout awaiting 'socket' for 500ms/
);
});
});
});
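Side note on the timeout knobs used above: got applies its `timeout` settings per connection phase, so overriding just `lookup`, `connect`, and `socket` on top of `getGotOptions()` is enough to make the stalled response fail after 500 ms instead of the much longer production defaults. A minimal sketch of how the same failure surfaces to an ordinary got stream consumer (the helper name `fetchWithShortTimeouts` is illustrative, the import path follows the test file above, and got v11's `TimeoutError` export is assumed):

import got, { TimeoutError } from 'got';
import { getGotOptions } from '../../updater/got';

// Sketch: consume a got stream with the shortened timeouts used in the test.
// If the server flushes headers and then goes silent, the socket phase times
// out and the async iterator rejects with a TimeoutError whose message matches
// the assertion above ("Timeout awaiting 'socket' for 500ms").
async function fetchWithShortTimeouts(url: string): Promise<Buffer> {
  const stream = got.stream(url, {
    ...getGotOptions(),
    timeout: { lookup: 500, connect: 500, socket: 500 },
  });

  const chunks: Array<Buffer> = [];
  try {
    for await (const chunk of stream) {
      chunks.push(chunk);
    }
  } catch (error) {
    if (error instanceof TimeoutError) {
      throw new Error(`Download stalled: ${error.message}`);
    }
    throw error;
  }

  return Buffer.concat(chunks);
}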

View File

@ -484,12 +484,12 @@ export abstract class Updater {
);
try {
- await downloadDifferentialData(
- targetUpdatePath,
- differentialData,
- updateOnProgress ? this.throttledSendDownloadingUpdate : undefined,
- this.logger
- );
+ await downloadDifferentialData(targetUpdatePath, differentialData, {
+ statusCallback: updateOnProgress
+ ? this.throttledSendDownloadingUpdate
+ : undefined,
+ logger: this.logger,
+ });
gotUpdate = true;
} catch (error) {
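For reference, the options object adopted at this call site matches the `DownloadOptionsType` added in differential.ts below: the previously positional `statusCallback` and `logger` become named, optional fields, and a test-only `gotOptions` override rides along. A rough sketch of the calling convention, using the `prepareDownload`/`download` pair that the tests exercise (the wrapper function and import path are illustrative):

import { prepareDownload, download } from '../updater/differential';

// Sketch: the options-object calling convention. All fields are optional;
// `gotOptions` exists only so tests can shorten got's timeouts.
async function applyDifferentialUpdate(
  oldFile: string,
  newUrl: string,
  sha512: string,
  targetPath: string
): Promise<void> {
  const data = await prepareDownload({ oldFile, newUrl, sha512 });

  await download(targetPath, data, {
    statusCallback(downloadedSize) {
      // e.g. forward progress to the UI, as Updater does with
      // throttledSendDownloadingUpdate above.
      console.log(`downloaded ${downloadedSize} bytes`);
    },
    // logger and (for tests) gotOptions may be supplied here as well.
  });
}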

View File

@ -70,6 +70,14 @@ export type PrepareDownloadOptionsType = Readonly<{
sha512: string;
}>;
export type DownloadOptionsType = Readonly<{
statusCallback?: (downloadedSize: number) => void;
logger?: LoggerType;
// Testing
gotOptions?: ReturnType<typeof getGotOptions>;
}>;
export type DownloadRangesOptionsType = Readonly<{
url: string;
output: FileHandle;
@ -77,6 +85,9 @@ export type DownloadRangesOptionsType = Readonly<{
logger?: LoggerType;
abortSignal?: AbortSignal;
chunkStatusCallback: (chunkSize: number) => void;
// Testing
gotOptions?: ReturnType<typeof getGotOptions>;
}>;
export function getBlockMapFileName(fileName: string): string {
@ -240,8 +251,7 @@ export function isValidPreparedData(
export async function download(
newFile: string,
{ diff, oldFile, newUrl, sha512 }: PrepareDownloadResultType,
- statusCallback?: (downloadedSize: number) => void,
- logger?: LoggerType
+ { statusCallback, logger, gotOptions }: DownloadOptionsType = {}
): Promise<void> {
const input = await open(oldFile, 'r');
@ -288,6 +298,7 @@ export async function download(
ranges: downloadActions,
logger,
abortSignal,
gotOptions,
chunkStatusCallback(chunkSize) {
downloadedSize += chunkSize;
if (!abortSignal.aborted) {
@ -336,7 +347,14 @@ export async function downloadRanges(
}
// Request multiple ranges in a single request
- const { url, output, logger, abortSignal, chunkStatusCallback } = options;
+ const {
+ url,
+ output,
+ logger,
+ abortSignal,
+ chunkStatusCallback,
+ gotOptions = getGotOptions(),
+ } = options;
logger?.info('updater/downloadRanges: downloading ranges', ranges.length);
@ -350,8 +368,7 @@ export async function downloadRanges(
diffByRange.set(`${readOffset}-${readOffset + size - 1}`, diff);
}
- const gotOptions = getGotOptions();
- const stream = got.stream(`${url}`, {
+ const stream = got.stream(url, {
...gotOptions,
headers: {
...gotOptions.headers,
@ -402,6 +419,7 @@ export async function downloadRanges(
dicer.on('part', part => partPromises.push(onPart(part)));
dicer.once('finish', () => stream.destroy());
stream.once('error', err => dicer.destroy(err));
// Pipe the response stream fully into dicer
// NOTE: we can't use `pipeline` due to a dicer bug:
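The two NOTE lines above carry the reasoning behind this part of the fix: because `pipeline` reportedly cannot be used with dicer, the response stream is piped manually, and a bare `.pipe()` does not forward errors from the source to the destination. The `stream.once('error', err => dicer.destroy(err))` handler closes that gap, while `dicer.once('finish', () => stream.destroy())` handles teardown in the other direction. A generic sketch of the pattern with dicer replaced by an ordinary `Writable` (the helper name is illustrative):

import { Readable, Writable } from 'stream';

// Sketch: manual piping with explicit error/teardown forwarding, the pattern
// used above instead of stream.pipeline(). A source error destroys the sink
// (so its consumer does not hang), and sink completion destroys the source.
function pipeWithErrorForwarding(
  source: Readable,
  sink: Writable
): Promise<void> {
  return new Promise((resolve, reject) => {
    sink.once('finish', () => {
      source.destroy();
      resolve();
    });
    sink.once('error', reject);

    // .pipe() alone would swallow this: forward it so the sink fails too.
    source.once('error', err => sink.destroy(err));

    source.pipe(sink);
  });
}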
@ -425,7 +443,6 @@ export async function downloadRanges(
return;
}
- throw new Error('Missing ranges');
logger?.info(
'updater/downloadRanges: downloading missing ranges',
diffByRange.size

View File

@ -7,10 +7,11 @@ import ProxyAgent from 'proxy-agent';
import * as packageJson from '../../package.json';
import { getUserAgent } from '../util/getUserAgent';
+ import * as durations from '../util/durations';
- export const GOT_CONNECT_TIMEOUT = 2 * 60 * 1000;
- export const GOT_LOOKUP_TIMEOUT = 2 * 60 * 1000;
- export const GOT_SOCKET_TIMEOUT = 2 * 60 * 1000;
+ export const GOT_CONNECT_TIMEOUT = 5 * durations.MINUTE;
+ export const GOT_LOOKUP_TIMEOUT = 5 * durations.MINUTE;
+ export const GOT_SOCKET_TIMEOUT = 5 * durations.MINUTE;
export function getProxyUrl(): string | undefined {
return process.env.HTTPS_PROXY || process.env.https_proxy;