refactor: Make piping consistent

commit 95ab0b4e76
parent 715ba126f9
Author: Joachim Van Herwegen
Date:   2020-11-10 16:02:49 +01:00

7 changed files with 56 additions and 25 deletions

View File

@@ -2,6 +2,7 @@ import { getLoggerFor } from '../../logging/LogUtil';
 import type { HttpResponse } from '../../server/HttpResponse';
 import { INTERNAL_QUADS } from '../../util/ContentTypes';
 import { UnsupportedHttpError } from '../../util/errors/UnsupportedHttpError';
+import { pipeSafe } from '../../util/Util';
 import type { MetadataWriter } from './metadata/MetadataWriter';
 import type { ResponseDescription } from './response/ResponseDescription';
 import { ResponseWriter } from './ResponseWriter';
@@ -33,7 +34,10 @@ export class BasicResponseWriter extends ResponseWriter {
     input.response.writeHead(input.result.statusCode);
     if (input.result.data) {
-      input.result.data.pipe(input.response);
+      const pipe = pipeSafe(input.result.data, input.response);
+      pipe.on('error', (error): void => {
+        this.logger.error(`Writing to HttpResponse failed with message ${error.message}`);
+      });
     } else {
       // If there is input data the response will end once the input stream ends
       input.response.end();

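Since pipeSafe now destroys the destination when the source errors, the response stream itself emits an 'error' event; the listener added above turns that into a log line instead of an unhandled error. A rough standalone sketch of that behaviour, using plain PassThrough streams as stand-ins for the result data and the HttpResponse (illustrative only, not part of this commit):

import { PassThrough } from 'stream';
import { pipeSafe } from '../../util/Util';

// Stand-ins for input.result.data and the HttpResponse.
const data = new PassThrough();
const response = new PassThrough();

const pipe = pipeSafe(data, response);
pipe.on('error', (error): void => {
  // Mirrors the logger.error call above: the failed write is reported, not rethrown.
  console.error(`Writing to HttpResponse failed with message ${error.message}`);
});

// An error on the data stream now destroys the response and triggers the listener above.
data.destroy(new Error('data stream failed'));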
View File

@@ -5,7 +5,7 @@ import { getLoggerFor } from '../../logging/LogUtil';
 import { APPLICATION_SPARQL_UPDATE } from '../../util/ContentTypes';
 import { UnsupportedHttpError } from '../../util/errors/UnsupportedHttpError';
 import { UnsupportedMediaTypeHttpError } from '../../util/errors/UnsupportedMediaTypeHttpError';
-import { pipeStreamsAndErrors, readableToString } from '../../util/Util';
+import { pipeSafe, readableToString } from '../../util/Util';
 import type { BodyParserArgs } from './BodyParser';
 import { BodyParser } from './BodyParser';
 import type { SparqlUpdatePatch } from './SparqlUpdatePatch';
@@ -29,10 +29,8 @@ export class SparqlUpdateBodyParser extends BodyParser {
     // Note that readableObjectMode is only defined starting from Node 12
     // It is impossible to check if object mode is enabled in Node 10 (without accessing private variables)
     const options = { objectMode: request.readableObjectMode };
-    const toAlgebraStream = new PassThrough(options);
-    const dataCopy = new PassThrough(options);
-    pipeStreamsAndErrors(request, toAlgebraStream);
-    pipeStreamsAndErrors(request, dataCopy);
+    const toAlgebraStream = pipeSafe(request, new PassThrough(options));
+    const dataCopy = pipeSafe(request, new PassThrough(options));
     let algebra: Algebra.Operation;
     try {
       const sparql = await readableToString(toAlgebraStream);

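pipeSafe returns the destination it was given, so the request can be fanned out into two PassThrough copies in a single expression each: one copy is read to a string for parsing, the other is kept as the raw body. A minimal sketch of that fan-out, with a made-up update query and stream names that are only illustrative:

import { PassThrough } from 'stream';
import { pipeSafe, readableToString } from '../../util/Util';

const request = new PassThrough();
const toAlgebraStream = pipeSafe(request, new PassThrough());
const dataCopy = pipeSafe(request, new PassThrough());
request.end('INSERT DATA { <http://test.com/s> <http://test.com/p> <http://test.com/o> }');

// Both branches receive the full body; an error on the request would destroy both copies.
readableToString(toAlgebraStream)
  .then((sparql): void => console.log(sparql.length > 0));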
View File

@@ -5,7 +5,7 @@ import { RepresentationMetadata } from '../../ldp/representation/RepresentationM
 import { INTERNAL_QUADS } from '../../util/ContentTypes';
 import { UnsupportedHttpError } from '../../util/errors/UnsupportedHttpError';
 import { CONTENT_TYPE } from '../../util/UriConstants';
-import { pipeStreamsAndErrors } from '../../util/Util';
+import { pipeSafe } from '../../util/Util';
 import { checkRequest } from './ConversionUtil';
 import type { RepresentationConverterArgs } from './RepresentationConverter';
 import { TypedRepresentationConverter } from './TypedRepresentationConverter';
@@ -39,8 +39,8 @@ export class RdfToQuadConverter extends TypedRepresentationConverter {
     // Wrap the stream such that errors are transformed
     // (Node 10 requires both writableObjectMode and readableObjectMode)
-    const data = new PassThrough({ writableObjectMode: true, readableObjectMode: true });
-    pipeStreamsAndErrors(rawQuads, data, (error): Error => new UnsupportedHttpError(error.message));
+    const pass = new PassThrough({ writableObjectMode: true, readableObjectMode: true });
+    const data = pipeSafe(rawQuads, pass, (error): Error => new UnsupportedHttpError(error.message));
     return {
       binary: false,

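The optional third argument is what performs the error translation above: when the raw quad stream fails, the error is mapped before the wrapped stream is destroyed with it, so consumers only ever see an UnsupportedHttpError. In isolation this looks roughly like the following sketch (stream names and error message are made up, not part of this commit):

import { PassThrough } from 'stream';
import { UnsupportedHttpError } from '../../util/errors/UnsupportedHttpError';
import { pipeSafe } from '../../util/Util';

const rawQuads = new PassThrough({ writableObjectMode: true, readableObjectMode: true });
const pass = new PassThrough({ writableObjectMode: true, readableObjectMode: true });
const data = pipeSafe(rawQuads, pass, (error): Error => new UnsupportedHttpError(error.message));

data.on('error', (error): void => {
  // A parser failure surfaces on the wrapped stream as an UnsupportedHttpError.
  console.error(error instanceof UnsupportedHttpError, error.message);
});

rawQuads.destroy(new Error('Unexpected "." on line 3.'));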
View File

@@ -7,7 +7,7 @@ import { RepresentationMetadata } from '../ldp/representation/RepresentationMeta
 import { TEXT_TURTLE } from './ContentTypes';
 import { LDP, RDF } from './UriConstants';
 import { toNamedNode } from './UriUtil';
-import { pipeStreamsAndErrors, pushQuad } from './Util';
+import { pipeSafe, pushQuad } from './Util';
 
 export class MetadataController {
   /**
@@ -46,7 +46,7 @@ export class MetadataController {
    * @returns The Readable object.
    */
   public serializeQuads(quads: Quad[]): Readable {
-    return pipeStreamsAndErrors(streamifyArray(quads), new StreamWriter({ format: TEXT_TURTLE }));
+    return pipeSafe(streamifyArray(quads), new StreamWriter({ format: TEXT_TURTLE }));
   }
 
   /**
@@ -56,6 +56,6 @@ export class MetadataController {
    * @returns A promise containing the array of quads.
    */
   public async parseQuads(readable: Readable): Promise<Quad[]> {
-    return await arrayifyStream(pipeStreamsAndErrors(readable, new StreamParser({ format: TEXT_TURTLE })));
+    return await arrayifyStream(pipeSafe(readable, new StreamParser({ format: TEXT_TURTLE })));
   }
 }

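With both helpers returning the piped stream directly, a serialize/parse round trip is just a matter of chaining them. A small sketch, assuming a MetadataController instance is already available and using a made-up quad:

import { DataFactory } from 'n3';
import { MetadataController } from './MetadataController';

// Assuming an instance is available; how it is constructed is out of scope here.
declare const controller: MetadataController;

const quads = [ DataFactory.quad(
  DataFactory.namedNode('http://test.com/resource'),
  DataFactory.namedNode('http://www.w3.org/1999/02/22-rdf-syntax-ns#type'),
  DataFactory.namedNode('http://www.w3.org/ns/ldp#Resource'),
) ];

// serializeQuads pipes a quad stream through an N3 StreamWriter; parseQuads pipes the turtle back
// through a StreamParser, so the round trip should yield the original number of quads.
controller.parseQuads(controller.serializeQuads(quads))
  .then((result): void => console.log(result.length === quads.length));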
View File

@@ -59,20 +59,26 @@ export const matchingMediaType = (mediaA: string, mediaB: string): boolean => {
 };
 
 /**
- * Pipes one stream into another.
- * Makes sure an error of the first stream gets passed to the second.
+ * Pipes one stream into another and emits errors of the first stream with the second.
+ * In case of an error in the first stream the second one will be destroyed with the given error.
  * @param readable - Initial readable stream.
  * @param destination - The destination for writing data.
+ * @param mapError - Optional function that takes the error and converts it to a new error.
  *
  * @returns The destination stream.
  */
-export const pipeStreamsAndErrors = <T extends Writable>(readable: NodeJS.ReadableStream, destination: T,
+export const pipeSafe = <T extends Writable>(readable: NodeJS.ReadableStream, destination: T,
   mapError?: (error: Error) => Error): T => {
+  // Not using `stream.pipeline` since the result there only emits an error event if the last stream has the error
   readable.pipe(destination);
-  readable.on('error', (error): boolean => {
+  readable.on('error', (error): void => {
     logger.warn(`Piped stream errored with ${error.message}`);
-    return destination.emit('error', mapError ? mapError(error) : error);
+    // From https://nodejs.org/api/stream.html#stream_readable_pipe_destination_options :
+    // "One important caveat is that if the Readable stream emits an error during processing, the Writable destination
+    // is not closed automatically. If an error occurs, it will be necessary to manually close each stream
+    // in order to prevent memory leaks."
+    destination.destroy(mapError ? mapError(error) : error);
   });
 
   return destination;
 };
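
Taken together, a standalone sketch of the new behaviour with made-up streams and errors: the destination returned by pipeSafe is destroyed with the (optionally mapped) source error instead of merely having an 'error' event emitted on it.

import { PassThrough, Readable } from 'stream';
import { pipeSafe } from './Util';

const source = new Readable({ read(): void {} });
const piped = pipeSafe(source, new PassThrough(), (error): Error => new Error(`mapped: ${error.message}`));

piped.on('error', (error): void => {
  // Fires once with the mapped error; the destination is already destroyed at this point,
  // so no half-open stream is left behind.
  console.error(error.message, piped.destroyed);
});

source.destroy(new Error('source failed'));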